diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 44eda08020a..d8f49b0b136 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -35,7 +35,7 @@ Use local transport (default since 1.3): === Running Elasticsearch from a checkout In order to run Elasticsearch from source without building a package, you can -run it using Maven: +run it using Gradle: ------------------------------------- gradle run @@ -416,8 +416,8 @@ that'd consume a ton of ram. == Testing scripts more directly In general its best to stick to testing in vagrant because the bats scripts are -destructive. When working with a single package its generally faster to run its -tests in a tighter loop than maven provides. In one window: +destructive. When working with a single package it's generally faster to run its +tests in a tighter loop than gradle provides. In one window: -------------------------------- gradle :distribution:rpm:assemble diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index 7edaa49c2ea..ef3b702f2c9 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -96,6 +96,10 @@ public final class Response { return response.getEntity(); } + HttpResponse getHttpResponse() { + return response; + } + @Override public String toString() { return "Response{" + diff --git a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java index 35e31d921c7..af897ba93d1 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java @@ -19,6 +19,10 @@ package org.elasticsearch.client; +import org.apache.http.HttpEntity; +import org.apache.http.entity.BufferedHttpEntity; +import org.apache.http.util.EntityUtils; + import java.io.IOException; /** @@ -34,9 +38,19 @@ public final class ResponseException extends IOException { this.response = response; } - private static String buildMessage(Response response) { - return response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + private static String buildMessage(Response response) throws IOException { + String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + ": " + response.getStatusLine().toString(); + + HttpEntity entity = response.getEntity(); + if (entity != null) { + if (entity.isRepeatable() == false) { + entity = new BufferedHttpEntity(entity); + response.getHttpResponse().setEntity(entity); + } + message += "\n" + EntityUtils.toString(entity); + } + return message; } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java index f00060a02de..789f2bf6f6d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java @@ -149,6 +149,7 @@ public class RequestLoggerTests extends RestClientTestCase { if (getRandom().nextBoolean()) { entity = new StringEntity(responseBody, StandardCharsets.UTF_8); } else { + //test a non repeatable entity entity = new InputStreamEntity(new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8))); } httpResponse.setEntity(entity); diff 
--git a/client/rest/src/test/java/org/elasticsearch/client/ResponseExceptionTests.java b/client/rest/src/test/java/org/elasticsearch/client/ResponseExceptionTests.java new file mode 100644 index 00000000000..9185222f510 --- /dev/null +++ b/client/rest/src/test/java/org/elasticsearch/client/ResponseExceptionTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import org.apache.http.StatusLine; +import org.apache.http.entity.InputStreamEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicRequestLine; +import org.apache.http.message.BasicStatusLine; +import org.apache.http.util.EntityUtils; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +public class ResponseExceptionTests extends RestClientTestCase { + + public void testResponseException() throws IOException { + ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1); + StatusLine statusLine = new BasicStatusLine(protocolVersion, 500, "Internal Server Error"); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + + String responseBody = "{\"error\":{\"root_cause\": {}}}"; + boolean hasBody = getRandom().nextBoolean(); + if (hasBody) { + HttpEntity entity; + if (getRandom().nextBoolean()) { + entity = new StringEntity(responseBody, StandardCharsets.UTF_8); + } else { + //test a non repeatable entity + entity = new InputStreamEntity(new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8))); + } + httpResponse.setEntity(entity); + } + + RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion); + HttpHost httpHost = new HttpHost("localhost", 9200); + Response response = new Response(requestLine, httpHost, httpResponse); + ResponseException responseException = new ResponseException(response); + + assertSame(response, responseException.getResponse()); + if (hasBody) { + assertEquals(responseBody, EntityUtils.toString(responseException.getResponse().getEntity())); + } else { + assertNull(responseException.getResponse().getEntity()); + } + + String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + + ": " + response.getStatusLine().toString(); + if (hasBody) { + message += "\n" + responseBody; + } + assertEquals(message, responseException.getMessage()); + } +} 
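For context on the client change above: ResponseException.buildMessage now appends the response body to the exception message and, when the entity is not repeatable, wraps it in a BufferedHttpEntity and stores it back on the underlying HttpResponse so the body can still be consumed afterwards. The sketch below is illustrative only and not part of this patch; it assumes a class placed in the org.elasticsearch.client package, like ResponseExceptionTests above, so that the package-private Response constructor and getHttpResponse() accessor are visible.

package org.elasticsearch.client;

import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicRequestLine;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.util.EntityUtils;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative sketch (hypothetical class name): shows why buildMessage buffers non-repeatable entities.
public class ResponseExceptionBodyDemo {
    public static void main(String[] args) throws IOException {
        ProtocolVersion protocol = new ProtocolVersion("http", 1, 1);
        HttpResponse httpResponse = new BasicHttpResponse(new BasicStatusLine(protocol, 500, "Internal Server Error"));
        // A non-repeatable entity: it can normally be consumed only once.
        httpResponse.setEntity(new InputStreamEntity(
            new ByteArrayInputStream("{\"error\":\"boom\"}".getBytes(StandardCharsets.UTF_8))));

        Response response = new Response(new BasicRequestLine("GET", "/", protocol),
            new HttpHost("localhost", 9200), httpResponse);

        // buildMessage consumes the entity to build the message; because it wraps the entity in a
        // BufferedHttpEntity and stores it back via getHttpResponse().setEntity(...), the body
        // remains readable after the exception has been constructed.
        ResponseException exception = new ResponseException(response);

        // Message ends with the body on a new line, e.g. "GET http://localhost:9200/: HTTP/1.1 500 ...\n{"error":"boom"}"
        System.out.println(exception.getMessage());
        // The entity can still be read, exactly as ResponseExceptionTests asserts.
        System.out.println(EntityUtils.toString(response.getEntity()));
    }
}

Buffering trades holding a copy of the body in memory for the ability to read the entity more than once, which is what lets both the exception message and getEntity() expose the same content.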
diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 8222955c60b..c6cfb9e9a02 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -692,8 +692,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte NO_LONGER_PRIMARY_SHARD_EXCEPTION(ShardStateAction.NoLongerPrimaryShardException.class, ShardStateAction.NoLongerPrimaryShardException::new, 142), SCRIPT_EXCEPTION(org.elasticsearch.script.ScriptException.class, org.elasticsearch.script.ScriptException::new, 143), - NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144); - + NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144), + STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145); final Class exceptionClass; final FunctionThatThrowsIOException constructor; diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchSecurityException.java b/core/src/main/java/org/elasticsearch/ElasticsearchSecurityException.java index b6cd420c856..0cf5fb474e0 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchSecurityException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchSecurityException.java @@ -19,7 +19,6 @@ package org.elasticsearch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -27,40 +26,39 @@ import java.io.IOException; /** * Generic security exception */ -public class ElasticsearchSecurityException extends ElasticsearchException { - - private final RestStatus status; - +public class ElasticsearchSecurityException extends ElasticsearchStatusException { + /** + * Build the exception with a specific status and cause. + */ public ElasticsearchSecurityException(String msg, RestStatus status, Throwable cause, Object... args) { - super(msg, cause, args); - this.status = status ; + super(msg, status, cause, args); } + /** + * Build the exception with the status derived from the cause. + */ public ElasticsearchSecurityException(String msg, Exception cause, Object... args) { this(msg, ExceptionsHelper.status(cause), cause, args); } + /** + * Build the exception with a status of {@link RestStatus#INTERNAL_SERVER_ERROR} without a cause. + */ public ElasticsearchSecurityException(String msg, Object... args) { - this(msg, RestStatus.INTERNAL_SERVER_ERROR, null, args); + this(msg, RestStatus.INTERNAL_SERVER_ERROR, args); } + /** + * Build the exception without a cause. + */ public ElasticsearchSecurityException(String msg, RestStatus status, Object... args) { - this(msg, status, null, args); + super(msg, status, args); } + /** + * Read from a stream. 
+ */ public ElasticsearchSecurityException(StreamInput in) throws IOException { super(in); - status = RestStatus.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - RestStatus.writeTo(out, status); - } - - @Override - public final RestStatus status() { - return status; - } } diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchStatusException.java b/core/src/main/java/org/elasticsearch/ElasticsearchStatusException.java new file mode 100644 index 00000000000..55f12db69e1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ElasticsearchStatusException.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * Exception whose {@link RestStatus} is arbitrary rather than derived. Used, for example, by reindex-from-remote to wrap remote exceptions + * that contain a status. + */ +public class ElasticsearchStatusException extends ElasticsearchException { + private final RestStatus status; + + /** + * Build the exception with a specific status and cause. + */ + public ElasticsearchStatusException(String msg, RestStatus status, Throwable cause, Object... args) { + super(msg, cause, args); + this.status = status; + } + + /** + * Build the exception without a cause. + */ + public ElasticsearchStatusException(String msg, RestStatus status, Object... args) { + this(msg, status, null, args); + } + + /** + * Read from a stream.
+ */ + public ElasticsearchStatusException(StreamInput in) throws IOException { + super(in); + status = RestStatus.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + RestStatus.writeTo(out, status); + } + + @Override + public final RestStatus status() { + return status; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index d88cfe35f8f..82c54f8a1bd 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -47,6 +47,7 @@ import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Locale; import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -171,7 +172,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction(); diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index e50ad7f1306..9261bea945c 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -207,7 +207,9 @@ public abstract class TransportWriteAction< if (location != null) { pendingOps = true; indexShard.addRefreshListener(location, forcedRefresh -> { - logger.warn("block_until_refresh request ran out of slots and forced a refresh: [{}]", request); + if (forcedRefresh) { + logger.warn("block_until_refresh request ran out of slots and forced a refresh: [{}]", request); + } respond.respondAfterAsyncAction(forcedRefresh); }); } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index a50a1e7bbea..e8b8c5043e5 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -108,7 +108,7 @@ public abstract class TransportClient extends AbstractClient { final List resourcesToClose = new ArrayList<>(); final ThreadPool threadPool = new ThreadPool(settings); resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS)); - final NetworkService networkService = new NetworkService(settings); + final NetworkService networkService = new NetworkService(settings, Collections.emptyList()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); try { final List> additionalSettings = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/ParseField.java b/core/src/main/java/org/elasticsearch/common/ParseField.java index c04bcb14dcb..ed00394640e 100644 --- a/core/src/main/java/org/elasticsearch/common/ParseField.java +++ b/core/src/main/java/org/elasticsearch/common/ParseField.java @@ -26,7 +26,8 @@ import java.util.HashSet; import java.util.Set; /** - * Holds a field that can be found in a request while parsing and its different variants, which may be deprecated. 
+ * Holds a field that can be found in a request while parsing and its different + * variants, which may be deprecated. */ public class ParseField { @@ -37,6 +38,14 @@ public class ParseField { private String allReplacedWith = null; private final String[] allNames; + /** + * @param name + * the primary name for this field. This will be returned by + * {@link #getPreferredName()} + * @param deprecatedNames + * names for this field which are deprecated and will not be + * accepted when strict matching is used. + */ public ParseField(String name, String... deprecatedNames) { this.name = name; if (deprecatedNames == null || deprecatedNames.length == 0) { @@ -52,20 +61,35 @@ public class ParseField { this.allNames = allNames.toArray(new String[allNames.size()]); } - public String getPreferredName(){ + /** + * @return the preferred name used for this field + */ + public String getPreferredName() { return name; } + /** + * @return All names for this field regardless of whether they are + * deprecated + */ public String[] getAllNamesIncludedDeprecated() { return allNames; } + /** + * @param deprecatedNames + * deprecated names to include with the returned + * {@link ParseField} + * @return a new {@link ParseField} using the preferred name from this one + * but with the specified deprecated names + */ public ParseField withDeprecation(String... deprecatedNames) { return new ParseField(this.name, deprecatedNames); } /** - * Return a new ParseField where all field names are deprecated and replaced with {@code allReplacedWith}. + * Return a new ParseField where all field names are deprecated and replaced + * with {@code allReplacedWith}. */ public ParseField withAllDeprecated(String allReplacedWith) { ParseField parseField = this.withDeprecation(getAllNamesIncludedDeprecated()); @@ -73,16 +97,34 @@ public class ParseField { return parseField; } - boolean match(String currentFieldName, boolean strict) { - if (allReplacedWith == null && currentFieldName.equals(name)) { + /** + * @param fieldName + * the field name to match against this {@link ParseField} + * @param strict + * if true an exception will be thrown if a deprecated field name + * is given. If false the deprecated name will be matched but a + * message will also be logged to the {@link DeprecationLogger} + * @return true if fieldName matches any of the acceptable + * names for this {@link ParseField}. + */ + boolean match(String fieldName, boolean strict) { + // if this parse field has not been completely deprecated then try to + // match the preferred name + if (allReplacedWith == null && fieldName.equals(name)) { return true; } + // Now try to match against one of the deprecated names. 
Note that if + // the parse field is entirely deprecated (allReplacedWith != null) all + // fields will be in the deprecatedNames array String msg; for (String depName : deprecatedNames) { - if (currentFieldName.equals(depName)) { - msg = "Deprecated field [" + currentFieldName + "] used, expected [" + name + "] instead"; + if (fieldName.equals(depName)) { + msg = "Deprecated field [" + fieldName + "] used, expected [" + name + "] instead"; if (allReplacedWith != null) { - msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]"; + // If the field is entirely deprecated then there is no + // preferred name so instead use the `allReplaceWith` + // message to indicate what should be used instead + msg = "Deprecated field [" + fieldName + "] used, replaced by [" + allReplacedWith + "]"; } if (strict) { throw new IllegalArgumentException(msg); @@ -100,10 +142,20 @@ public class ParseField { return getPreferredName(); } + /** + * @return the message to use if this {@link ParseField} has been entirely + * deprecated in favor of something else. This method will return + * null if the ParseField has not been completely + * deprecated. + */ public String getAllReplacedWith() { return allReplacedWith; } + /** + * @return an array of the names for the {@link ParseField} which are + * deprecated. + */ public String[] getDeprecatedNames() { return deprecatedNames; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index c8698e8223d..fd7c8f6c49d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -37,6 +37,8 @@ import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Objects; @@ -224,17 +226,23 @@ public class FiltersFunctionScoreQuery extends Query { filterExplanations.add(filterExplanation); } } + FiltersFunctionFactorScorer scorer = functionScorer(context); + int actualDoc = scorer.iterator().advance(doc); + assert (actualDoc == doc); + double score = scorer.computeScore(doc, expl.getValue()); + Explanation factorExplanation; if (filterExplanations.size() > 0) { - FiltersFunctionFactorScorer scorer = functionScorer(context); - int actualDoc = scorer.iterator().advance(doc); - assert (actualDoc == doc); - double score = scorer.computeScore(doc, expl.getValue()); - Explanation factorExplanation = Explanation.match( + factorExplanation = Explanation.match( CombineFunction.toFloat(score), "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", filterExplanations); - expl = combineFunction.explain(expl, factorExplanation, maxBoost); + + } else { + // it is a little weird to add a match although no function matches but that is the way function_score behaves right now + factorExplanation = Explanation.match(1.0f, + "No function matched", Collections.emptyList()); } + expl = combineFunction.explain(expl, factorExplanation, maxBoost); if (minScore != null && minScore > expl.getValue()) { expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); } diff --git 
a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index a91f36ad307..9d3a92a65b9 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.network; import org.elasticsearch.action.support.replication.ReplicationTask; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; @@ -75,13 +74,13 @@ public class NetworkModule extends AbstractModule { /** * Creates a network module that custom networking classes can be plugged into. - * - * @param networkService A constructed network service object to bind. + * @param networkService A constructed network service object to bind. * @param settings The settings for the node * @param transportClient True if only transport classes should be allowed to be registered, false otherwise. * @param namedWriteableRegistry registry for named writeables for use during streaming */ - public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient, NamedWriteableRegistry namedWriteableRegistry) { + public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient, + NamedWriteableRegistry namedWriteableRegistry) { this.networkService = networkService; this.settings = settings; this.transportClient = transportClient; diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index ff1f3912cc5..2652f9ff646 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -33,7 +33,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -90,18 +89,12 @@ public class NetworkService extends AbstractComponent { InetAddress[] resolveIfPossible(String value) throws IOException; } - private final List customNameResolvers = new CopyOnWriteArrayList<>(); + private final List customNameResolvers; - public NetworkService(Settings settings) { + public NetworkService(Settings settings, List customNameResolvers) { super(settings); IfConfig.logIfNecessary(); - } - - /** - * Add a custom name resolver. 
- */ - public void addCustomNameResolver(CustomNameResolver customNameResolver) { - customNameResolvers.add(customNameResolver); + this.customNameResolvers = customNameResolvers; } /** @@ -120,11 +113,13 @@ public class NetworkService extends AbstractComponent { // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } else { - // next check any registered custom resolvers - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses; + // next check any registered custom resolvers if any + if (customNameResolvers != null) { + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses; + } } } // we know it's not here. get the defaults @@ -166,11 +161,13 @@ public class NetworkService extends AbstractComponent { // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } else { - // next check any registered custom resolvers - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses[0]; + // next check any registered custom resolvers if any + if (customNameResolvers != null) { + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses[0]; + } } } // we know it's not here. get the defaults @@ -229,11 +226,13 @@ public class NetworkService extends AbstractComponent { private InetAddress[] resolveInternal(String host) throws IOException { if ((host.startsWith("#") && host.endsWith("#")) || (host.startsWith("_") && host.endsWith("_"))) { host = host.substring(1, host.length() - 1); - // allow custom resolvers to have special names - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveIfPossible(host); - if (addresses != null) { - return addresses; + // next check any registered custom resolvers if any + if (customNameResolvers != null) { + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveIfPossible(host); + if (addresses != null) { + return addresses; + } } } switch (host) { diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index db8299cdc9a..ed67019c103 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -326,7 +326,10 @@ public class TimeValue implements Writeable { return new TimeValue(parse(sValue, normalized, 2), TimeUnit.MILLISECONDS); } else if (normalized.endsWith("s")) { return new TimeValue(parse(sValue, normalized, 1), TimeUnit.SECONDS); - } else if (normalized.endsWith("m")) { + } else if (sValue.endsWith("m")) { + // parsing minutes should be case sensitive as `M` is generally + // accepted to mean months not minutes. 
This is the only case where + // the upper and lower case forms indicate different time units return new TimeValue(parse(sValue, normalized, 1), TimeUnit.MINUTES); } else if (normalized.endsWith("h")) { return new TimeValue(parse(sValue, normalized, 1), TimeUnit.HOURS); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index a57492750ad..56bcdcf9c8d 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -92,6 +92,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.service.NodeService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.AnalysisPlugin; +import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; @@ -294,7 +295,8 @@ public class Node implements Closeable { // so we might be late here already final SettingsModule settingsModule = new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter); resourcesToClose.add(resourceWatcherService); - final NetworkService networkService = new NetworkService(settings); + final NetworkService networkService = new NetworkService(settings, + getCustomNameResolvers(pluginsService.filterPlugins(DiscoveryPlugin.class))); final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool); clusterService.add(scriptModule.getScriptService()); resourcesToClose.add(clusterService); @@ -721,4 +723,19 @@ public class Node implements Closeable { BigArrays createBigArrays(Settings settings, CircuitBreakerService circuitBreakerService) { return new BigArrays(settings, circuitBreakerService); } + + /** + * Get Custom Name Resolvers list based on a Discovery Plugins list + * @param discoveryPlugins Discovery plugins list + */ + private List getCustomNameResolvers(List discoveryPlugins) { + List customNameResolvers = new ArrayList<>(); + for (DiscoveryPlugin discoveryPlugin : discoveryPlugins) { + NetworkService.CustomNameResolver customNameResolver = discoveryPlugin.getCustomNameResolver(settings); + if (customNameResolver != null) { + customNameResolvers.add(customNameResolver); + } + } + return customNameResolvers; + } } diff --git a/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java b/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java new file mode 100644 index 00000000000..f6174c08d12 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; + +/** + * An additional extension point for {@link Plugin}s that extends Elasticsearch's discovery functionality. To add an additional + * {@link NetworkService.CustomNameResolver} just implement the interface and implement the {@link #getCustomNameResolver(Settings)} method: + * + *
{@code
+ * public class MyDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
+ *     @Override
+ *     public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
+ *         return new YourCustomNameResolverInstance(settings);
+ *     }
+ * }
+ * }
+ */ +public interface DiscoveryPlugin { + /** + * Override to add additional {@link NetworkService.CustomNameResolver}s. + * This can be handy if you want to provide your own network interface name like _mycard_ + * and implement the logic yourself to get an actual IP address/hostname based on this + * name. + * + * For example: you could call a third party service (an API) to resolve _mycard_. + * Then you could define a setting in elasticsearch.yml like: + * + * <pre>
{@code
+     * network.host: _mycard_
+     * }
+ */ + default NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { + return null; + } +} diff --git a/core/src/main/java/org/elasticsearch/rest/RestStatus.java b/core/src/main/java/org/elasticsearch/rest/RestStatus.java index d78b9c50e0c..d72eb2d11f4 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestStatus.java +++ b/core/src/main/java/org/elasticsearch/rest/RestStatus.java @@ -24,6 +24,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.unmodifiableMap; public enum RestStatus { /** @@ -477,6 +481,15 @@ public enum RestStatus { */ INSUFFICIENT_STORAGE(506); + private static final Map CODE_TO_STATUS; + static { + RestStatus[] values = values(); + Map codeToStatus = new HashMap<>(values.length); + for (RestStatus value : values) { + codeToStatus.put(value.status, value); + } + CODE_TO_STATUS = unmodifiableMap(codeToStatus); + } private int status; @@ -515,4 +528,11 @@ public enum RestStatus { } return status; } + + /** + * Turn a status code into a {@link RestStatus}, returning null if we don't know that status. + */ + public static RestStatus fromCode(int code) { + return CODE_TO_STATUS.get(code); + } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java index 3e4677ab446..f6c4f67bd81 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java @@ -37,8 +37,10 @@ public class RestStatusToXContentListener ext * Build an instance that doesn't support responses with the status {@code 201 CREATED}. */ public RestStatusToXContentListener(RestChannel channel) { - // TODO switch this to throwing an exception? 
- this(channel, r -> null); + this(channel, r -> { + assert false: "Returned a 201 CREATED but not set up to support a Location header"; + return null; + }); } /** diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index ef952f9f54b..a1aff430f26 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -487,7 +487,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } } - private class ScriptChangesListener extends FileChangesListener { + private class ScriptChangesListener implements FileChangesListener { private Tuple getScriptNameExt(Path file) { Path scriptPath = scriptsDirectory.relativize(file); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java index c1a0fee890c..d145e32c45b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java @@ -21,7 +21,9 @@ package org.elasticsearch.search.aggregations.bucket; /** * Helper functions for common Bucketing functions */ -public class BucketUtils { +public final class BucketUtils { + + private BucketUtils() {} /** * Heuristic used to determine the size of shard-side PriorityQueues when @@ -34,16 +36,22 @@ public class BucketUtils { * @return A suggested default for the size of any shard-side PriorityQueues */ public static int suggestShardSideQueueSize(int finalSize, int numberOfShards) { - assert numberOfShards >= 1; + if (finalSize < 1) { + throw new IllegalArgumentException("size must be positive, got " + finalSize); + } + if (numberOfShards < 1) { + throw new IllegalArgumentException("number of shards must be positive, got " + numberOfShards); + } + if (numberOfShards == 1) { + // In the case of a single shard, we do not need to over-request return finalSize; } - //Cap the multiplier used for shards to avoid excessive data transfer - final long shardSampleSize = (long) finalSize * Math.min(10, numberOfShards); - // When finalSize is very small e.g. 1 and there is a low number of - // shards then we need to ensure we still gather a reasonable sample of statistics from each - // shard (at low cost) to improve the chances of the final result being accurate. 
- return (int) Math.min(Integer.MAX_VALUE, Math.max(10, shardSampleSize)); + + // Request 50% more buckets on the shards in order to improve accuracy + // as well as a small constant that should help with small values of 'size' + final long shardSampleSize = (long) (finalSize * 1.5 + 10); + return (int) Math.min(Integer.MAX_VALUE, shardSampleSize); } } diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 1fc66cf45dd..d31c3d371d6 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -91,7 +91,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField SORT_FIELD = new ParseField("sort"); public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores"); public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost"); - public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations", "aggs"); + public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations"); + public static final ParseField AGGS_FIELD = new ParseField("aggs"); public static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight"); public static final ParseField SUGGEST_FIELD = new ParseField("suggest"); public static final ParseField RESCORE_FIELD = new ParseField("rescore"); @@ -998,7 +999,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ scriptFields.add(new ScriptField(context)); } } else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) { - indexBoost = new ObjectFloatHashMap(); + indexBoost = new ObjectFloatHashMap<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -1009,7 +1010,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ parser.getTokenLocation()); } } - } else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)) { + } else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD) + || context.getParseFieldMatcher().match(currentFieldName, AGGS_FIELD)) { aggregations = aggParsers.parseAggregators(context); } else if (context.getParseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) { highlightBuilder = HighlightBuilder.fromXContent(context); diff --git a/core/src/main/java/org/elasticsearch/watcher/FileChangesListener.java b/core/src/main/java/org/elasticsearch/watcher/FileChangesListener.java index 6a6d61ea091..22e8637053f 100644 --- a/core/src/main/java/org/elasticsearch/watcher/FileChangesListener.java +++ b/core/src/main/java/org/elasticsearch/watcher/FileChangesListener.java @@ -23,53 +23,39 @@ import java.nio.file.Path; /** * Callback interface that file changes File Watcher is using to notify listeners about changes. 
*/ -public class FileChangesListener { +public interface FileChangesListener { /** * Called for every file found in the watched directory during initialization */ - public void onFileInit(Path file) { - - } + default void onFileInit(Path file) {} /** * Called for every subdirectory found in the watched directory during initialization */ - public void onDirectoryInit(Path file) { - - } + default void onDirectoryInit(Path file) {} /** * Called for every new file found in the watched directory */ - public void onFileCreated(Path file) { - - } + default void onFileCreated(Path file) {} /** * Called for every file that disappeared in the watched directory */ - public void onFileDeleted(Path file) { - - } + default void onFileDeleted(Path file) {} /** * Called for every file that was changed in the watched directory */ - public void onFileChanged(Path file) { - - } + default void onFileChanged(Path file) {} /** * Called for every new subdirectory found in the watched directory */ - public void onDirectoryCreated(Path file) { - - } + default void onDirectoryCreated(Path file) {} /** * Called for every file that disappeared in the watched directory */ - public void onDirectoryDeleted(Path file) { - - } + default void onDirectoryDeleted(Path file) {} } diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index a7dbb145e40..b9f3d443089 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -792,6 +792,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(142, ShardStateAction.NoLongerPrimaryShardException.class); ids.put(143, org.elasticsearch.script.ScriptException.class); ids.put(144, org.elasticsearch.cluster.NotMasterException.class); + ids.put(145, org.elasticsearch.ElasticsearchStatusException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { @@ -842,4 +843,11 @@ public class ExceptionSerializationTests extends ESTestCase { } } } + + public void testElasticsearchRemoteException() throws IOException { + ElasticsearchStatusException ex = new ElasticsearchStatusException("something", RestStatus.TOO_MANY_REQUESTS); + ElasticsearchStatusException e = serialize(ex); + assertEquals(ex.status(), e.status()); + assertEquals(RestStatus.TOO_MANY_REQUESTS, e.status()); + } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 4f553dfb88a..c2f9938861b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -45,12 +45,12 @@ import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableList; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 
b45383c47e4..6566eb96db8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -39,14 +39,14 @@ public class RolloverIT extends ESIntegTestCase { assertAcked(prepareCreate("test_index-1").addAlias(new Alias("test_alias")).get()); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get(); assertThat(response.getOldIndex(), equalTo("test_index-1")); - assertThat(response.getNewIndex(), equalTo("test_index-2")); + assertThat(response.getNewIndex(), equalTo("test_index-000002")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true)); assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-1"); assertFalse(oldIndex.getAliases().containsKey("test_alias")); - final IndexMetaData newIndex = state.metaData().index("test_index-2"); + final IndexMetaData newIndex = state.metaData().index("test_index-000002"); assertTrue(newIndex.getAliases().containsKey("test_alias")); } @@ -56,14 +56,14 @@ public class RolloverIT extends ESIntegTestCase { flush("test_index-2"); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get(); assertThat(response.getOldIndex(), equalTo("test_index-2")); - assertThat(response.getNewIndex(), equalTo("test_index-3")); + assertThat(response.getNewIndex(), equalTo("test_index-000003")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true)); assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-2"); assertFalse(oldIndex.getAliases().containsKey("test_alias")); - final IndexMetaData newIndex = state.metaData().index("test_index-3"); + final IndexMetaData newIndex = state.metaData().index("test_index-000003"); assertTrue(newIndex.getAliases().containsKey("test_alias")); } @@ -78,14 +78,14 @@ public class RolloverIT extends ESIntegTestCase { final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias") .settings(settings).alias(new Alias("extra_alias")).get(); assertThat(response.getOldIndex(), equalTo("test_index-2")); - assertThat(response.getNewIndex(), equalTo("test_index-3")); + assertThat(response.getNewIndex(), equalTo("test_index-000003")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true)); assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-2"); assertFalse(oldIndex.getAliases().containsKey("test_alias")); - final IndexMetaData newIndex = state.metaData().index("test_index-3"); + final IndexMetaData newIndex = state.metaData().index("test_index-000003"); assertThat(newIndex.getNumberOfShards(), equalTo(1)); assertThat(newIndex.getNumberOfReplicas(), equalTo(0)); assertTrue(newIndex.getAliases().containsKey("test_alias")); @@ -98,14 +98,14 @@ public class RolloverIT extends ESIntegTestCase { flush("test_index-1"); final RolloverResponse response = 
client().admin().indices().prepareRolloverIndex("test_alias").dryRun(true).get(); assertThat(response.getOldIndex(), equalTo("test_index-1")); - assertThat(response.getNewIndex(), equalTo("test_index-2")); + assertThat(response.getNewIndex(), equalTo("test_index-000002")); assertThat(response.isDryRun(), equalTo(true)); assertThat(response.isRolledOver(), equalTo(false)); assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-1"); assertTrue(oldIndex.getAliases().containsKey("test_alias")); - final IndexMetaData newIndex = state.metaData().index("test_index-2"); + final IndexMetaData newIndex = state.metaData().index("test_index-000002"); assertNull(newIndex); } @@ -126,7 +126,7 @@ public class RolloverIT extends ESIntegTestCase { final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-0"); assertTrue(oldIndex.getAliases().containsKey("test_alias")); - final IndexMetaData newIndex = state.metaData().index("test_index-1"); + final IndexMetaData newIndex = state.metaData().index("test_index-000001"); assertNull(newIndex); } @@ -151,14 +151,14 @@ public class RolloverIT extends ESIntegTestCase { public void testRolloverOnExistingIndex() throws Exception { assertAcked(prepareCreate("test_index-0").addAlias(new Alias("test_alias")).get()); index("test_index-0", "type1", "1", "field", "value"); - assertAcked(prepareCreate("test_index-1").get()); - index("test_index-1", "type1", "1", "field", "value"); - flush("test_index-0", "test_index-1"); + assertAcked(prepareCreate("test_index-000001").get()); + index("test_index-000001", "type1", "1", "field", "value"); + flush("test_index-0", "test_index-000001"); try { client().admin().indices().prepareRolloverIndex("test_alias").get(); fail("expected failure due to existing rollover index"); } catch (IndexAlreadyExistsException e) { - assertThat(e.getIndex().getName(), equalTo("test_index-1")); + assertThat(e.getIndex().getName(), equalTo("test_index-000001")); } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 8a4a62f9728..0feedd1a5dc 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.test.ESTestCase; import java.util.HashSet; +import java.util.Locale; import java.util.Set; import static org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction.evaluateConditions; @@ -158,9 +159,9 @@ public class TransportRolloverActionTests extends ESTestCase { final String indexPrefix = randomAsciiOfLength(10); String indexEndingInNumbers = indexPrefix + "-" + num; assertThat(TransportRolloverAction.generateRolloverIndexName(indexEndingInNumbers), - equalTo(indexPrefix + "-" + (num + 1))); - assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-1"), equalTo("index-name-2")); - assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-2"), equalTo("index-name-3")); + equalTo(indexPrefix + "-" + String.format(Locale.ROOT, "%06d", num + 
1))); + assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-1"), equalTo("index-name-000002")); + assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-2"), equalTo("index-name-000003")); } public void testCreateIndexRequest() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java new file mode 100644 index 00000000000..2c2506308ac --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import static org.elasticsearch.ingest.IngestDocumentTests.assertIngestDocument; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.nullValue; + +public class SimulatePipelineRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + SimulatePipelineRequest request = new SimulatePipelineRequest(new BytesArray("")); + // Sometimes we set an id + if (randomBoolean()) { + request.setId(randomAsciiOfLengthBetween(1, 10)); + } + + // Sometimes we explicitly set a boolean (with whatever value) + if (randomBoolean()) { + request.setVerbose(randomBoolean()); + } + + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + SimulatePipelineRequest otherRequest = new SimulatePipelineRequest(); + otherRequest.readFrom(streamInput); + + assertThat(otherRequest.getId(), equalTo(request.getId())); + assertThat(otherRequest.isVerbose(), equalTo(request.isVerbose())); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index 576e8e01724..485dc8934c4 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -39,6 +39,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { public void 
testSerialization() throws IOException { boolean isVerbose = randomBoolean(); + String id = randomBoolean() ? randomAsciiOfLengthBetween(1, 10) : null; int numResults = randomIntBetween(1, 10); List results = new ArrayList<>(numResults); for (int i = 0; i < numResults; i++) { @@ -70,7 +71,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { } } - SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results); + SimulatePipelineResponse response = new SimulatePipelineResponse(id, isVerbose, results); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index f95fb687c76..9624e32dfce 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -51,8 +51,11 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.test.ESAllocationTestCase; +import java.util.Collections; + import static java.util.Collections.singleton; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -481,7 +484,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { parser.nextToken(); parser.nextToken(); AllocationCommandRegistry registry = new NetworkModule(null, Settings.EMPTY, true, new NamedWriteableRegistry()) - .getAllocationCommandRegistry(); + .getAllocationCommandRegistry(); AllocationCommands sCommands = AllocationCommands.fromXContent(parser, ParseFieldMatcher.STRICT, registry); assertThat(sCommands.commands().size(), equalTo(5)); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index f5a5928c980..9abcaa0ce3e 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.Collections; public class NetworkModuleTests extends ModuleTestCase { @@ -112,13 +113,14 @@ public class NetworkModuleTests extends ModuleTestCase { .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") .build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); + NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, + new NamedWriteableRegistry()); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); assertFalse(module.isTransportClient()); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true, 
new NamedWriteableRegistry()); + module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry()); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); assertTrue(module.isTransportClient()); @@ -128,13 +130,14 @@ public class NetworkModuleTests extends ModuleTestCase { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom") .put(NetworkModule.HTTP_ENABLED.getKey(), false) .build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); + NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, + new NamedWriteableRegistry()); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); assertFalse(module.isTransportClient()); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry()); + module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry()); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); assertTrue(module.isTransportClient()); @@ -144,13 +147,14 @@ public class NetworkModuleTests extends ModuleTestCase { Settings settings = Settings.builder() .put(NetworkModule.HTTP_TYPE_SETTING.getKey(), "custom") .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); + NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, + new NamedWriteableRegistry()); module.registerHttpTransport("custom", FakeHttpTransport.class); assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); assertFalse(module.isTransportClient()); // check registration not allowed for transport only - module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry()); + module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry()); assertTrue(module.isTransportClient()); try { module.registerHttpTransport("custom", FakeHttpTransport.class); @@ -163,7 +167,7 @@ public class NetworkModuleTests extends ModuleTestCase { // not added if http is disabled settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); - module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); + module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, new NamedWriteableRegistry()); assertNotBound(module, HttpServerTransport.class); assertFalse(module.isTransportClient()); } @@ -171,7 +175,7 @@ public class NetworkModuleTests extends ModuleTestCase { public void testRegisterTaskStatus() { NamedWriteableRegistry registry = new NamedWriteableRegistry(); Settings settings = Settings.EMPTY; - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, registry); + NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, registry); 
assertFalse(module.isTransportClient()); // Builtin reader comes back diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java index 7ec4756d784..096d3b0a9a9 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; +import java.util.Collections; import static org.hamcrest.Matchers.is; @@ -36,7 +37,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure exception if we bind to multicast ipv4 address */ public void testBindMulticastV4() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); try { service.resolveBindHostAddresses(new String[] { "239.1.1.1" }); fail("should have hit exception"); @@ -48,7 +49,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure exception if we bind to multicast ipv6 address */ public void testBindMulticastV6() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); try { service.resolveBindHostAddresses(new String[] { "FF08::108" }); fail("should have hit exception"); @@ -61,7 +62,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure exception if we publish to multicast ipv4 address */ public void testPublishMulticastV4() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); try { service.resolvePublishHostAddresses(new String[] { "239.1.1.1" }); fail("should have hit exception"); @@ -74,7 +75,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure exception if we publish to multicast ipv6 address */ public void testPublishMulticastV6() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); try { service.resolvePublishHostAddresses(new String[] { "FF08::108" }); fail("should have hit exception"); @@ -87,7 +88,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure specifying wildcard ipv4 address will bind to all interfaces */ public void testBindAnyLocalV4() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); assertEquals(InetAddress.getByName("0.0.0.0"), service.resolveBindHostAddresses(new String[] { "0.0.0.0" })[0]); } @@ -95,7 +96,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure specifying wildcard ipv6 address will bind to all interfaces */ public void testBindAnyLocalV6() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); assertEquals(InetAddress.getByName("::"), service.resolveBindHostAddresses(new String[] { "::" })[0]); } @@ -103,7 +104,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure specifying wildcard ipv4 address selects reasonable publish address */ public void testPublishAnyLocalV4() throws Exception 
{ - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); InetAddress address = service.resolvePublishHostAddresses(new String[] { "0.0.0.0" }); assertFalse(address.isAnyLocalAddress()); } @@ -112,7 +113,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure specifying wildcard ipv6 address selects reasonable publish address */ public void testPublishAnyLocalV6() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); InetAddress address = service.resolvePublishHostAddresses(new String[] { "::" }); assertFalse(address.isAnyLocalAddress()); } @@ -121,7 +122,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure we can bind to multiple addresses */ public void testBindMultipleAddresses() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); InetAddress[] addresses = service.resolveBindHostAddresses(new String[]{"127.0.0.1", "127.0.0.2"}); assertThat(addresses.length, is(2)); } @@ -130,7 +131,7 @@ public class NetworkServiceTests extends ESTestCase { * ensure we can't bind to multiple addresses when using wildcard */ public void testBindMultipleAddressesWithWildcard() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); + NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList()); try { service.resolveBindHostAddresses(new String[]{"0.0.0.0", "127.0.0.1"}); fail("should have hit exception"); diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index 003d78ce42e..4d0ac5257a3 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -92,10 +92,6 @@ public class TimeValueTests extends ESTestCase { TimeValue.parseTimeValue("10 m", null, "test")); assertEquals(new TimeValue(10, TimeUnit.MINUTES), TimeValue.parseTimeValue("10m", null, "test")); - assertEquals(new TimeValue(10, TimeUnit.MINUTES), - TimeValue.parseTimeValue("10 M", null, "test")); - assertEquals(new TimeValue(10, TimeUnit.MINUTES), - TimeValue.parseTimeValue("10M", null, "test")); assertEquals(new TimeValue(10, TimeUnit.HOURS), TimeValue.parseTimeValue("10 h", null, "test")); @@ -115,6 +111,17 @@ public class TimeValueTests extends ESTestCase { assertEquals(new TimeValue(10, TimeUnit.DAYS), TimeValue.parseTimeValue("10D", null, "test")); + // Time values of months should throw an exception as months are not + // supported. 
Note that this is the only unit that is not case sensitive + as `m` is the only character that is overloaded in terms of which + time unit is expected between the upper and lower case versions + expectThrows(ElasticsearchParseException.class, () -> { + TimeValue.parseTimeValue("10 M", null, "test"); + }); + expectThrows(ElasticsearchParseException.class, () -> { + TimeValue.parseTimeValue("10M", null, "test"); + }); + final int length = randomIntBetween(0, 8); final String zeros = new String(new char[length]).replace('\0', '0'); assertTrue(TimeValue.parseTimeValue("-" + zeros + "1", null, "test") == TimeValue.MINUS_ONE); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index 6696174c08f..884ac1aa98f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -45,6 +45,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import java.net.InetSocketAddress; +import java.util.Collections; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; @@ -66,7 +67,7 @@ public class UnicastZenPingIT extends ESTestCase { .put(TransportSettings.PORT.getKey(), startPort + "-" + endPort).build(); ThreadPool threadPool = new TestThreadPool(getClass().getName()); - NetworkService networkService = new NetworkService(settings); + NetworkService networkService = new NetworkService(settings, Collections.emptyList()); ElectMasterService electMasterService = new ElectMasterService(settings); NetworkHandle handleA = startServices(settings, threadPool, networkService, "UZP_A", Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index ba80aca31e9..652c4b4aef3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -599,7 +599,7 @@ public class FunctionScoreTests extends ESTestCase { Explanation ffsqExpl = searcher.explain(ffsq, 0); assertTrue(ffsqExpl.isMatch()); assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f); - assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription()); + assertEquals(queryExpl.getDescription(), ffsqExpl.getDetails()[0].getDescription()); ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 10f, CombineFunction.MULTIPLY); @@ -726,6 +726,31 @@ public class FunctionScoreTests extends ESTestCase { } } + public void testExplanationAndScoreEqualsEvenIfNoFunctionMatches() throws IOException { + IndexSearcher localSearcher = newSearcher(reader); + ScoreMode scoreMode = randomFrom(new + ScoreMode[]{ScoreMode.SUM, ScoreMode.AVG, ScoreMode.FIRST, ScoreMode.MIN, ScoreMode.MAX, ScoreMode.MULTIPLY}); + CombineFunction combineFunction = randomFrom(new + CombineFunction[]{CombineFunction.SUM, CombineFunction.AVG, CombineFunction.MIN, CombineFunction.MAX, + CombineFunction.MULTIPLY, CombineFunction.REPLACE}); + + // check for document that has no matching function + FiltersFunctionScoreQuery query = new FiltersFunctionScoreQuery(new TermQuery(new Term(FIELD, "out")), scoreMode, + new 
FilterFunction[]{new FilterFunction(new TermQuery(new Term("_uid", "2")), new WeightFactorFunction(10))}, + Float.MAX_VALUE, Float.NEGATIVE_INFINITY, combineFunction); + TopDocs searchResult = localSearcher.search(query, 1); + Explanation explanation = localSearcher.explain(query, searchResult.scoreDocs[0].doc); + assertThat(searchResult.scoreDocs[0].score, equalTo(explanation.getValue())); + + // check for document that has a matching function + query = new FiltersFunctionScoreQuery(new TermQuery(new Term(FIELD, "out")), scoreMode, + new FilterFunction[]{new FilterFunction(new TermQuery(new Term("_uid", "1")), new WeightFactorFunction(10))}, + Float.MAX_VALUE, Float.NEGATIVE_INFINITY, combineFunction); + searchResult = localSearcher.search(query, 1); + explanation = localSearcher.explain(query, searchResult.scoreDocs[0].doc); + assertThat(searchResult.scoreDocs[0].score, equalTo(explanation.getValue())); + } + private static class DummyScoreFunction extends ScoreFunction { protected DummyScoreFunction(CombineFunction scoreCombiner) { super(scoreCombiner); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java index fa8a87cf382..7d5b1b4ed59 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -153,7 +153,8 @@ public class TruncateTranslogIT extends ESIntegTestCase { Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { // Great, do nothing, we just wanted to obtain the lock } catch (LockObtainFailedException lofe) { - throw new ElasticsearchException("Still waiting for lock release at [" + idxLocation + "]"); + logger.info("--> failed acquiring lock for {}", idxLocation); + fail("still waiting for lock release at [" + idxLocation + "]"); } catch (IOException ioe) { fail("Got an IOException: " + ioe); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java new file mode 100644 index 00000000000..aa9068b651e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java @@ -0,0 +1,183 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class BucketUtilsTests extends ESTestCase { + + public void testBadInput() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> BucketUtils.suggestShardSideQueueSize(0, 10)); + assertEquals(e.getMessage(), "size must be positive, got 0"); + + e = expectThrows(IllegalArgumentException.class, + () -> BucketUtils.suggestShardSideQueueSize(10, 0)); + assertEquals(e.getMessage(), "number of shards must be positive, got 0"); + } + + public void testOptimizesSingleShard() { + for (int iter = 0; iter < 10; ++iter) { + final int size = randomIntBetween(1, Integer.MAX_VALUE); + assertEquals(size, BucketUtils.suggestShardSideQueueSize( size, 1)); + } + } + + public void testOverFlow() { + for (int iter = 0; iter < 10; ++iter) { + final int size = Integer.MAX_VALUE - randomInt(10); + final int numberOfShards = randomIntBetween(1, 10); + final int shardSize = BucketUtils.suggestShardSideQueueSize( size, numberOfShards); + assertThat(shardSize, greaterThanOrEqualTo(shardSize)); + } + } + + public void testShardSizeIsGreaterThanGlobalSize() { + for (int iter = 0; iter < 10; ++iter) { + final int size = randomIntBetween(1, Integer.MAX_VALUE); + final int numberOfShards = randomIntBetween(1, 10); + final int shardSize = BucketUtils.suggestShardSideQueueSize( size, numberOfShards); + assertThat(shardSize, greaterThanOrEqualTo(size)); + } + } + + /*// You may use the code below to evaluate the impact of the BucketUtils.suggestShardSideQueueSize + // heuristic + public static void main(String[] args) { + final int numberOfUniqueTerms = 10000; + final int totalNumberOfTerms = 1000000; + final int numberOfShards = 10; + final double skew = 2; // parameter of the zipf distribution + final int size = 100; + + double totalWeight = 0; + for (int rank = 1; rank <= numberOfUniqueTerms; ++rank) { + totalWeight += weight(rank, skew); + } + + int[] terms = new int[totalNumberOfTerms]; + int len = 0; + + final int[] actualTopFreqs = new int[size]; + for (int rank = 1; len < totalNumberOfTerms; ++rank) { + int freq = (int) (weight(rank, skew) / totalWeight * totalNumberOfTerms); + freq = Math.max(freq, 1); + Arrays.fill(terms, len, Math.min(len + freq, totalNumberOfTerms), rank - 1); + len += freq; + if (rank <= size) { + actualTopFreqs[rank-1] = freq; + } + } + + final int maxTerm = terms[terms.length - 1] + 1; + + // shuffle terms + Random r = new Random(0); + for (int i = terms.length - 1; i > 0; --i) { + final int swapWith = r.nextInt(i); + int tmp = terms[i]; + terms[i] = terms[swapWith]; + terms[swapWith] = tmp; + } + // distribute into shards like routing would + int[][] shards = new int[numberOfShards][]; + int upTo = 0; + for (int i = 0; i < numberOfShards; ++i) { + shards[i] = Arrays.copyOfRange(terms, upTo, upTo + (terms.length - upTo) / (numberOfShards - i)); + upTo += shards[i].length; + } + + final int[][] topShards = new int[numberOfShards][]; + final int shardSize = BucketUtils.suggestShardSideQueueSize(size, numberOfShards); + for (int shard = 0; shard < numberOfShards; ++shard) { + final int[] data = shards[shard]; + final int[] freqs = new int[maxTerm]; + for (int d : data) { + freqs[d]++; + } + int[] termIds = new int[maxTerm]; + for (int i = 0; i < maxTerm; ++i) { + termIds[i] = i; + } + new InPlaceMergeSorter() { + + @Override + protected void swap(int i, int j) { + int tmp = termIds[i]; + 
termIds[i] = termIds[j]; + termIds[j] = tmp; + tmp = freqs[i]; + freqs[i] = freqs[j]; + freqs[j] = tmp; + } + + @Override + protected int compare(int i, int j) { + return freqs[j] - freqs[i]; + } + }.sort(0, maxTerm); + + Arrays.fill(freqs, shardSize, freqs.length, 0); + new InPlaceMergeSorter() { + + @Override + protected void swap(int i, int j) { + int tmp = termIds[i]; + termIds[i] = termIds[j]; + termIds[j] = tmp; + tmp = freqs[i]; + freqs[i] = freqs[j]; + freqs[j] = tmp; + } + + @Override + protected int compare(int i, int j) { + return termIds[i] - termIds[j]; + } + }.sort(0, maxTerm); + + topShards[shard] = freqs; + } + + final int[] computedTopFreqs = new int[size]; + for (int[] freqs : topShards) { + for (int i = 0; i < size; ++i) { + computedTopFreqs[i] += freqs[i]; + } + } + int numErrors = 0; + int totalFreq = 0; + for (int i = 0; i < size; ++i) { + numErrors += Math.abs(computedTopFreqs[i] - actualTopFreqs[i]); + totalFreq += actualTopFreqs[i]; + } + System.out.println("Number of unique terms: " + maxTerm); + System.out.println("Global freqs of top terms: " + Arrays.toString(actualTopFreqs)); + System.out.println("Computed freqs of top terms: " + Arrays.toString(computedTopFreqs)); + System.out.println("Number of errors: " + numErrors + "/" + totalFreq); + } + + private static double weight(int rank, double skew) { + return 1d / Math.pow(rank, skew); + }*/ + +} diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 19a9b1c65f3..273d19a2659 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -545,6 +545,41 @@ public class SearchSourceBuilderTests extends ESTestCase { } } + public void testAggsParsing() throws IOException { + { + String restContent = "{\n" + " " + + "\"aggs\": {" + + " \"test_agg\": {\n" + + " " + "\"terms\" : {\n" + + " \"field\": \"foo\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, + suggesters); + assertEquals(1, searchSourceBuilder.aggregations().count()); + } + } + { + String restContent = "{\n" + + " \"aggregations\": {" + + " \"test_agg\": {\n" + + " \"terms\" : {\n" + + " \"field\": \"foo\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, + suggesters); + assertEquals(1, searchSourceBuilder.aggregations().count()); + } + } + } + /** * test that we can parse the `rescore` element either as single object or as array */ diff --git a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index 39ab7949a65..a9bf3f0f01d 100644 --- a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -36,6 +36,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import 
java.util.concurrent.TimeUnit; @@ -63,7 +64,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { BigArrays.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService(), new NamedWriteableRegistry(), - new NetworkService(settings)); + new NetworkService(settings, Collections.emptyList())); TransportService transportService = new MockTransportService(settings, transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/core/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java b/core/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java index 5b5a16c6043..a13bf2e122d 100644 --- a/core/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java +++ b/core/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java @@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.hasSize; @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class FileWatcherTests extends ESTestCase { - private class RecordingChangeListener extends FileChangesListener { + private class RecordingChangeListener implements FileChangesListener { private Path rootDir; private RecordingChangeListener(Path rootDir) { diff --git a/docs/java-rest/configuration.asciidoc b/docs/java-rest/configuration.asciidoc new file mode 100644 index 00000000000..b3546c0f75b --- /dev/null +++ b/docs/java-rest/configuration.asciidoc @@ -0,0 +1,113 @@ +== Common configuration + +The `RestClientBuilder` supports providing both a `RequestConfigCallback` and +an `HttpClientConfigCallback` which allow for any customization that the Apache +Async Http Client exposes. Those callbacks make it possible to modify some +specific behaviour of the client without overriding every other default +configuration that the `RestClient` is initialized with. This section +describes some common scenarios that require additional configuration for the +low-level Java REST Client. + +=== Timeouts + +Configuring requests timeouts can be done by providing an instance of +`RequestConfigCallback` while building the `RestClient` through its builder. +The interface has one method that receives an instance of +https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html[`org.apache.http.client.config.RequestConfig.Builder`] + as an argument and has the same return type. The request config builder can +be modified and then returned. In the following example we increase the +connect timeout (defaults to 1 second) and the socket timeout (defaults to 10 +seconds). Also we adjust the max retry timeout accordingly (defaults to 10 +seconds too). + +[source,java] +-------------------------------------------------- +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() { + @Override + public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { + return requestConfigBuilder.setConnectTimeout(5000) + .setSocketTimeout(30000); + } + }) + .setMaxRetryTimeoutMillis(30000) + .build(); +-------------------------------------------------- + +=== Number of threads + +The Apache Http Async Client starts by default one dispatcher thread, and a +number of worker threads used by the connection manager, as many as the number +of locally detected processors (depending on what +`Runtime.getRuntime().availableProcessors()` returns). 
The number of threads +can be modified as follows: + +[source,java] +-------------------------------------------------- +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom().setIoThreadCount(1).build()); + } + }) + .build(); +-------------------------------------------------- + +=== Basic authentication + +Configuring basic authentication can be done by providing an +`HttpClientConfigCallback` while building the `RestClient` through its builder. +The interface has one method that receives an instance of +https://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`] + as an argument and has the same return type. The http client builder can be +modified and then returned. In the following example we set a default +credentials provider that requires basic authentication. + +[source,java] +-------------------------------------------------- +final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); +credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials("user", "password")); + +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + }) + .build(); +-------------------------------------------------- + +=== Encrypted communication + +Encrypted communication can also be configured through the +`HttpClientConfigCallback`. The +https://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`] + received as an argument exposes multiple methods to configure encrypted + communication: `setSSLContext`, `setSSLSessionStrategy` and + `setConnectionManager`, listed in increasing order of precedence. + The following is an example: + +[source,java] +-------------------------------------------------- +KeyStore keyStore = KeyStore.getInstance("jks"); +try (InputStream is = Files.newInputStream(keyStorePath)) { + keyStore.load(is, keyStorePass.toCharArray()); +} +//use the certificates contained in the keystore as the trust material +final SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder.setSSLContext(sslcontext); + } + }) + .build(); +-------------------------------------------------- + +=== Others + +For any other required configuration, the Apache HttpAsyncClient docs +should be consulted: https://hc.apache.org/httpcomponents-asyncclient-4.1.x/ . 
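+ +For instance, a proxy can be configured through the same `HttpClientConfigCallback`. The following is a minimal sketch; the proxy address used here is only a placeholder: + +[source,java] +-------------------------------------------------- +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + //route every request through the (placeholder) proxy endpoint + return httpClientBuilder.setProxy(new HttpHost("proxy.example.com", 8080)); + } + }) + .build(); +--------------------------------------------------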
\ No newline at end of file diff --git a/docs/java-rest/index.asciidoc b/docs/java-rest/index.asciidoc new file mode 100644 index 00000000000..11a6d67eaba --- /dev/null +++ b/docs/java-rest/index.asciidoc @@ -0,0 +1,12 @@ +[[java-rest]] += Java REST Client + +:version: 5.0.0-alpha4 + +include::overview.asciidoc[] + +include::usage.asciidoc[] + +include::configuration.asciidoc[] + +include::sniffer.asciidoc[] diff --git a/docs/java-rest/overview.asciidoc b/docs/java-rest/overview.asciidoc new file mode 100644 index 00000000000..206fcb931b4 --- /dev/null +++ b/docs/java-rest/overview.asciidoc @@ -0,0 +1,42 @@ +== Overview + +Official low-level client for Elasticsearch. Allows to communicate with an +Elasticsearch cluster through http. Compatible with all elasticsearch versions. + +=== Features + +The low-level client's features include: + +* minimal dependencies + +* load balancing across all available nodes + +* failover in case of node failures and upon specific response codes + +* failed connection penalization (whether a failed node is retried depends on + how many consecutive times it failed; the more failed attempts the longer the + client will wait before trying that same node again) + +* persistent connections + +* trace logging of requests and responses + +* optional automatic <> + + +=== License + +Copyright 2013-2016 Elasticsearch + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + diff --git a/docs/java-rest/sniffer.asciidoc b/docs/java-rest/sniffer.asciidoc new file mode 100644 index 00000000000..233c4c74a3e --- /dev/null +++ b/docs/java-rest/sniffer.asciidoc @@ -0,0 +1,136 @@ +[[sniffer]] +== Sniffer + +Minimal library that allows to automatically discover nodes from a running +Elasticsearch cluster and set them to an existing `RestClient` instance. +It retrieves by default the nodes that belong to the cluster using the +Nodes Info api and uses jackson to parse the obtained json response. + +Compatible with Elasticsearch 2.x and onwards. + +=== Maven Repository + +Here is how you can configure the dependency using maven as a dependency manager. +Add the following to your `pom.xml` file: + +["source","xml",subs="attributes"] +-------------------------------------------------- +<dependency> + <groupId>org.elasticsearch.client</groupId> + <artifactId>sniffer</artifactId> + <version>{version}</version> +</dependency> +-------------------------------------------------- + +The low-level REST client is subject to the same release cycle as +elasticsearch. Replace `${es.version}` with the desired sniffer version, first +released with `5.0.0-alpha4`. There is no relation between the sniffer version +and the elasticsearch version that the client can communicate with. Sniffer +supports fetching the nodes list from elasticsearch 2.x and onwards. + +=== Usage + +Once a `RestClient` instance has been created, a `Sniffer` can be associated +to it. The `Sniffer` will make use of the provided `RestClient` to periodically +(every 5 minutes by default) fetch the list of current nodes from the cluster +and update them by calling `RestClient#setHosts`. 
+ + + +[source,java] +-------------------------------------------------- +Sniffer sniffer = Sniffer.builder(restClient).build(); +-------------------------------------------------- + +It is important to close the `Sniffer` so that its background thread gets +properly shut down and all of its resources are released. The `Sniffer` +object should have the same lifecycle as the `RestClient` and get closed +right before the client: + +[source,java] +-------------------------------------------------- +sniffer.close(); +restClient.close(); +-------------------------------------------------- + +The Elasticsearch Nodes Info api doesn't return the protocol to use when +connecting to the nodes but only their `host:port` pair, hence `http` +is used by default. In case `https` should be used instead, the +`ElasticsearchHostsSniffer` object has to be manually created and provided +as follows: + +[source,java] +-------------------------------------------------- +HostsSniffer hostsSniffer = new ElasticsearchHostsSniffer(restClient, + ElasticsearchHostsSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, + ElasticsearchHostsSniffer.Scheme.HTTPS); +Sniffer sniffer = Sniffer.builder(restClient) + .setHostsSniffer(hostsSniffer).build(); +-------------------------------------------------- + +In the same way it is also possible to customize the `sniffRequestTimeout`, +which defaults to one second. That is the `timeout` parameter provided as a +querystring parameter when calling the Nodes Info api, so that when the +timeout expires on the server side, a valid response is still returned +although it may contain only a subset of the nodes that are part of the +cluster, the ones that have responded until then. +Also, a custom `HostsSniffer` implementation can be provided for advanced +use-cases that may require fetching the hosts from external sources. + +The `Sniffer` updates the nodes by default every 5 minutes. This interval can +be customized by providing it (in milliseconds) as follows: + +[source,java] +-------------------------------------------------- +Sniffer sniffer = Sniffer.builder(restClient) + .setSniffIntervalMillis(60000).build(); +-------------------------------------------------- + +It is also possible to enable sniffing on failure, meaning that after each +failure the nodes list gets updated straightaway rather than at the following +ordinary sniffing round. In this case a `SniffOnFailureListener` needs to +be created first and provided at `RestClient` creation. Also, once the +`Sniffer` is later created, it needs to be associated with that same +`SniffOnFailureListener` instance, which will be notified at each failure +and use the `Sniffer` to perform the additional sniffing round as described. + +[source,java] +-------------------------------------------------- +SniffOnFailureListener sniffOnFailureListener = new SniffOnFailureListener(); +RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setFailureListener(sniffOnFailureListener).build(); +Sniffer sniffer = Sniffer.builder(restClient).build(); +sniffOnFailureListener.setSniffer(sniffer); +-------------------------------------------------- + +When using sniffing on failure, not only do the nodes get updated after each +failure, but an additional sniffing round is also scheduled sooner than usual, +by default one minute after the failure, assuming that things will go back to +normal and we want to detect that as soon as possible. 
Said interval can be +customized at `Sniffer` creation time as follows: + +[source,java] +-------------------------------------------------- +Sniffer sniffer = Sniffer.builder(restClient) + .setSniffAfterFailureDelayMillis(30000).build(); +-------------------------------------------------- + +Note that this last configuration parameter has no effect in case sniffing +on failure is not enabled as explained above. + +=== License + +Copyright 2013-2016 Elasticsearch + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + diff --git a/docs/java-rest/usage.asciidoc b/docs/java-rest/usage.asciidoc new file mode 100644 index 00000000000..69f95413a70 --- /dev/null +++ b/docs/java-rest/usage.asciidoc @@ -0,0 +1,227 @@ +== Getting started + +=== Maven Repository + +The low-level Java REST client is hosted on +http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.elasticsearch.client%22[Maven +Central]. The minimum Java version required is `1.7`. + +Here is how you can configure the dependency using maven as a dependency manager. +Add the following to your `pom.xml` file: + +["source","xml",subs="attributes"] +-------------------------------------------------- +<dependency> + <groupId>org.elasticsearch.client</groupId> + <artifactId>rest</artifactId> + <version>{version}</version> +</dependency> +-------------------------------------------------- + +The low-level REST client is subject to the same release cycle as +elasticsearch. Replace `${es.version}` with the desired client version, first +released with `5.0.0-alpha4`. There is no relation between the client version +and the elasticsearch version that the client can communicate with. The +low-level REST client is compatible with all elasticsearch versions. + +=== Dependencies + +The low-level Java REST client internally uses the +http://hc.apache.org/httpcomponents-asyncclient-dev/[Apache Http Async Client] + to send http requests. It depends on the following artifacts, namely the async + http client and its own transitive dependencies: + +- org.apache.httpcomponents:httpasyncclient +- org.apache.httpcomponents:httpcore-nio +- org.apache.httpcomponents:httpclient +- org.apache.httpcomponents:httpcore +- commons-codec:commons-codec +- commons-logging:commons-logging + + +=== Initialization + +A `RestClient` instance can be built through the corresponding +`RestClientBuilder` class, created via the `RestClient#builder(HttpHost...)` +static method. The only required argument is one or more hosts that the +client will communicate with, provided as instances of +https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpHost.html[HttpHost] + as follows: + +[source,java] +-------------------------------------------------- +RestClient restClient = RestClient.builder( + new HttpHost("localhost", 9200, "http"), + new HttpHost("localhost", 9201, "http")).build(); +-------------------------------------------------- + +The `RestClient` class is thread-safe and ideally has the same lifecycle as +the application that uses it. 
It is important that it gets closed when no +longer needed so that all the resources used by it get properly released, +as well as the underlying http client instance and its threads: + +[source,java] +-------------------------------------------------- +restClient.close(); +-------------------------------------------------- + +`RestClientBuilder` also allows to optionally set the following configuration +parameters while building the `RestClient` instance: + +`setDefaultHeaders`:: default headers that need to be sent with each request, +to prevent having to specify them with each single request +`setMaxRetryTimeoutMillis`:: the timeout that should be honoured in case +multiple attempts are made for the same request. The default value is 10 +seconds, same as the default socket timeout. In case the socket timeout is +customized, the maximum retry timeout should be adjusted accordingly +`setFailureListener`:: a listener that gets notified every time a node +fails, in case actions need to be taken. Used internally when sniffing on +failure is enabled +`setRequestConfigCallback`:: callback that allows to modify the default +request configuration (e.g. request timeouts, authentication, or anything that +the https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html[`org.apache.http.client.config.RequestConfig.Builder`] + allows to set) +`setHttpClientConfigCallback`:: callback that allows to modify the http client + configuration (e.g. encrypted communication over ssl, or anything that the +http://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`] + allows to set) + + +=== Performing requests + +Once the `RestClient` has been created, requests can be sent by calling one of +the available `performRequest` method variants. The ones that return the +`Response` are executed synchronously, meaning that the client will block and +wait for a response to be returned. The `performRequest` variants that return +`void` accept a `ResponseListener` as an argument and are executed +asynchronously. The provided listener will be notified upon completion or +failure. The following are the arguments accepted by the different +`performRequest` methods: + +`method`:: the http method or verb +`endpoint`:: the request path, which identifies the Elasticsearch api to +call (e.g. `/_cluster/health`) +`params`:: the optional parameters to be sent as querystring parameters +`entity`:: the optional request body enclosed in an +`org.apache.http.HttpEntity` object +`responseConsumer`:: the optional +http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http/nio/protocol/HttpAsyncResponseConsumer.html[`org.apache.http.nio.protocol.HttpAsyncResponseConsumer`] + callback. Controls how the response body gets streamed from a non-blocking +HTTP connection on the client side. 
When not provided, the default +implementation is used, which buffers the whole response body in heap memory +`responseListener`:: the listener to be notified upon request success or failure +whenever the async `performRequest` method variants are used +`headers`:: optional request headers + +=== Reading responses + +The `Response` object, either returned by the sync `performRequest` methods or + received as an argument in `ResponseListener#onSuccess(Response)`, wraps the +response object returned by the http client and exposes the following information: + +`getRequestLine`:: information about the performed request +`getHost`:: the host that returned the response +`getStatusLine`:: the response status line +`getHeaders`:: the response headers, which can also be retrieved by name +through `getHeader(String)` +`getEntity`:: the response body enclosed in an +https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpEntity.html[`org.apache.http.HttpEntity`] + object + +When performing a request, an exception is thrown (or received as an argument + in `ResponseListener#onFailure(Exception)`) in the following scenarios: + +`IOException`:: communication problem (e.g. SocketTimeoutException etc.) +`ResponseException`:: a response was returned, but its status code indicated +an error (either `4xx` or `5xx`). A `ResponseException` originates from a valid +http response, hence it exposes its corresponding `Response` object which gives +access to the returned response. + + +=== Example requests + +Here are a couple of examples: + +[source,java] +-------------------------------------------------- +Response response = restClient.performRequest("GET", "/", + Collections.singletonMap("pretty", "true")); +System.out.println(EntityUtils.toString(response.getEntity())); + +//index a document +HttpEntity entity = new NStringEntity( + "{\n" + + " \"user\" : \"kimchy\",\n" + + " \"post_date\" : \"2009-11-15T14:12:12\",\n" + + " \"message\" : \"trying out Elasticsearch\"\n" + + "}", ContentType.APPLICATION_JSON); +Response indexResponse = restClient.performRequest( + "PUT", + "/twitter/tweet/1", + Collections.emptyMap(), + entity); + + +-------------------------------------------------- + +Note that the low-level client doesn't expose any helper for json marshalling +and un-marshalling. Users are free to use the library that they prefer for that +purpose. +The underlying Apache Async Http Client ships with different +https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpEntity.html[`org.apache.http.HttpEntity`] + implementations that allow to provide the request body in different formats +(stream, byte array, string etc.). As for reading the response body, the +`HttpEntity#getContent` method comes in handy, returning an `InputStream` +that reads from the previously buffered response body. As an alternative, it is +possible to provide a custom +http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http/nio/protocol/HttpAsyncResponseConsumer.html[`org.apache.http.nio.protocol.HttpAsyncResponseConsumer`] + that controls how bytes are read and buffered. 
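+ +For instance, with the default consumer the response body has already been buffered in memory by the time the `Response` is returned, so it can simply be read in one go. The following is a minimal sketch that reuses the `restClient` instance created above; the endpoint is just an example: + +[source,java] +-------------------------------------------------- +Response response = restClient.performRequest("GET", "/_cluster/health", + Collections.emptyMap()); +//the default response consumer has already buffered the whole body in memory +String body = EntityUtils.toString(response.getEntity()); +--------------------------------------------------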
+ +The following is a basic example of how async requests can be sent: + +[source,java] +-------------------------------------------------- +int numRequests = 10; +final CountDownLatch latch = new CountDownLatch(numRequests); +for (int i = 0; i < numRequests; i++) { + restClient.performRequest( + "PUT", + "/twitter/tweet/" + i, + Collections.emptyMap(), + //assume that the documents are stored in an entities array + entities[i], + new ResponseListener() { + @Override + public void onSuccess(Response response) { + System.out.println(response); + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + latch.countDown(); + } + } + ); +} +//wait for all requests to be completed +latch.await(); + +-------------------------------------------------- + +=== Logging + +The Java REST client uses the same logging library that the Apache Async Http +Client uses: https://commons.apache.org/proper/commons-logging/[Apache Commons Logging], + which comes with support for a number of popular logging implementations. The +Java packages to enable logging for are `org.elasticsearch.client` for the +client itself and `org.elasticsearch.client.sniffer` for the sniffer. + +The request tracer logging can also be enabled to log every request and +corresponding response in curl format. That comes in handy when debugging, for +instance in case a request needs to be manually executed to check whether it +still yields the same response as it did. Enable trace logging for the `tracer` +package to have such log lines printed out. Do note that this type of logging is +expensive and should not be enabled at all times in production environments, +but rather temporarily used only when needed. + diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 987cc7c9758..f8fb8814129 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -163,22 +163,22 @@ sudo bin/elasticsearch-plugin install analysis-icu --timeout 0 [float] === Proxy settings -To install a plugin via a proxy, you can pass the proxy details in with the -Java settings `proxyHost` and `proxyPort`. 
On Unix based systems, these -options can be set on the command line: +To install a plugin via a proxy, you can add the proxy details to the +`ES_JAVA_OPTS` environment variable with the Java settings `http.proxyHost` +and `http.proxyPort` (or `https.proxyHost` and `https.proxyPort`): [source,shell] ----------------------------------- -sudo ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number" bin/elasticsearch-plugin install mobz/elasticsearch-head +sudo ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" bin/elasticsearch-plugin install analysis-icu ----------------------------------- -On Windows, they need to be added to the `ES_JAVA_OPTS` environment variable: +Or on Windows: [source,shell] ------------------------------------ -set ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number" +------------------------------------ +set ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" bin/elasticsearch-plugin install analysis-icu ------------------------------------ +------------------------------------ === Plugins directory diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 959b93611d8..877da289314 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -160,7 +160,8 @@ NOTE: `shard_size` cannot be smaller than `size` (as it doesn't make much sens override it and reset it to be equal to `size`. -The default `shard_size` is a multiple of the `size` parameter which is dependant on the number of shards. +The default `shard_size` will be `size` if the search request needs to go to a single shard, and `(size * 1.5 + 10)` +otherwise. ==== Calculating Document Count Error @@ -446,7 +447,7 @@ Generating the terms using a script: "genres" : { "terms" : { "script" : { - "inline": "doc['genre'].value" + "inline": "doc['genre'].value", "lang": "painless" } } diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 194dda80cf8..7fb7b0d963d 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -12,7 +12,7 @@ the new alias. [source,js] -------------------------------------------------- -PUT /logs-0001 <1> +PUT /logs-000001 <1> { "aliases": { "logs_write": {} @@ -28,18 +28,18 @@ POST logs_write/_rollover <2> } -------------------------------------------------- // CONSOLE -<1> Creates an index called `logs-0001` with the alias `logs_write`. +<1> Creates an index called `logs-000001` with the alias `logs_write`. <2> If the index pointed to by `logs_write` was created 7 or more days ago, or contains 1,000 or more documents, then the `logs-0002` index is created - and the `logs_write` alias is updated to point to `logs-0002`. + and the `logs_write` alias is updated to point to `logs-000002`. 
The above request might return the following response: [source,js] -------------------------------------------------- { - "old_index": "logs-0001", - "new_index": "logs-0002", + "old_index": "logs-000001", + "new_index": "logs-000002", "rolled_over": true, <1> "dry_run": false, <2> "conditions": { <3> @@ -56,8 +56,9 @@ The above request might return the following response: === Naming the new index If the name of the existing index ends with `-` and a number -- e.g. -`logs-0001` -- then the name of the new index will follow the same pattern, -just incrementing the number (`logs-0002`). +`logs-000001` -- then the name of the new index will follow the same pattern, +incrementing the number (`logs-000002`). The number is zero-padded with a length +of 6, regardless of the old index name. If the old name doesn't match this pattern then you must specify the name for the new index as follows: @@ -80,7 +81,7 @@ override any values set in matching index templates. For example, the following [source,js] -------------------------------------------------- -PUT /logs-0001 +PUT /logs-000001 { "aliases": { "logs_write": {} @@ -108,7 +109,7 @@ checked without performing the actual rollover: [source,js] -------------------------------------------------- -PUT /logs-0001 +PUT /logs-000001 { "aliases": { "logs_write": {} diff --git a/docs/reference/migration/migrate_5_0/docs.asciidoc b/docs/reference/migration/migrate_5_0/docs.asciidoc index 9149eed6142..104f047056d 100644 --- a/docs/reference/migration/migrate_5_0/docs.asciidoc +++ b/docs/reference/migration/migrate_5_0/docs.asciidoc @@ -11,6 +11,20 @@ url entirely. If you add `?refresh=wait_for` Elasticsearch will wait for the changes to become visible before replying to the request but won't take any immediate refresh related action. See <>. +==== `created` field deprecated in the Index API + +The `created` field has been deprecated in the Index API. It now returns +`operation`, returning `"operation": "create"` when it created a document and +`"operation": "index"` when it updated the document. This is also true for +`index` bulk operations. + +==== `found` field deprecated in the Delete API + +The `found` field has been deprecated in the Delete API. It now returns +`operation`, returning `"operation": "deleted"` when it deleted a document and +`"operation": "noop"` when it didn't find the document. This is also true for +`delete` bulk operations. + ==== Reindex and Update By Query Before 5.0.0 `_reindex` and `_update_by_query` only retried bulk failures so they used the following response format: diff --git a/docs/reference/modules/cluster/allocation_filtering.asciidoc b/docs/reference/modules/cluster/allocation_filtering.asciidoc index 533a3e83c6c..437f243c018 100644 --- a/docs/reference/modules/cluster/allocation_filtering.asciidoc +++ b/docs/reference/modules/cluster/allocation_filtering.asciidoc @@ -62,7 +62,7 @@ All attribute values can be specified with wildcards, eg: PUT _cluster/settings { "transient": { - "cluster.routing.allocation.include._ip": "192.168.2.*" + "cluster.routing.allocation.exclude._ip": "192.168.2.*" } } ------------------------ diff --git a/docs/reference/modules/discovery/zen.asciidoc b/docs/reference/modules/discovery/zen.asciidoc index f51fecb9079..f1439adc8b6 100644 --- a/docs/reference/modules/discovery/zen.asciidoc +++ b/docs/reference/modules/discovery/zen.asciidoc @@ -133,5 +133,5 @@ read or write operations, like the get index settings, put mapping and cluster s `write`:: (default) Write operations will be rejected. 
Read operations will succeed, based on the last known cluster configuration. This may result in partial reads of stale data as this node may be isolated from the rest of the cluster. -The `discovery.zen.no_master_block` setting doesn't apply to nodes based apis (for example cluster stats, node info and -node stats apis) which will not be blocked and try to execute on any node possible. +The `discovery.zen.no_master_block` setting doesn't apply to nodes-based apis (for example cluster stats, node info and +node stats apis). Requests to these apis will not be blocked and can run on any available node. diff --git a/docs/reference/modules/gateway.asciidoc b/docs/reference/modules/gateway.asciidoc index 3ce7a920a2f..cad05d1baaa 100644 --- a/docs/reference/modules/gateway.asciidoc +++ b/docs/reference/modules/gateway.asciidoc @@ -4,9 +4,9 @@ The local gateway module stores the cluster state and shard data across full cluster restarts. -The following _static_ settings, which must be set on every data node in the -cluster, controls how long nodes should wait before they try to recover any -shards which are stored locally: + The following _static_ settings, which must be set on every master node, + control how long a freshly elected master should wait before it tries to + recover the cluster state and the cluster's data: `gateway.expected_nodes`:: @@ -48,4 +48,3 @@ as long as the following conditions are met: Recover as long as this many data nodes have joined the cluster. NOTE: These settings only take effect on a full cluster restart. - diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index 374b2b2bd7b..a333312e0fb 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -59,7 +59,7 @@ There are several thread pools, but the important ones include: Changing a specific thread pool can be done by setting its type-specific parameters; for example, changing the `index` thread pool to have more threads: -[source,js] +[source,yaml] -------------------------------------------------- thread_pool: index: @@ -87,7 +87,7 @@ requests that have no threads to execute them. By default, it is set to `-1` which means its unbounded. When a request comes in and the queue is full, it will abort the request. -[source,js] +[source,yaml] -------------------------------------------------- thread_pool: index: @@ -105,7 +105,7 @@ the `core` and `max` parameters. The `keep_alive` parameter determines how long a thread should be kept around in the thread pool without it doing any work. -[source,js] +[source,yaml] -------------------------------------------------- thread_pool: warmer: @@ -122,5 +122,10 @@ settings are automatically set based on it. Sometimes, the number of processors are wrongly detected, in such cases, the number of processors can be explicitly set using the `processors` setting. +[source,yaml] +-------------------------------------------------- +processors: 2 +-------------------------------------------------- + In order to check the number of processors detected, use the nodes info API with the `os` flag. 
diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index 359b692f528..7ed6f2de8c8 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -54,23 +54,16 @@ GET /_search/template ------------------------------------------ [float] -===== Passing an array of strings +===== Converting parameters to JSON + +The `{{toJson}}parameter{{/toJson}}` function can be used to convert parameters +like maps and array to their JSON representation: [source,js] ------------------------------------------ GET /_search/template { - "inline": { - "query": { - "terms": { - "status": [ - "{{#status}}", - "{{.}}", - "{{/status}}" - ] - } - } - }, + "inline": "{ \"query\": { \"terms\": { \"status\": {{#toJson}}status{{/toJson}} }}}", "params": { "status": [ "pending", "published" ] } @@ -82,13 +75,52 @@ which is rendered as: [source,js] ------------------------------------------ { -"query": { - "terms": { - "status": [ "pending", "published" ] + "query": { + "terms": { + "status": [ + "pending", + "published" + ] + } } } ------------------------------------------ +A more complex example substitutes an array of JSON objects: + +[source,js] +------------------------------------------ +{ + "inline": "{\"query\":{\"bool\":{\"must\": {{#toJson}}clauses{{/toJson}} }}}", + "params": { + "clauses": [ + { "term": "foo" }, + { "term": "bar" } + ] + } +} +------------------------------------------ + +which is rendered as: + +[source,js] +------------------------------------------ +{ + "query" : { + "bool" : { + "must" : [ + { + "term" : "foo" + }, + { + "term" : "bar" + } + ] + } + } +} +------------------------------------------ + [float] ===== Concatenating array of values @@ -223,45 +255,6 @@ for `end`: } ------------------------------------------ -[float] -===== Converting parameters to JSON - -The `{{toJson}}parameter{{/toJson}}` function can be used to convert parameters -like maps and array to their JSON representation: - -[source,js] ------------------------------------------- -{ - "inline": "{\"query\":{\"bool\":{\"must\": {{#toJson}}clauses{{/toJson}} }}}", - "params": { - "clauses": [ - { "term": "foo" }, - { "term": "bar" } - ] - } -} ------------------------------------------- - -which is rendered as: - -[source,js] ------------------------------------------- -{ - "query" : { - "bool" : { - "must" : [ - { - "term" : "foo" - }, - { - "term" : "bar" - } - ] - } - } -} ------------------------------------------- - [float] ===== Conditional clauses @@ -330,7 +323,7 @@ We could write the query as: ================================== As written above, this template is not valid JSON because it includes the _section_ markers like `{{#line_no}}`. For this reason, the template should -either be stored in a file (see <>) or, when used +either be stored in a file (see <>) or, when used via the REST API, should be written as a string: [source,js] @@ -467,7 +460,7 @@ This call will return the rendered template: ------------------------------------------ <1> `status` array has been populated with values from the `params` object. -File and indexed templates can also be rendered by replacing `inline` with +File and indexed templates can also be rendered by replacing `inline` with `file` or `id` respectively. 
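The `{{#toJson}}...{{/toJson}}` substitution shown in the hunks above can be exercised end to end with the render template API documented in this same file. The sketch below is a hypothetical illustration using the low-level Java REST client: the `_render/template` endpoint comes from the existing search template docs, while the client overloads, host, and port are assumptions rather than part of this diff.

[source,java]
--------------------------------------------------
import java.util.Collections;

import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RenderToJsonTemplate {
    public static void main(String[] args) throws Exception {
        // Same inline template as the first {{#toJson}} example above.
        String body = "{"
                + "\"inline\": \"{ \\\"query\\\": { \\\"terms\\\": { \\\"status\\\": {{#toJson}}status{{/toJson}} }}}\","
                + "\"params\": { \"status\": [ \"pending\", \"published\" ] }"
                + "}";
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            HttpEntity entity = new StringEntity(body, ContentType.APPLICATION_JSON);
            // Renders the template without executing a search, so the substituted
            // JSON array can be inspected directly in the response body.
            Response response = client.performRequest("POST", "/_render/template",
                    Collections.emptyMap(), entity);
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
--------------------------------------------------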
For example, to render a file template [source,js] diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 3f9ee8bc961..524b131c787 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -203,7 +203,7 @@ public class ForEachProcessorTests extends ESTestCase { )); processor.execute(ingestDocument); - List result = ingestDocument.getFieldValue("values", List.class); + List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("STRING")); assertThat(result.get(1), equalTo(1)); assertThat(result.get(2), equalTo(null)); diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index b102734a4f4..42876b18f0d 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -95,80 +95,95 @@ delimiter | EOF ; -// Note we return the boolean s. This is returned as true -// if secondaries (postfixes) are allowed, otherwise, false. -// This prevents illegal secondaries from being appended to -// expressions using precedence that aren't variable/method chains. -expression returns [boolean s = true] - : u = unary[false] { $s = $u.s; } # single - | expression ( MUL | DIV | REM ) expression { $s = false; } # binary - | expression ( ADD | SUB ) expression { $s = false; } # binary - | expression ( FIND | MATCH ) expression { $s = false; } # binary - | expression ( LSH | RSH | USH ) expression { $s = false; } # binary - | expression ( LT | LTE | GT | GTE ) expression { $s = false; } # comp - | expression INSTANCEOF decltype { $s = false; } # instanceof - | expression ( EQ | EQR | NE | NER ) expression { $s = false; } # comp - | expression BWAND expression { $s = false; } # binary - | expression XOR expression { $s = false; } # binary - | expression BWOR expression { $s = false; } # binary - | expression BOOLAND expression { $s = false; } # bool - | expression BOOLOR expression { $s = false; } # bool - | expression COND e0 = expression COLON e1 = expression { $s = $e0.s && $e1.s; } # conditional - // TODO: Should we allow crazy syntax like (x = 5).call()? - // Other crazy syntaxes work, but this one requires - // a complete restructure of the rules as EChain isn't - // designed to handle more postfixes after an assignment. 
- | chain[true] ( ASSIGN | AADD | ASUB | AMUL | - ADIV | AREM | AAND | AXOR | - AOR | ALSH | ARSH | AUSH ) expression { $s = false; } # assignment +expression + : unary # single + | expression ( MUL | DIV | REM ) expression # binary + | expression ( ADD | SUB ) expression # binary + | expression ( FIND | MATCH ) expression # binary + | expression ( LSH | RSH | USH ) expression # binary + | expression ( LT | LTE | GT | GTE ) expression # comp + | expression INSTANCEOF decltype # instanceof + | expression ( EQ | EQR | NE | NER ) expression # comp + | expression BWAND expression # binary + | expression XOR expression # binary + | expression BWOR expression # binary + | expression BOOLAND expression # bool + | expression BOOLOR expression # bool + | expression COND expression COLON expression # conditional + | expression ( ASSIGN | AADD | ASUB | AMUL | + ADIV | AREM | AAND | AXOR | + AOR | ALSH | ARSH | AUSH ) expression # assignment ; -// Note we take in the boolean c. This is used to indicate -// whether or not this rule was called when we are already -// processing a variable/method chain. This prevents the chain -// from being applied to rules where it wouldn't be allowed. -unary[boolean c] returns [boolean s = true] - : { !$c }? ( INCR | DECR ) chain[true] # pre - | { !$c }? chain[true] (INCR | DECR ) # post - | { !$c }? chain[false] # read - | { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } # numeric - | { !$c }? TRUE { $s = false; } # true - | { !$c }? FALSE { $s = false; } # false - | { !$c }? NULL { $s = false; } # null - | { !$c }? listinitializer { $s = false; } # listinit - | { !$c }? mapinitializer { $s = false; } # mapinit - | { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false] # operator - | LP decltype RP unary[$c] # cast +unary + : ( INCR | DECR ) chain # pre + | chain (INCR | DECR ) # post + | chain # read + | ( BOOLNOT | BWNOT | ADD | SUB ) unary # operator + | LP decltype RP unary # cast ; -chain[boolean c] - : p = primary[$c] secondary[$p.s]* # dynamic - | decltype dot secondary[true]* # static - | arrayinitializer # newarray +chain + : primary postfix* # dynamic + | decltype postdot postfix* # static + | arrayinitializer # newarray ; -primary[boolean c] returns [boolean s = true] - : { !$c }? LP e = expression RP { $s = $e.s; } # exprprec - | { $c }? LP unary[true] RP # chainprec - | STRING # string - | REGEX # regex - | ID # variable - | ID arguments # calllocal - | NEW TYPE arguments # newobject +primary + : LP expression RP # precedence + | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric + | TRUE # true + | FALSE # false + | NULL # null + | STRING # string + | REGEX # regex + | listinitializer # listinit + | mapinitializer # mapinit + | ID # variable + | ID arguments # calllocal + | NEW TYPE arguments # newobject ; -secondary[boolean s] - : { $s }? dot - | { $s }? brace +postfix + : callinvoke + | fieldaccess + | braceaccess ; -dot - : DOT DOTID arguments # callinvoke - | DOT ( DOTID | DOTINTEGER ) # fieldaccess +postdot + : callinvoke + | fieldaccess ; -brace - : LBRACE expression RBRACE # braceaccess +callinvoke + : DOT DOTID arguments + ; + +fieldaccess + : DOT ( DOTID | DOTINTEGER ) + ; + +braceaccess + : LBRACE expression RBRACE + ; + +arrayinitializer + : NEW TYPE ( LBRACE expression RBRACE )+ ( postdot postfix* )? # newstandardarray + | NEW TYPE LBRACE RBRACE LBRACK ( expression ( COMMA expression )* )? SEMICOLON? 
RBRACK postfix* # newinitializedarray + ; + +listinitializer + : LBRACE expression ( COMMA expression)* RBRACE + | LBRACE RBRACE + ; + +mapinitializer + : LBRACE maptoken ( COMMA maptoken )* RBRACE + | LBRACE COLON RBRACE + ; + +maptoken + : expression COLON expression ; arguments @@ -190,49 +205,10 @@ lamtype ; funcref - : classFuncref - | constructorFuncref - | capturingFuncref - | localFuncref + : TYPE REF ID # classfuncref // reference to a static or instance method, + // e.g. ArrayList::size or Integer::compare + | decltype REF NEW # constructorfuncref // reference to a constructor, e.g. ArrayList::new + | ID REF ID # capturingfuncref // reference to an instance method, e.g. object::toString + // currently limited to capture of a simple variable (id). + | THIS REF ID # localfuncref // reference to a local function, e.g. this::myfunc ; - -// reference to a static or instance method, e.g. ArrayList::size or Integer::compare -classFuncref - : TYPE REF ID - ; - -// reference to a constructor, e.g. ArrayList::new -// currently limited to simple non-array types -constructorFuncref - : decltype REF NEW - ; - -// reference to an instance method, e.g. object::toString -// currently limited to capture of a simple variable (id). -capturingFuncref - : ID REF ID - ; - -// reference to a local function, e.g. this::myfunc -localFuncref - : THIS REF ID - ; - -arrayinitializer - : NEW TYPE (LBRACE expression RBRACE)+ (dot secondary[true]*)? # newstandardarray - | NEW TYPE LBRACE RBRACE LBRACK ( expression ( COMMA expression )* )? SEMICOLON? RBRACK # newinitializedarray - ; - -listinitializer - : LBRACE expression ( COMMA expression)* RBRACE - | LBRACE RBRACE - ; - -mapinitializer - : LBRACE maptoken ( COMMA maptoken )* RBRACE - | LBRACE COLON RBRACE - ; - -maptoken - : expression COLON expression - ; \ No newline at end of file diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 69a74301bc8..e0bdfbf17c2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -91,43 +91,47 @@ public final class Definition { public static final Type MATCHER_TYPE = getType("Matcher"); public enum Sort { - VOID( void.class , 0 , true , false , false , false ), - BOOL( boolean.class , 1 , true , true , false , true ), - BYTE( byte.class , 1 , true , false , true , true ), - SHORT( short.class , 1 , true , false , true , true ), - CHAR( char.class , 1 , true , false , true , true ), - INT( int.class , 1 , true , false , true , true ), - LONG( long.class , 2 , true , false , true , true ), - FLOAT( float.class , 1 , true , false , true , true ), - DOUBLE( double.class , 2 , true , false , true , true ), + VOID( void.class , Void.class , null , 0 , true , false , false , false ), + BOOL( boolean.class , Boolean.class , null , 1 , true , true , false , true ), + BYTE( byte.class , Byte.class , null , 1 , true , false , true , true ), + SHORT( short.class , Short.class , null , 1 , true , false , true , true ), + CHAR( char.class , Character.class , null , 1 , true , false , true , true ), + INT( int.class , Integer.class , null , 1 , true , false , true , true ), + LONG( long.class , Long.class , null , 2 , true , false , true , true ), + FLOAT( float.class , Float.class , null , 1 , true , false , true , true ), + DOUBLE( double.class , Double.class , null , 2 , true , false , 
true , true ), - VOID_OBJ( Void.class , 1 , true , false , false , false ), - BOOL_OBJ( Boolean.class , 1 , false , true , false , false ), - BYTE_OBJ( Byte.class , 1 , false , false , true , false ), - SHORT_OBJ( Short.class , 1 , false , false , true , false ), - CHAR_OBJ( Character.class , 1 , false , false , true , false ), - INT_OBJ( Integer.class , 1 , false , false , true , false ), - LONG_OBJ( Long.class , 1 , false , false , true , false ), - FLOAT_OBJ( Float.class , 1 , false , false , true , false ), - DOUBLE_OBJ( Double.class , 1 , false , false , true , false ), + VOID_OBJ( Void.class , null , void.class , 1 , true , false , false , false ), + BOOL_OBJ( Boolean.class , null , boolean.class , 1 , false , true , false , false ), + BYTE_OBJ( Byte.class , null , byte.class , 1 , false , false , true , false ), + SHORT_OBJ( Short.class , null , short.class , 1 , false , false , true , false ), + CHAR_OBJ( Character.class , null , char.class , 1 , false , false , true , false ), + INT_OBJ( Integer.class , null , int.class , 1 , false , false , true , false ), + LONG_OBJ( Long.class , null , long.class , 1 , false , false , true , false ), + FLOAT_OBJ( Float.class , null , float.class , 1 , false , false , true , false ), + DOUBLE_OBJ( Double.class , null , double.class , 1 , false , false , true , false ), - NUMBER( Number.class , 1 , false , false , false , false ), - STRING( String.class , 1 , false , false , false , true ), + NUMBER( Number.class , null , null , 1 , false , false , false , false ), + STRING( String.class , null , null , 1 , false , false , false , true ), - OBJECT( null , 1 , false , false , false , false ), - DEF( null , 1 , false , false , false , false ), - ARRAY( null , 1 , false , false , false , false ); + OBJECT( null , null , null , 1 , false , false , false , false ), + DEF( null , null , null , 1 , false , false , false , false ), + ARRAY( null , null , null , 1 , false , false , false , false ); public final Class clazz; + public final Class boxed; + public final Class unboxed; public final int size; public final boolean primitive; public final boolean bool; public final boolean numeric; public final boolean constant; - Sort(final Class clazz, final int size, final boolean primitive, - final boolean bool, final boolean numeric, final boolean constant) { + Sort(final Class clazz, final Class boxed, final Class unboxed, final int size, + final boolean primitive, final boolean bool, final boolean numeric, final boolean constant) { this.clazz = clazz; + this.boxed = boxed; + this.unboxed = unboxed; this.size = size; this.bool = bool; this.primitive = primitive; @@ -204,8 +208,8 @@ public final class Definition { this.modifiers = modifiers; this.handle = handle; } - - /** + + /** * Returns MethodType for this method. *

* This works even for user-defined Methods (where the MethodHandle is null). @@ -252,7 +256,7 @@ public final class Definition { } return MethodType.methodType(returnValue, params); } - + public void write(MethodWriter writer) { final org.objectweb.asm.Type type; if (augmentation) { @@ -803,7 +807,7 @@ public final class Definition { final Class implClass; final Class[] params; - + if (augmentation == false) { implClass = owner.clazz; params = new Class[args.length]; @@ -818,7 +822,7 @@ public final class Definition { params[count+1] = args[count].clazz; } } - + final java.lang.reflect.Method reflect; try { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/ErrorHandlingLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/ErrorHandlingLexer.java deleted file mode 100644 index d490426c59c..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/ErrorHandlingLexer.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.antlr; - -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.LexerNoViableAltException; -import org.antlr.v4.runtime.misc.Interval; -import org.elasticsearch.painless.Location; - -/** - * A lexer that will override the default error behavior to fail on the first error. 
- */ -final class ErrorHandlingLexer extends PainlessLexer { - final String sourceName; - - ErrorHandlingLexer(CharStream charStream, String sourceName) { - super(charStream); - this.sourceName = sourceName; - // Replace the TokenFactory with a stashing wrapper so we can do token-level lookbehind for regex detection - _factory = new StashingTokenFactory<>(_factory); - } - - @Override - public void recover(final LexerNoViableAltException lnvae) { - final CharStream charStream = lnvae.getInputStream(); - final int startIndex = lnvae.getStartIndex(); - final String text = charStream.getText(Interval.of(startIndex, charStream.index())); - - Location location = new Location(sourceName, _tokenStartCharIndex); - throw location.createError(new IllegalArgumentException("unexpected character [" + getErrorDisplay(text) + "].", lnvae)); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index 933a5f35dcd..8766dc9f89c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -33,18 +33,18 @@ class PainlessParser extends Parser { RULE_trailer = 4, RULE_block = 5, RULE_empty = 6, RULE_initializer = 7, RULE_afterthought = 8, RULE_declaration = 9, RULE_decltype = 10, RULE_declvar = 11, RULE_trap = 12, RULE_delimiter = 13, RULE_expression = 14, RULE_unary = 15, - RULE_chain = 16, RULE_primary = 17, RULE_secondary = 18, RULE_dot = 19, - RULE_brace = 20, RULE_arguments = 21, RULE_argument = 22, RULE_lambda = 23, - RULE_lamtype = 24, RULE_funcref = 25, RULE_classFuncref = 26, RULE_constructorFuncref = 27, - RULE_capturingFuncref = 28, RULE_localFuncref = 29, RULE_arrayinitializer = 30, - RULE_listinitializer = 31, RULE_mapinitializer = 32, RULE_maptoken = 33; + RULE_chain = 16, RULE_primary = 17, RULE_postfix = 18, RULE_postdot = 19, + RULE_callinvoke = 20, RULE_fieldaccess = 21, RULE_braceaccess = 22, RULE_arrayinitializer = 23, + RULE_listinitializer = 24, RULE_mapinitializer = 25, RULE_maptoken = 26, + RULE_arguments = 27, RULE_argument = 28, RULE_lambda = 29, RULE_lamtype = 30, + RULE_funcref = 31; public static final String[] ruleNames = { "source", "function", "parameters", "statement", "trailer", "block", "empty", "initializer", "afterthought", "declaration", "decltype", "declvar", "trap", - "delimiter", "expression", "unary", "chain", "primary", "secondary", "dot", - "brace", "arguments", "argument", "lambda", "lamtype", "funcref", "classFuncref", - "constructorFuncref", "capturingFuncref", "localFuncref", "arrayinitializer", - "listinitializer", "mapinitializer", "maptoken" + "delimiter", "expression", "unary", "chain", "primary", "postfix", "postdot", + "callinvoke", "fieldaccess", "braceaccess", "arrayinitializer", "listinitializer", + "mapinitializer", "maptoken", "arguments", "argument", "lambda", "lamtype", + "funcref" }; private static final String[] _LITERAL_NAMES = { @@ -147,43 +147,42 @@ class PainlessParser extends Parser { public final SourceContext source() throws RecognitionException { SourceContext _localctx = new SourceContext(_ctx, getState()); enterRule(_localctx, 0, RULE_source); + int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(71); + setState(67); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(68); + setState(64); function(); } } } - setState(73); + setState(69); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } - setState(77); + setState(73); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,1,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(74); - statement(); - } - } + _la = _input.LA(1); + while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + { + { + setState(70); + statement(); } - setState(79); + } + setState(75); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,1,_ctx); + _la = _input.LA(1); } - setState(80); + setState(76); match(EOF); } } @@ -226,13 +225,13 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(82); + setState(78); decltype(); - setState(83); + setState(79); match(ID); - setState(84); + setState(80); parameters(); - setState(85); + setState(81); block(); } } @@ -282,38 +281,38 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(87); + setState(83); match(LP); - setState(99); + setState(95); _la = _input.LA(1); if (_la==TYPE) { { - setState(88); + setState(84); decltype(); - setState(89); + setState(85); match(ID); - setState(96); + setState(92); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(90); + setState(86); match(COMMA); - setState(91); + setState(87); decltype(); - setState(92); + setState(88); match(ID); } } - setState(98); + setState(94); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(101); + setState(97); match(RP); } } @@ -576,37 +575,38 @@ class PainlessParser extends Parser { public final StatementContext statement() throws RecognitionException { StatementContext _localctx = new StatementContext(_ctx, getState()); enterRule(_localctx, 6, RULE_statement); + int _la; try { int _alt; - setState(189); + setState(185); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: _localctx = new IfContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(103); + setState(99); match(IF); - setState(104); + setState(100); match(LP); - setState(105); + setState(101); expression(0); - setState(106); + setState(102); match(RP); - setState(107); + setState(103); trailer(); - setState(111); + setState(107); switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { case 1: { - setState(108); + setState(104); match(ELSE); - setState(109); + setState(105); trailer(); } break; case 2: { - setState(110); + setState(106); if (!( _input.LA(1) != ELSE )) throw new FailedPredicateException(this, " _input.LA(1) != ELSE "); } break; @@ -617,28 +617,59 @@ class PainlessParser extends Parser { _localctx = new WhileContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(113); + setState(109); match(WHILE); - 
setState(114); + setState(110); match(LP); - setState(115); + setState(111); expression(0); - setState(116); + setState(112); match(RP); - setState(119); - switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { - case 1: + setState(115); + switch (_input.LA(1)) { + case LBRACK: + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: { - setState(117); + setState(113); trailer(); } break; - case 2: + case SEMICOLON: { - setState(118); + setState(114); empty(); } break; + default: + throw new NoViableAltException(this); } } break; @@ -646,19 +677,19 @@ class PainlessParser extends Parser { _localctx = new DoContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(121); + setState(117); match(DO); - setState(122); + setState(118); block(); - setState(123); + setState(119); match(WHILE); - setState(124); + setState(120); match(LP); - setState(125); + setState(121); expression(0); - setState(126); + setState(122); match(RP); - setState(127); + setState(123); delimiter(); } break; @@ -666,57 +697,88 @@ class PainlessParser extends Parser { _localctx = new ForContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(129); + setState(125); match(FOR); - setState(130); + setState(126); match(LP); - setState(132); - switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { - case 1: + setState(128); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { { - setState(131); + setState(127); initializer(); } - break; } + + setState(130); + match(SEMICOLON); + setState(132); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + { + setState(131); + expression(0); + } + } + setState(134); match(SEMICOLON); setState(136); - switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { - case 1: + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { { setState(135); - 
expression(0); - } - break; - } - setState(138); - match(SEMICOLON); - setState(140); - switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { - case 1: - { - setState(139); afterthought(); } - break; } - setState(142); + + setState(138); match(RP); - setState(145); - switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) { - case 1: + setState(141); + switch (_input.LA(1)) { + case LBRACK: + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: { - setState(143); + setState(139); trailer(); } break; - case 2: + case SEMICOLON: { - setState(144); + setState(140); empty(); } break; + default: + throw new NoViableAltException(this); } } break; @@ -724,21 +786,21 @@ class PainlessParser extends Parser { _localctx = new EachContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(147); + setState(143); match(FOR); - setState(148); + setState(144); match(LP); - setState(149); + setState(145); decltype(); - setState(150); + setState(146); match(ID); - setState(151); + setState(147); match(COLON); - setState(152); + setState(148); expression(0); - setState(153); + setState(149); match(RP); - setState(154); + setState(150); trailer(); } break; @@ -746,19 +808,19 @@ class PainlessParser extends Parser { _localctx = new IneachContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(156); + setState(152); match(FOR); - setState(157); + setState(153); match(LP); - setState(158); + setState(154); match(ID); - setState(159); + setState(155); match(IN); - setState(160); + setState(156); expression(0); - setState(161); + setState(157); match(RP); - setState(162); + setState(158); trailer(); } break; @@ -766,9 +828,9 @@ class PainlessParser extends Parser { _localctx = new DeclContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(164); + setState(160); declaration(); - setState(165); + setState(161); delimiter(); } break; @@ -776,9 +838,9 @@ class PainlessParser extends Parser { _localctx = new ContinueContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(167); + setState(163); match(CONTINUE); - setState(168); + setState(164); delimiter(); } break; @@ -786,9 +848,9 @@ class PainlessParser extends Parser { _localctx = new BreakContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(169); + setState(165); match(BREAK); - setState(170); + setState(166); delimiter(); } break; @@ -796,11 +858,11 @@ class PainlessParser extends Parser { _localctx = new ReturnContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(171); + setState(167); match(RETURN); - setState(172); + setState(168); expression(0); - setState(173); + setState(169); delimiter(); } break; @@ -808,11 +870,11 @@ class PainlessParser extends Parser { _localctx = new TryContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(175); + setState(171); match(TRY); - setState(176); + setState(172); block(); - setState(178); + setState(174); _errHandler.sync(this); _alt = 1; do { @@ -820,7 +882,7 @@ class PainlessParser extends Parser { case 1: { { - setState(177); + setState(173); trap(); } } @@ -828,7 +890,7 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(180); + setState(176); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,10,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -838,11 +900,11 @@ class PainlessParser extends Parser { _localctx = new ThrowContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(182); + setState(178); match(THROW); - setState(183); + setState(179); expression(0); - setState(184); + setState(180); delimiter(); } break; @@ -850,9 +912,9 @@ class PainlessParser extends Parser { _localctx = new ExprContext(_localctx); enterOuterAlt(_localctx, 13); { - setState(186); + setState(182); expression(0); - setState(187); + setState(183); delimiter(); } break; @@ -891,22 +953,52 @@ class PainlessParser extends Parser { TrailerContext _localctx = new TrailerContext(_ctx, getState()); enterRule(_localctx, 8, RULE_trailer); try { - setState(193); - switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { - case 1: + setState(189); + switch (_input.LA(1)) { + case LBRACK: enterOuterAlt(_localctx, 1); { - setState(191); + setState(187); block(); } break; - case 2: + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: enterOuterAlt(_localctx, 2); { - setState(192); + setState(188); statement(); } break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -943,29 +1035,27 @@ class PainlessParser extends Parser { public final BlockContext block() throws RecognitionException { BlockContext _localctx = new BlockContext(_ctx, getState()); enterRule(_localctx, 10, RULE_block); + int _la; try { - int _alt; enterOuterAlt(_localctx, 1); { - setState(195); + setState(191); match(LBRACK); - setState(199); + setState(195); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,13,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(196); - statement(); - } - } + _la = _input.LA(1); + while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + { + { + setState(192); + statement(); } - setState(201); + } + setState(197); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,13,_ctx); + _la = _input.LA(1); } - setState(202); + setState(198); match(RBRACK); } } @@ -999,7 +1089,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(204); + setState(200); match(SEMICOLON); } } @@ -1036,19 +1126,19 @@ class PainlessParser extends Parser { InitializerContext _localctx = new InitializerContext(_ctx, getState()); enterRule(_localctx, 14, RULE_initializer); try { - setState(208); + setState(204); switch ( 
getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(206); + setState(202); declaration(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(207); + setState(203); expression(0); } break; @@ -1086,7 +1176,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(210); + setState(206); expression(0); } } @@ -1133,23 +1223,23 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(212); + setState(208); decltype(); - setState(213); + setState(209); declvar(); - setState(218); + setState(214); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(214); + setState(210); match(COMMA); - setState(215); + setState(211); declvar(); } } - setState(220); + setState(216); _errHandler.sync(this); _la = _input.LA(1); } @@ -1194,23 +1284,23 @@ class PainlessParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(221); + setState(217); match(TYPE); - setState(226); + setState(222); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(222); + setState(218); match(LBRACE); - setState(223); + setState(219); match(RBRACE); } } } - setState(228); + setState(224); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1251,15 +1341,15 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(229); + setState(225); match(ID); - setState(232); + setState(228); _la = _input.LA(1); if (_la==ASSIGN) { { - setState(230); + setState(226); match(ASSIGN); - setState(231); + setState(227); expression(0); } } @@ -1303,17 +1393,17 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(234); + setState(230); match(CATCH); - setState(235); + setState(231); match(LP); - setState(236); + setState(232); match(TYPE); - setState(237); + setState(233); match(ID); - setState(238); + setState(234); match(RP); - setState(239); + setState(235); block(); } } @@ -1349,7 +1439,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(241); + setState(237); _la = _input.LA(1); if ( !(_la==EOF || _la==SEMICOLON) ) { _errHandler.recoverInline(this); @@ -1370,7 +1460,6 @@ class PainlessParser extends Parser { } public static class ExpressionContext extends ParserRuleContext { - public boolean s = true; public ExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1379,11 +1468,9 @@ class PainlessParser extends Parser { public ExpressionContext() { } public void copyFrom(ExpressionContext ctx) { super.copyFrom(ctx); - this.s = ctx.s; } } public static class SingleContext extends ExpressionContext { - public UnaryContext u; public UnaryContext unary() { return getRuleContext(UnaryContext.class,0); } @@ -1433,8 +1520,6 @@ class PainlessParser extends Parser { } } public static class ConditionalContext extends ExpressionContext { - public ExpressionContext e0; - public ExpressionContext e1; public List expression() { return getRuleContexts(ExpressionContext.class); } @@ -1451,11 +1536,11 @@ class PainlessParser extends Parser { } } public static class AssignmentContext extends ExpressionContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); + public List expression() { + return getRuleContexts(ExpressionContext.class); } - public 
ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); } public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } public TerminalNode AADD() { return getToken(PainlessParser.AADD, 0); } @@ -1535,259 +1620,238 @@ class PainlessParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(252); - switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { - case 1: - { - _localctx = new AssignmentContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; + { + _localctx = new SingleContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; - setState(244); - chain(true); - setState(245); - _la = _input.LA(1); - if ( !(((((_la - 58)) & ~0x3f) == 0 && ((1L << (_la - 58)) & ((1L << (ASSIGN - 58)) | (1L << (AADD - 58)) | (1L << (ASUB - 58)) | (1L << (AMUL - 58)) | (1L << (ADIV - 58)) | (1L << (AREM - 58)) | (1L << (AAND - 58)) | (1L << (AXOR - 58)) | (1L << (AOR - 58)) | (1L << (ALSH - 58)) | (1L << (ARSH - 58)) | (1L << (AUSH - 58)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(246); - expression(1); - ((AssignmentContext)_localctx).s = false; - } - break; - case 2: - { - _localctx = new SingleContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(249); - ((SingleContext)_localctx).u = unary(false); - ((SingleContext)_localctx).s = ((SingleContext)_localctx).u.s; - } - break; + setState(240); + unary(); } _ctx.stop = _input.LT(-1); - setState(323); + setState(289); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(321); - switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { + setState(287); + switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(254); + setState(242); if (!(precpred(_ctx, 14))) throw new FailedPredicateException(this, "precpred(_ctx, 14)"); - setState(255); + setState(243); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(256); + setState(244); expression(15); - ((BinaryContext)_localctx).s = false; } break; case 2: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(259); + setState(245); if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); - setState(260); + setState(246); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(261); + setState(247); expression(14); - ((BinaryContext)_localctx).s = false; } break; case 3: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(264); + setState(248); if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(265); + setState(249); _la = _input.LA(1); if ( 
!(_la==FIND || _la==MATCH) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(266); + setState(250); expression(13); - ((BinaryContext)_localctx).s = false; } break; case 4: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(269); + setState(251); if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(270); + setState(252); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(271); + setState(253); expression(12); - ((BinaryContext)_localctx).s = false; } break; case 5: { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(274); + setState(254); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(275); + setState(255); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(276); + setState(256); expression(11); - ((CompContext)_localctx).s = false; } break; case 6: { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(279); + setState(257); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(280); + setState(258); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(281); + setState(259); expression(9); - ((CompContext)_localctx).s = false; } break; case 7: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(284); + setState(260); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(285); + setState(261); match(BWAND); - setState(286); + setState(262); expression(8); - ((BinaryContext)_localctx).s = false; } break; case 8: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(289); + setState(263); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(290); + setState(264); match(XOR); - setState(291); + setState(265); expression(7); - ((BinaryContext)_localctx).s = false; } break; case 9: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(294); + setState(266); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(295); + setState(267); match(BWOR); - setState(296); + setState(268); expression(6); - ((BinaryContext)_localctx).s = false; } break; case 10: { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(299); + setState(269); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(300); + setState(270); 
match(BOOLAND); - setState(301); + setState(271); expression(5); - ((BoolContext)_localctx).s = false; } break; case 11: { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(304); + setState(272); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(305); + setState(273); match(BOOLOR); - setState(306); + setState(274); expression(4); - ((BoolContext)_localctx).s = false; } break; case 12: { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(309); + setState(275); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(310); + setState(276); match(COND); - setState(311); - ((ConditionalContext)_localctx).e0 = expression(0); - setState(312); + setState(277); + expression(0); + setState(278); match(COLON); - setState(313); - ((ConditionalContext)_localctx).e1 = expression(2); - ((ConditionalContext)_localctx).s = ((ConditionalContext)_localctx).e0.s && ((ConditionalContext)_localctx).e1.s; + setState(279); + expression(2); } break; case 13: + { + _localctx = new AssignmentContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(281); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(282); + _la = _input.LA(1); + if ( !(((((_la - 58)) & ~0x3f) == 0 && ((1L << (_la - 58)) & ((1L << (ASSIGN - 58)) | (1L << (AADD - 58)) | (1L << (ASUB - 58)) | (1L << (AMUL - 58)) | (1L << (ADIV - 58)) | (1L << (AREM - 58)) | (1L << (AAND - 58)) | (1L << (AXOR - 58)) | (1L << (AOR - 58)) | (1L << (ALSH - 58)) | (1L << (ARSH - 58)) | (1L << (AUSH - 58)))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(283); + expression(1); + } + break; + case 14: { _localctx = new InstanceofContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(316); + setState(284); if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(317); + setState(285); match(INSTANCEOF); - setState(318); + setState(286); decltype(); - ((InstanceofContext)_localctx).s = false; } break; } } } - setState(325); + setState(291); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } } } @@ -1803,31 +1867,14 @@ class PainlessParser extends Parser { } public static class UnaryContext extends ParserRuleContext { - public boolean c; - public boolean s = true; - public UnaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - public UnaryContext(ParserRuleContext parent, int invokingState, boolean c) { + public UnaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); - this.c = c; } @Override public int getRuleIndex() { return RULE_unary; } public UnaryContext() { } public void copyFrom(UnaryContext ctx) { super.copyFrom(ctx); - this.c = ctx.c; - this.s = ctx.s; - } - } - public static class ListinitContext extends UnaryContext { - public ListinitializerContext listinitializer() { - return getRuleContext(ListinitializerContext.class,0); - } - public ListinitContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - 
public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinit(this); - else return visitor.visitChildren(this); } } public static class CastContext extends UnaryContext { @@ -1883,56 +1930,6 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class NullContext extends UnaryContext { - public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } - public NullContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); - else return visitor.visitChildren(this); - } - } - public static class MapinitContext extends UnaryContext { - public MapinitializerContext mapinitializer() { - return getRuleContext(MapinitializerContext.class,0); - } - public MapinitContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinit(this); - else return visitor.visitChildren(this); - } - } - public static class TrueContext extends UnaryContext { - public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } - public TrueContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); - else return visitor.visitChildren(this); - } - } - public static class FalseContext extends UnaryContext { - public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } - public FalseContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); - else return visitor.visitChildren(this); - } - } - public static class NumericContext extends UnaryContext { - public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } - public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } - public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } - public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } - public NumericContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); - else return visitor.visitChildren(this); - } - } public static class OperatorContext extends UnaryContext { public UnaryContext unary() { return getRuleContext(UnaryContext.class,0); @@ -1949,39 +1946,35 @@ class PainlessParser extends Parser { } } - public final UnaryContext unary(boolean c) throws RecognitionException { - UnaryContext _localctx = new UnaryContext(_ctx, getState(), c); + public final UnaryContext unary() throws RecognitionException { + UnaryContext _localctx = new UnaryContext(_ctx, getState()); enterRule(_localctx, 30, RULE_unary); int _la; try { - setState(363); - switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { + setState(305); + switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: _localctx = new PreContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(326); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(327); 
+ setState(292); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(328); - chain(true); + setState(293); + chain(); } break; case 2: _localctx = new PostContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(329); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(330); - chain(true); - setState(331); + setState(294); + chain(); + setState(295); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -1994,112 +1987,37 @@ class PainlessParser extends Parser { _localctx = new ReadContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(333); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(334); - chain(false); + setState(297); + chain(); } break; case 4: - _localctx = new NumericContext(_localctx); + _localctx = new OperatorContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(335); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(336); - _la = _input.LA(1); - if ( !(((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - ((NumericContext)_localctx).s = false; - } - break; - case 5: - _localctx = new TrueContext(_localctx); - enterOuterAlt(_localctx, 5); - { - setState(338); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(339); - match(TRUE); - ((TrueContext)_localctx).s = false; - } - break; - case 6: - _localctx = new FalseContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(341); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(342); - match(FALSE); - ((FalseContext)_localctx).s = false; - } - break; - case 7: - _localctx = new NullContext(_localctx); - enterOuterAlt(_localctx, 7); - { - setState(344); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(345); - match(NULL); - ((NullContext)_localctx).s = false; - } - break; - case 8: - _localctx = new ListinitContext(_localctx); - enterOuterAlt(_localctx, 8); - { - setState(347); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(348); - listinitializer(); - ((ListinitContext)_localctx).s = false; - } - break; - case 9: - _localctx = new MapinitContext(_localctx); - enterOuterAlt(_localctx, 9); - { - setState(351); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(352); - mapinitializer(); - ((MapinitContext)_localctx).s = false; - } - break; - case 10: - _localctx = new OperatorContext(_localctx); - enterOuterAlt(_localctx, 10); - { - setState(355); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(356); + setState(298); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(357); - unary(false); + setState(299); + unary(); } break; - case 11: + case 5: _localctx = new CastContext(_localctx); - enterOuterAlt(_localctx, 11); + enterOuterAlt(_localctx, 5); { - setState(358); + setState(300); match(LP); - setState(359); + setState(301); decltype(); - setState(360); + setState(302); match(RP); - setState(361); - unary(_localctx.c); + setState(303); + unary(); } 
break; } @@ -2116,32 +2034,28 @@ class PainlessParser extends Parser { } public static class ChainContext extends ParserRuleContext { - public boolean c; - public ChainContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - public ChainContext(ParserRuleContext parent, int invokingState, boolean c) { + public ChainContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); - this.c = c; } @Override public int getRuleIndex() { return RULE_chain; } public ChainContext() { } public void copyFrom(ChainContext ctx) { super.copyFrom(ctx); - this.c = ctx.c; } } public static class StaticContext extends ChainContext { public DecltypeContext decltype() { return getRuleContext(DecltypeContext.class,0); } - public DotContext dot() { - return getRuleContext(DotContext.class,0); + public PostdotContext postdot() { + return getRuleContext(PostdotContext.class,0); } - public List secondary() { - return getRuleContexts(SecondaryContext.class); + public List postfix() { + return getRuleContexts(PostfixContext.class); } - public SecondaryContext secondary(int i) { - return getRuleContext(SecondaryContext.class,i); + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class,i); } public StaticContext(ChainContext ctx) { copyFrom(ctx); } @Override @@ -2151,15 +2065,14 @@ class PainlessParser extends Parser { } } public static class DynamicContext extends ChainContext { - public PrimaryContext p; public PrimaryContext primary() { return getRuleContext(PrimaryContext.class,0); } - public List secondary() { - return getRuleContexts(SecondaryContext.class); + public List postfix() { + return getRuleContexts(PostfixContext.class); } - public SecondaryContext secondary(int i) { - return getRuleContext(SecondaryContext.class,i); + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class,i); } public DynamicContext(ChainContext ctx) { copyFrom(ctx); } @Override @@ -2180,34 +2093,34 @@ class PainlessParser extends Parser { } } - public final ChainContext chain(boolean c) throws RecognitionException { - ChainContext _localctx = new ChainContext(_ctx, getState(), c); + public final ChainContext chain() throws RecognitionException { + ChainContext _localctx = new ChainContext(_ctx, getState()); enterRule(_localctx, 32, RULE_chain); try { int _alt; - setState(381); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + setState(323); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: _localctx = new DynamicContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(365); - ((DynamicContext)_localctx).p = primary(_localctx.c); - setState(369); + setState(307); + primary(); + setState(311); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(366); - secondary(((DynamicContext)_localctx).p.s); + setState(308); + postfix(); } } } - setState(371); + setState(313); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } } break; @@ -2215,25 +2128,25 @@ class PainlessParser extends Parser { _localctx = new StaticContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(372); + setState(314); decltype(); - setState(373); - dot(); - setState(377); + setState(315); + postdot(); + setState(319); 
_errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(374); - secondary(true); + setState(316); + postfix(); } } } - setState(379); + setState(321); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } } break; @@ -2241,7 +2154,7 @@ class PainlessParser extends Parser { _localctx = new NewarrayContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(380); + setState(322); arrayinitializer(); } break; @@ -2259,20 +2172,25 @@ class PainlessParser extends Parser { } public static class PrimaryContext extends ParserRuleContext { - public boolean c; - public boolean s = true; - public PrimaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - public PrimaryContext(ParserRuleContext parent, int invokingState, boolean c) { + public PrimaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); - this.c = c; } @Override public int getRuleIndex() { return RULE_primary; } public PrimaryContext() { } public void copyFrom(PrimaryContext ctx) { super.copyFrom(ctx); - this.c = ctx.c; - this.s = ctx.s; + } + } + public static class ListinitContext extends PrimaryContext { + public ListinitializerContext listinitializer() { + return getRuleContext(ListinitializerContext.class,0); + } + public ListinitContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinit(this); + else return visitor.visitChildren(this); } } public static class RegexContext extends PrimaryContext { @@ -2284,6 +2202,15 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } + public static class NullContext extends PrimaryContext { + public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } + public NullContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); + else return visitor.visitChildren(this); + } + } public static class StringContext extends PrimaryContext { public TerminalNode STRING() { return getToken(PainlessParser.STRING, 0); } public StringContext(PrimaryContext ctx) { copyFrom(ctx); } @@ -2293,6 +2220,17 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } + public static class MapinitContext extends PrimaryContext { + public MapinitializerContext mapinitializer() { + return getRuleContext(MapinitializerContext.class,0); + } + public MapinitContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinit(this); + else return visitor.visitChildren(this); + } + } public static class CalllocalContext extends PrimaryContext { public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public ArgumentsContext arguments() { @@ -2305,6 +2243,24 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } + public static class TrueContext extends PrimaryContext { + public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 
0); } + public TrueContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); + else return visitor.visitChildren(this); + } + } + public static class FalseContext extends PrimaryContext { + public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } + public FalseContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); + else return visitor.visitChildren(this); + } + } public static class VariableContext extends PrimaryContext { public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public VariableContext(PrimaryContext ctx) { copyFrom(ctx); } @@ -2314,17 +2270,15 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class ExprprecContext extends PrimaryContext { - public ExpressionContext e; - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public ExprprecContext(PrimaryContext ctx) { copyFrom(ctx); } + public static class NumericContext extends PrimaryContext { + public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } + public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } + public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } + public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } + public NumericContext(PrimaryContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExprprec(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); else return visitor.visitChildren(this); } } @@ -2341,98 +2295,135 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class ChainprecContext extends PrimaryContext { + public static class PrecedenceContext extends PrimaryContext { public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public UnaryContext unary() { - return getRuleContext(UnaryContext.class,0); + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public ChainprecContext(PrimaryContext ctx) { copyFrom(ctx); } + public PrecedenceContext(PrimaryContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitChainprec(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPrecedence(this); else return visitor.visitChildren(this); } } - public final PrimaryContext primary(boolean c) throws RecognitionException { - PrimaryContext _localctx = new PrimaryContext(_ctx, getState(), c); + public final PrimaryContext primary() throws RecognitionException { + PrimaryContext _localctx = new PrimaryContext(_ctx, getState()); enterRule(_localctx, 34, RULE_primary); + int _la; try { - setState(402); - switch ( 
getInterpreter().adaptivePredict(_input,25,_ctx) ) { + setState(343); + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: - _localctx = new ExprprecContext(_localctx); + _localctx = new PrecedenceContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(383); - if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(384); + setState(325); match(LP); - setState(385); - ((ExprprecContext)_localctx).e = expression(0); - setState(386); + setState(326); + expression(0); + setState(327); match(RP); - ((ExprprecContext)_localctx).s = ((ExprprecContext)_localctx).e.s; } break; case 2: - _localctx = new ChainprecContext(_localctx); + _localctx = new NumericContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(389); - if (!( _localctx.c )) throw new FailedPredicateException(this, " $c "); - setState(390); - match(LP); - setState(391); - unary(true); - setState(392); - match(RP); + setState(329); + _la = _input.LA(1); + if ( !(((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } } break; case 3: - _localctx = new StringContext(_localctx); + _localctx = new TrueContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(394); - match(STRING); + setState(330); + match(TRUE); } break; case 4: - _localctx = new RegexContext(_localctx); + _localctx = new FalseContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(395); - match(REGEX); + setState(331); + match(FALSE); } break; case 5: - _localctx = new VariableContext(_localctx); + _localctx = new NullContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(396); - match(ID); + setState(332); + match(NULL); } break; case 6: - _localctx = new CalllocalContext(_localctx); + _localctx = new StringContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(397); - match(ID); - setState(398); - arguments(); + setState(333); + match(STRING); } break; case 7: - _localctx = new NewobjectContext(_localctx); + _localctx = new RegexContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(399); + setState(334); + match(REGEX); + } + break; + case 8: + _localctx = new ListinitContext(_localctx); + enterOuterAlt(_localctx, 8); + { + setState(335); + listinitializer(); + } + break; + case 9: + _localctx = new MapinitContext(_localctx); + enterOuterAlt(_localctx, 9); + { + setState(336); + mapinitializer(); + } + break; + case 10: + _localctx = new VariableContext(_localctx); + enterOuterAlt(_localctx, 10); + { + setState(337); + match(ID); + } + break; + case 11: + _localctx = new CalllocalContext(_localctx); + enterOuterAlt(_localctx, 11); + { + setState(338); + match(ID); + setState(339); + arguments(); + } + break; + case 12: + _localctx = new NewobjectContext(_localctx); + enterOuterAlt(_localctx, 12); + { + setState(340); match(NEW); - setState(400); + setState(341); match(TYPE); - setState(401); + setState(342); arguments(); } break; @@ -2449,49 +2440,103 @@ class PainlessParser extends Parser { return _localctx; } - public static class SecondaryContext extends ParserRuleContext { - public boolean s; - public DotContext dot() { - return getRuleContext(DotContext.class,0); + public static class PostfixContext extends ParserRuleContext { + public CallinvokeContext callinvoke() { + return getRuleContext(CallinvokeContext.class,0); } - public BraceContext brace() { - return getRuleContext(BraceContext.class,0); + public 
FieldaccessContext fieldaccess() { + return getRuleContext(FieldaccessContext.class,0); } - public SecondaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - public SecondaryContext(ParserRuleContext parent, int invokingState, boolean s) { + public BraceaccessContext braceaccess() { + return getRuleContext(BraceaccessContext.class,0); + } + public PostfixContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); - this.s = s; } - @Override public int getRuleIndex() { return RULE_secondary; } + @Override public int getRuleIndex() { return RULE_postfix; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSecondary(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostfix(this); else return visitor.visitChildren(this); } } - public final SecondaryContext secondary(boolean s) throws RecognitionException { - SecondaryContext _localctx = new SecondaryContext(_ctx, getState(), s); - enterRule(_localctx, 36, RULE_secondary); + public final PostfixContext postfix() throws RecognitionException { + PostfixContext _localctx = new PostfixContext(_ctx, getState()); + enterRule(_localctx, 36, RULE_postfix); try { - setState(408); + setState(348); + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(345); + callinvoke(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(346); + fieldaccess(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(347); + braceaccess(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class PostdotContext extends ParserRuleContext { + public CallinvokeContext callinvoke() { + return getRuleContext(CallinvokeContext.class,0); + } + public FieldaccessContext fieldaccess() { + return getRuleContext(FieldaccessContext.class,0); + } + public PostdotContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_postdot; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostdot(this); + else return visitor.visitChildren(this); + } + } + + public final PostdotContext postdot() throws RecognitionException { + PostdotContext _localctx = new PostdotContext(_ctx, getState()); + enterRule(_localctx, 38, RULE_postdot); + try { + setState(352); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(404); - if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); - setState(405); - dot(); + setState(350); + callinvoke(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(406); - if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); - setState(407); - brace(); + setState(351); + fieldaccess(); } break; } @@ -2507,35 +2552,56 @@ class PainlessParser extends Parser { return _localctx; } - public static class DotContext extends ParserRuleContext { - public DotContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_dot; } - - public 
DotContext() { } - public void copyFrom(DotContext ctx) { - super.copyFrom(ctx); - } - } - public static class CallinvokeContext extends DotContext { + public static class CallinvokeContext extends ParserRuleContext { public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } public ArgumentsContext arguments() { return getRuleContext(ArgumentsContext.class,0); } - public CallinvokeContext(DotContext ctx) { copyFrom(ctx); } + public CallinvokeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_callinvoke; } @Override public T accept(ParseTreeVisitor visitor) { if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCallinvoke(this); else return visitor.visitChildren(this); } } - public static class FieldaccessContext extends DotContext { + + public final CallinvokeContext callinvoke() throws RecognitionException { + CallinvokeContext _localctx = new CallinvokeContext(_ctx, getState()); + enterRule(_localctx, 40, RULE_callinvoke); + try { + enterOuterAlt(_localctx, 1); + { + setState(354); + match(DOT); + setState(355); + match(DOTID); + setState(356); + arguments(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class FieldaccessContext extends ParserRuleContext { public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } public TerminalNode DOTINTEGER() { return getToken(PainlessParser.DOTINTEGER, 0); } - public FieldaccessContext(DotContext ctx) { copyFrom(ctx); } + public FieldaccessContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fieldaccess; } @Override public T accept(ParseTreeVisitor visitor) { if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFieldaccess(this); @@ -2543,37 +2609,285 @@ class PainlessParser extends Parser { } } - public final DotContext dot() throws RecognitionException { - DotContext _localctx = new DotContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_dot); + public final FieldaccessContext fieldaccess() throws RecognitionException { + FieldaccessContext _localctx = new FieldaccessContext(_ctx, getState()); + enterRule(_localctx, 42, RULE_fieldaccess); int _la; try { - setState(415); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + enterOuterAlt(_localctx, 1); + { + setState(358); + match(DOT); + setState(359); + _la = _input.LA(1); + if ( !(_la==DOTINTEGER || _la==DOTID) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class BraceaccessContext extends ParserRuleContext { + public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } + public BraceaccessContext(ParserRuleContext parent, int invokingState) { + super(parent, 
invokingState); + } + @Override public int getRuleIndex() { return RULE_braceaccess; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBraceaccess(this); + else return visitor.visitChildren(this); + } + } + + public final BraceaccessContext braceaccess() throws RecognitionException { + BraceaccessContext _localctx = new BraceaccessContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_braceaccess); + try { + enterOuterAlt(_localctx, 1); + { + setState(361); + match(LBRACE); + setState(362); + expression(0); + setState(363); + match(RBRACE); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ArrayinitializerContext extends ParserRuleContext { + public ArrayinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_arrayinitializer; } + + public ArrayinitializerContext() { } + public void copyFrom(ArrayinitializerContext ctx) { + super.copyFrom(ctx); + } + } + public static class NewstandardarrayContext extends ArrayinitializerContext { + public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public List LBRACE() { return getTokens(PainlessParser.LBRACE); } + public TerminalNode LBRACE(int i) { + return getToken(PainlessParser.LBRACE, i); + } + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public List RBRACE() { return getTokens(PainlessParser.RBRACE); } + public TerminalNode RBRACE(int i) { + return getToken(PainlessParser.RBRACE, i); + } + public PostdotContext postdot() { + return getRuleContext(PostdotContext.class,0); + } + public List postfix() { + return getRuleContexts(PostfixContext.class); + } + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class,i); + } + public NewstandardarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewstandardarray(this); + else return visitor.visitChildren(this); + } + } + public static class NewinitializedarrayContext extends ArrayinitializerContext { + public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } + public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } + public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } + public List postfix() { + return getRuleContexts(PostfixContext.class); + } + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class,i); + } + public List COMMA() { return 
getTokens(PainlessParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + public NewinitializedarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewinitializedarray(this); + else return visitor.visitChildren(this); + } + } + + public final ArrayinitializerContext arrayinitializer() throws RecognitionException { + ArrayinitializerContext _localctx = new ArrayinitializerContext(_ctx, getState()); + enterRule(_localctx, 46, RULE_arrayinitializer); + int _la; + try { + int _alt; + setState(409); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: - _localctx = new CallinvokeContext(_localctx); + _localctx = new NewstandardarrayContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(410); - match(DOT); - setState(411); - match(DOTID); - setState(412); - arguments(); + setState(365); + match(NEW); + setState(366); + match(TYPE); + setState(371); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(367); + match(LBRACE); + setState(368); + expression(0); + setState(369); + match(RBRACE); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(373); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + setState(382); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: + { + setState(375); + postdot(); + setState(379); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(376); + postfix(); + } + } + } + setState(381); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + } + } + break; + } } break; case 2: - _localctx = new FieldaccessContext(_localctx); + _localctx = new NewinitializedarrayContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(413); - match(DOT); - setState(414); + setState(384); + match(NEW); + setState(385); + match(TYPE); + setState(386); + match(LBRACE); + setState(387); + match(RBRACE); + setState(388); + match(LBRACK); + setState(397); _la = _input.LA(1); - if ( !(_la==DOTINTEGER || _la==DOTID) ) { - _errHandler.recoverInline(this); - } else { - consume(); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + { + setState(389); + expression(0); + setState(394); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(390); + match(COMMA); + setState(391); + expression(0); + } + } + setState(396); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(400); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(399); + match(SEMICOLON); + } + } + + setState(402); + match(RBRACK); + setState(406); + _errHandler.sync(this); + _alt = 
getInterpreter().adaptivePredict(_input,33,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(403); + postfix(); + } + } + } + setState(408); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } } break; @@ -2590,44 +2904,200 @@ class PainlessParser extends Parser { return _localctx; } - public static class BraceContext extends ParserRuleContext { - public BraceContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_brace; } - - public BraceContext() { } - public void copyFrom(BraceContext ctx) { - super.copyFrom(ctx); - } - } - public static class BraceaccessContext extends BraceContext { + public static class ListinitializerContext extends ParserRuleContext { public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); } public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public BraceaccessContext(BraceContext ctx) { copyFrom(ctx); } + public List COMMA() { return getTokens(PainlessParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + public ListinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_listinitializer; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBraceaccess(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinitializer(this); else return visitor.visitChildren(this); } } - public final BraceContext brace() throws RecognitionException { - BraceContext _localctx = new BraceContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_brace); + public final ListinitializerContext listinitializer() throws RecognitionException { + ListinitializerContext _localctx = new ListinitializerContext(_ctx, getState()); + enterRule(_localctx, 48, RULE_listinitializer); + int _la; + try { + setState(424); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(411); + match(LBRACE); + setState(412); + expression(0); + setState(417); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(413); + match(COMMA); + setState(414); + expression(0); + } + } + setState(419); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(420); + match(RBRACE); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(422); + match(LBRACE); + setState(423); + match(RBRACE); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class MapinitializerContext extends ParserRuleContext { + public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } + public List maptoken() { + return getRuleContexts(MaptokenContext.class); + } + public MaptokenContext maptoken(int i) { + return 
getRuleContext(MaptokenContext.class,i); + } + public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } + public List COMMA() { return getTokens(PainlessParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } + public MapinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_mapinitializer; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinitializer(this); + else return visitor.visitChildren(this); + } + } + + public final MapinitializerContext mapinitializer() throws RecognitionException { + MapinitializerContext _localctx = new MapinitializerContext(_ctx, getState()); + enterRule(_localctx, 50, RULE_mapinitializer); + int _la; + try { + setState(440); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(426); + match(LBRACE); + setState(427); + maptoken(); + setState(432); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(428); + match(COMMA); + setState(429); + maptoken(); + } + } + setState(434); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(435); + match(RBRACE); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(437); + match(LBRACE); + setState(438); + match(COLON); + setState(439); + match(RBRACE); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class MaptokenContext extends ParserRuleContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } + public MaptokenContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_maptoken; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMaptoken(this); + else return visitor.visitChildren(this); + } + } + + public final MaptokenContext maptoken() throws RecognitionException { + MaptokenContext _localctx = new MaptokenContext(_ctx, getState()); + enterRule(_localctx, 52, RULE_maptoken); try { - _localctx = new BraceaccessContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(417); - match(LBRACE); - setState(418); + setState(442); + expression(0); + setState(443); + match(COLON); + setState(444); expression(0); - setState(419); - match(RBRACE); } } catch (RecognitionException re) { @@ -2667,40 +3137,40 @@ class PainlessParser extends Parser { public final ArgumentsContext arguments() throws RecognitionException { ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_arguments); + enterRule(_localctx, 54, RULE_arguments); int _la; try { enterOuterAlt(_localctx, 1); { { - setState(421); + setState(446); match(LP); - setState(430); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { - case 1: + setState(455); + _la = 
_input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { { - setState(422); + setState(447); argument(); - setState(427); + setState(452); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(423); + setState(448); match(COMMA); - setState(424); + setState(449); argument(); } } - setState(429); + setState(454); _errHandler.sync(this); _la = _input.LA(1); } } - break; } - setState(432); + + setState(457); match(RP); } } @@ -2739,28 +3209,28 @@ class PainlessParser extends Parser { public final ArgumentContext argument() throws RecognitionException { ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_argument); + enterRule(_localctx, 56, RULE_argument); try { - setState(437); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + setState(462); + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(434); + setState(459); expression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(435); + setState(460); lambda(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(436); + setState(461); funcref(); } break; @@ -2810,72 +3280,93 @@ class PainlessParser extends Parser { public final LambdaContext lambda() throws RecognitionException { LambdaContext _localctx = new LambdaContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_lambda); + enterRule(_localctx, 58, RULE_lambda); int _la; try { enterOuterAlt(_localctx, 1); { - setState(452); + setState(477); switch (_input.LA(1)) { case TYPE: case ID: { - setState(439); + setState(464); lamtype(); } break; case LP: { - setState(440); + setState(465); match(LP); - setState(449); + setState(474); _la = _input.LA(1); if (_la==TYPE || _la==ID) { { - setState(441); + setState(466); lamtype(); - setState(446); + setState(471); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(442); + setState(467); match(COMMA); - setState(443); + setState(468); lamtype(); } } - setState(448); + setState(473); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(451); + setState(476); match(RP); } break; default: throw new NoViableAltException(this); } - setState(454); + setState(479); match(ARROW); - setState(457); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { - case 1: + setState(482); + switch (_input.LA(1)) { + case LBRACK: { - setState(455); + setState(480); block(); } break; - case 2: + case LBRACE: + case LP: + case NEW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: { - setState(456); + setState(481); expression(0); } break; + default: + throw new NoViableAltException(this); } } } @@ -2908,21 +3399,21 @@ class PainlessParser extends Parser { public final LamtypeContext lamtype() throws RecognitionException { LamtypeContext _localctx = new LamtypeContext(_ctx, getState()); - enterRule(_localctx, 
48, RULE_lamtype); + enterRule(_localctx, 60, RULE_lamtype); int _la; try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(485); _la = _input.LA(1); if (_la==TYPE) { { - setState(459); + setState(484); decltype(); } } - setState(462); + setState(487); match(ID); } } @@ -2938,61 +3429,117 @@ class PainlessParser extends Parser { } public static class FuncrefContext extends ParserRuleContext { - public ClassFuncrefContext classFuncref() { - return getRuleContext(ClassFuncrefContext.class,0); - } - public ConstructorFuncrefContext constructorFuncref() { - return getRuleContext(ConstructorFuncrefContext.class,0); - } - public CapturingFuncrefContext capturingFuncref() { - return getRuleContext(CapturingFuncrefContext.class,0); - } - public LocalFuncrefContext localFuncref() { - return getRuleContext(LocalFuncrefContext.class,0); - } public FuncrefContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_funcref; } + + public FuncrefContext() { } + public void copyFrom(FuncrefContext ctx) { + super.copyFrom(ctx); + } + } + public static class ClassfuncrefContext extends FuncrefContext { + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } + public ClassfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFuncref(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitClassfuncref(this); + else return visitor.visitChildren(this); + } + } + public static class CapturingfuncrefContext extends FuncrefContext { + public List ID() { return getTokens(PainlessParser.ID); } + public TerminalNode ID(int i) { + return getToken(PainlessParser.ID, i); + } + public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } + public CapturingfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCapturingfuncref(this); + else return visitor.visitChildren(this); + } + } + public static class ConstructorfuncrefContext extends FuncrefContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class,0); + } + public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } + public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } + public ConstructorfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitConstructorfuncref(this); + else return visitor.visitChildren(this); + } + } + public static class LocalfuncrefContext extends FuncrefContext { + public TerminalNode THIS() { return getToken(PainlessParser.THIS, 0); } + public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } + public LocalfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLocalfuncref(this); else return 
visitor.visitChildren(this); } } public final FuncrefContext funcref() throws RecognitionException { FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_funcref); + enterRule(_localctx, 62, RULE_funcref); try { - setState(468); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + setState(502); + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: + _localctx = new ClassfuncrefContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(464); - classFuncref(); + setState(489); + match(TYPE); + setState(490); + match(REF); + setState(491); + match(ID); } break; case 2: + _localctx = new ConstructorfuncrefContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(465); - constructorFuncref(); + setState(492); + decltype(); + setState(493); + match(REF); + setState(494); + match(NEW); } break; case 3: + _localctx = new CapturingfuncrefContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(466); - capturingFuncref(); + setState(496); + match(ID); + setState(497); + match(REF); + setState(498); + match(ID); } break; case 4: + _localctx = new LocalfuncrefContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(467); - localFuncref(); + setState(499); + match(THIS); + setState(500); + match(REF); + setState(501); + match(ID); } break; } @@ -3008,591 +3555,12 @@ class PainlessParser extends Parser { return _localctx; } - public static class ClassFuncrefContext extends ParserRuleContext { - public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public ClassFuncrefContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_classFuncref; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitClassFuncref(this); - else return visitor.visitChildren(this); - } - } - - public final ClassFuncrefContext classFuncref() throws RecognitionException { - ClassFuncrefContext _localctx = new ClassFuncrefContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_classFuncref); - try { - enterOuterAlt(_localctx, 1); - { - setState(470); - match(TYPE); - setState(471); - match(REF); - setState(472); - match(ID); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class ConstructorFuncrefContext extends ParserRuleContext { - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public ConstructorFuncrefContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_constructorFuncref; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitConstructorFuncref(this); - else return visitor.visitChildren(this); - } - } - - public final ConstructorFuncrefContext constructorFuncref() throws RecognitionException { - ConstructorFuncrefContext _localctx = new 
ConstructorFuncrefContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_constructorFuncref); - try { - enterOuterAlt(_localctx, 1); - { - setState(474); - decltype(); - setState(475); - match(REF); - setState(476); - match(NEW); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class CapturingFuncrefContext extends ParserRuleContext { - public List ID() { return getTokens(PainlessParser.ID); } - public TerminalNode ID(int i) { - return getToken(PainlessParser.ID, i); - } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public CapturingFuncrefContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_capturingFuncref; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCapturingFuncref(this); - else return visitor.visitChildren(this); - } - } - - public final CapturingFuncrefContext capturingFuncref() throws RecognitionException { - CapturingFuncrefContext _localctx = new CapturingFuncrefContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_capturingFuncref); - try { - enterOuterAlt(_localctx, 1); - { - setState(478); - match(ID); - setState(479); - match(REF); - setState(480); - match(ID); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LocalFuncrefContext extends ParserRuleContext { - public TerminalNode THIS() { return getToken(PainlessParser.THIS, 0); } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public LocalFuncrefContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_localFuncref; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLocalFuncref(this); - else return visitor.visitChildren(this); - } - } - - public final LocalFuncrefContext localFuncref() throws RecognitionException { - LocalFuncrefContext _localctx = new LocalFuncrefContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_localFuncref); - try { - enterOuterAlt(_localctx, 1); - { - setState(482); - match(THIS); - setState(483); - match(REF); - setState(484); - match(ID); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class ArrayinitializerContext extends ParserRuleContext { - public ArrayinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_arrayinitializer; } - - public ArrayinitializerContext() { } - public void copyFrom(ArrayinitializerContext ctx) { - super.copyFrom(ctx); - } - } - public static class NewstandardarrayContext extends ArrayinitializerContext { - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } - 
public List LBRACE() { return getTokens(PainlessParser.LBRACE); } - public TerminalNode LBRACE(int i) { - return getToken(PainlessParser.LBRACE, i); - } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public List RBRACE() { return getTokens(PainlessParser.RBRACE); } - public TerminalNode RBRACE(int i) { - return getToken(PainlessParser.RBRACE, i); - } - public DotContext dot() { - return getRuleContext(DotContext.class,0); - } - public List secondary() { - return getRuleContexts(SecondaryContext.class); - } - public SecondaryContext secondary(int i) { - return getRuleContext(SecondaryContext.class,i); - } - public NewstandardarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewstandardarray(this); - else return visitor.visitChildren(this); - } - } - public static class NewinitializedarrayContext extends ArrayinitializerContext { - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public NewinitializedarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewinitializedarray(this); - else return visitor.visitChildren(this); - } - } - - public final ArrayinitializerContext arrayinitializer() throws RecognitionException { - ArrayinitializerContext _localctx = new ArrayinitializerContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_arrayinitializer); - int _la; - try { - int _alt; - setState(524); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { - case 1: - _localctx = new NewstandardarrayContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(486); - match(NEW); - setState(487); - match(TYPE); - setState(492); - _errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: - { - { - setState(488); - match(LBRACE); - setState(489); - expression(0); - setState(490); - match(RBRACE); - } - } - break; - default: - throw new NoViableAltException(this); - } - setState(494); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); - } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(503); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { - case 1: - { - setState(496); - dot(); - setState(500); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); - while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(497); - secondary(true); - } - } - } - setState(502); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); - } - } - break; - } - } - break; - case 2: - _localctx = new NewinitializedarrayContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(505); - match(NEW); - setState(506); - match(TYPE); - setState(507); - match(LBRACE); - setState(508); - match(RBRACE); - setState(509); - match(LBRACK); - setState(518); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { - case 1: - { - setState(510); - expression(0); - setState(515); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(511); - match(COMMA); - setState(512); - expression(0); - } - } - setState(517); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - break; - } - setState(521); - _la = _input.LA(1); - if (_la==SEMICOLON) { - { - setState(520); - match(SEMICOLON); - } - } - - setState(523); - match(RBRACK); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class ListinitializerContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public ListinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_listinitializer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinitializer(this); - else return visitor.visitChildren(this); - } - } - - public final ListinitializerContext listinitializer() throws RecognitionException { - ListinitializerContext _localctx = new ListinitializerContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_listinitializer); - int _la; - try { - setState(539); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(526); - match(LBRACE); - setState(527); - expression(0); - setState(532); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(528); - match(COMMA); - setState(529); - expression(0); - } - } - setState(534); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(535); - match(RBRACE); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(537); - match(LBRACE); - setState(538); - match(RBRACE); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class MapinitializerContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public List maptoken() { - return getRuleContexts(MaptokenContext.class); - } - public 
MaptokenContext maptoken(int i) { - return getRuleContext(MaptokenContext.class,i); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public MapinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_mapinitializer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinitializer(this); - else return visitor.visitChildren(this); - } - } - - public final MapinitializerContext mapinitializer() throws RecognitionException { - MapinitializerContext _localctx = new MapinitializerContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_mapinitializer); - int _la; - try { - setState(555); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(541); - match(LBRACE); - setState(542); - maptoken(); - setState(547); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(543); - match(COMMA); - setState(544); - maptoken(); - } - } - setState(549); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(550); - match(RBRACE); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(552); - match(LBRACE); - setState(553); - match(COLON); - setState(554); - match(RBRACE); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class MaptokenContext extends ParserRuleContext { - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public MaptokenContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_maptoken; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMaptoken(this); - else return visitor.visitChildren(this); - } - } - - public final MaptokenContext maptoken() throws RecognitionException { - MaptokenContext _localctx = new MaptokenContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_maptoken); - try { - enterOuterAlt(_localctx, 1); - { - setState(557); - expression(0); - setState(558); - match(COLON); - setState(559); - expression(0); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 3: return statement_sempred((StatementContext)_localctx, predIndex); case 14: return expression_sempred((ExpressionContext)_localctx, predIndex); - case 15: - return unary_sempred((UnaryContext)_localctx, predIndex); - case 17: - return primary_sempred((PrimaryContext)_localctx, predIndex); - case 18: - return 
secondary_sempred((SecondaryContext)_localctx, predIndex); } return true; } @@ -3630,267 +3598,207 @@ class PainlessParser extends Parser { case 12: return precpred(_ctx, 2); case 13: - return precpred(_ctx, 9); - } - return true; - } - private boolean unary_sempred(UnaryContext _localctx, int predIndex) { - switch (predIndex) { + return precpred(_ctx, 1); case 14: - return !_localctx.c ; - case 15: - return !_localctx.c ; - case 16: - return !_localctx.c ; - case 17: - return !_localctx.c ; - case 18: - return !_localctx.c ; - case 19: - return !_localctx.c ; - case 20: - return !_localctx.c ; - case 21: - return !_localctx.c ; - case 22: - return !_localctx.c ; - case 23: - return !_localctx.c ; - } - return true; - } - private boolean primary_sempred(PrimaryContext _localctx, int predIndex) { - switch (predIndex) { - case 24: - return !_localctx.c ; - case 25: - return _localctx.c ; - } - return true; - } - private boolean secondary_sempred(SecondaryContext _localctx, int predIndex) { - switch (predIndex) { - case 26: - return _localctx.s ; - case 27: - return _localctx.s ; + return precpred(_ctx, 9); } return true; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u0234\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u01fb\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ - "\t!\4\"\t\"\4#\t#\3\2\7\2H\n\2\f\2\16\2K\13\2\3\2\7\2N\n\2\f\2\16\2Q\13"+ - "\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4a\n\4\f"+ - "\4\16\4d\13\4\5\4f\n\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5r\n"+ - "\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5z\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+ - "\5\3\5\3\5\5\5\u0087\n\5\3\5\3\5\5\5\u008b\n\5\3\5\3\5\5\5\u008f\n\5\3"+ - "\5\3\5\3\5\5\5\u0094\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+ - "\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5"+ - "\3\5\3\5\6\5\u00b5\n\5\r\5\16\5\u00b6\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5"+ - "\u00c0\n\5\3\6\3\6\5\6\u00c4\n\6\3\7\3\7\7\7\u00c8\n\7\f\7\16\7\u00cb"+ - "\13\7\3\7\3\7\3\b\3\b\3\t\3\t\5\t\u00d3\n\t\3\n\3\n\3\13\3\13\3\13\3\13"+ - "\7\13\u00db\n\13\f\13\16\13\u00de\13\13\3\f\3\f\3\f\7\f\u00e3\n\f\f\f"+ - "\16\f\u00e6\13\f\3\r\3\r\3\r\5\r\u00eb\n\r\3\16\3\16\3\16\3\16\3\16\3"+ - "\16\3\16\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u00ff"+ - "\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ + "\t!\3\2\7\2D\n\2\f\2\16\2G\13\2\3\2\7\2J\n\2\f\2\16\2M\13\2\3\2\3\2\3"+ + "\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4]\n\4\f\4\16\4`\13\4"+ + "\5\4b\n\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5n\n\5\3\5\3\5\3\5"+ + "\3\5\3\5\3\5\5\5v\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5"+ + "\u0083\n\5\3\5\3\5\5\5\u0087\n\5\3\5\3\5\5\5\u008b\n\5\3\5\3\5\3\5\5\5"+ + "\u0090\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5"+ + "\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\6\5\u00b1"+ + "\n\5\r\5\16\5\u00b2\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5\u00bc\n\5\3\6\3\6"+ + "\5\6\u00c0\n\6\3\7\3\7\7\7\u00c4\n\7\f\7\16\7\u00c7\13\7\3\7\3\7\3\b\3"+ + "\b\3\t\3\t\5\t\u00cf\n\t\3\n\3\n\3\13\3\13\3\13\3\13\7\13\u00d7\n\13\f"+ + "\13\16\13\u00da\13\13\3\f\3\f\3\f\7\f\u00df\n\f\f\f\16\f\u00e2\13\f\3"+ + 
"\r\3\r\3\r\5\r\u00e7\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ - "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u0144"+ - "\n\20\f\20\16\20\u0147\13\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ - "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ - "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ - "\21\5\21\u016e\n\21\3\22\3\22\7\22\u0172\n\22\f\22\16\22\u0175\13\22\3"+ - "\22\3\22\3\22\7\22\u017a\n\22\f\22\16\22\u017d\13\22\3\22\5\22\u0180\n"+ - "\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3"+ - "\23\3\23\3\23\3\23\3\23\3\23\5\23\u0195\n\23\3\24\3\24\3\24\3\24\5\24"+ - "\u019b\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u01a2\n\25\3\26\3\26\3\26\3"+ - "\26\3\27\3\27\3\27\3\27\7\27\u01ac\n\27\f\27\16\27\u01af\13\27\5\27\u01b1"+ - "\n\27\3\27\3\27\3\30\3\30\3\30\5\30\u01b8\n\30\3\31\3\31\3\31\3\31\3\31"+ - "\7\31\u01bf\n\31\f\31\16\31\u01c2\13\31\5\31\u01c4\n\31\3\31\5\31\u01c7"+ - "\n\31\3\31\3\31\3\31\5\31\u01cc\n\31\3\32\5\32\u01cf\n\32\3\32\3\32\3"+ - "\33\3\33\3\33\3\33\5\33\u01d7\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35"+ - "\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \6 \u01ef"+ - "\n \r \16 \u01f0\3 \3 \7 \u01f5\n \f \16 \u01f8\13 \5 \u01fa\n \3 \3 "+ - "\3 \3 \3 \3 \3 \3 \7 \u0204\n \f \16 \u0207\13 \5 \u0209\n \3 \5 \u020c"+ - "\n \3 \5 \u020f\n \3!\3!\3!\3!\7!\u0215\n!\f!\16!\u0218\13!\3!\3!\3!\3"+ - "!\5!\u021e\n!\3\"\3\"\3\"\3\"\7\"\u0224\n\"\f\"\16\"\u0227\13\"\3\"\3"+ - "\"\3\"\3\"\3\"\5\"\u022e\n\"\3#\3#\3#\3#\3#\2\3\36$\2\4\6\b\n\f\16\20"+ - "\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BD\2\16\3\3\r\r\3\2\u020e\3\2\2\2@"+ - "\u021d\3\2\2\2B\u022d\3\2\2\2D\u022f\3\2\2\2FH\5\4\3\2GF\3\2\2\2HK\3\2"+ - "\2\2IG\3\2\2\2IJ\3\2\2\2JO\3\2\2\2KI\3\2\2\2LN\5\b\5\2ML\3\2\2\2NQ\3\2"+ - "\2\2OM\3\2\2\2OP\3\2\2\2PR\3\2\2\2QO\3\2\2\2RS\7\2\2\3S\3\3\2\2\2TU\5"+ - "\26\f\2UV\7R\2\2VW\5\6\4\2WX\5\f\7\2X\5\3\2\2\2Ye\7\t\2\2Z[\5\26\f\2["+ - "b\7R\2\2\\]\7\f\2\2]^\5\26\f\2^_\7R\2\2_a\3\2\2\2`\\\3\2\2\2ad\3\2\2\2"+ - "b`\3\2\2\2bc\3\2\2\2cf\3\2\2\2db\3\2\2\2eZ\3\2\2\2ef\3\2\2\2fg\3\2\2\2"+ - "gh\7\n\2\2h\7\3\2\2\2ij\7\16\2\2jk\7\t\2\2kl\5\36\20\2lm\7\n\2\2mq\5\n"+ - "\6\2no\7\20\2\2or\5\n\6\2pr\6\5\2\2qn\3\2\2\2qp\3\2\2\2r\u00c0\3\2\2\2"+ - "st\7\21\2\2tu\7\t\2\2uv\5\36\20\2vy\7\n\2\2wz\5\n\6\2xz\5\16\b\2yw\3\2"+ - "\2\2yx\3\2\2\2z\u00c0\3\2\2\2{|\7\22\2\2|}\5\f\7\2}~\7\21\2\2~\177\7\t"+ - "\2\2\177\u0080\5\36\20\2\u0080\u0081\7\n\2\2\u0081\u0082\5\34\17\2\u0082"+ - "\u00c0\3\2\2\2\u0083\u0084\7\23\2\2\u0084\u0086\7\t\2\2\u0085\u0087\5"+ - "\20\t\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088"+ - "\u008a\7\r\2\2\u0089\u008b\5\36\20\2\u008a\u0089\3\2\2\2\u008a\u008b\3"+ - "\2\2\2\u008b\u008c\3\2\2\2\u008c\u008e\7\r\2\2\u008d\u008f\5\22\n\2\u008e"+ - "\u008d\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u0093\7\n"+ - "\2\2\u0091\u0094\5\n\6\2\u0092\u0094\5\16\b\2\u0093\u0091\3\2\2\2\u0093"+ - "\u0092\3\2\2\2\u0094\u00c0\3\2\2\2\u0095\u0096\7\23\2\2\u0096\u0097\7"+ - "\t\2\2\u0097\u0098\5\26\f\2\u0098\u0099\7R\2\2\u0099\u009a\7\65\2\2\u009a"+ - "\u009b\5\36\20\2\u009b\u009c\7\n\2\2\u009c\u009d\5\n\6\2\u009d\u00c0\3"+ - "\2\2\2\u009e\u009f\7\23\2\2\u009f\u00a0\7\t\2\2\u00a0\u00a1\7R\2\2\u00a1"+ - 
"\u00a2\7\17\2\2\u00a2\u00a3\5\36\20\2\u00a3\u00a4\7\n\2\2\u00a4\u00a5"+ - "\5\n\6\2\u00a5\u00c0\3\2\2\2\u00a6\u00a7\5\24\13\2\u00a7\u00a8\5\34\17"+ - "\2\u00a8\u00c0\3\2\2\2\u00a9\u00aa\7\24\2\2\u00aa\u00c0\5\34\17\2\u00ab"+ - "\u00ac\7\25\2\2\u00ac\u00c0\5\34\17\2\u00ad\u00ae\7\26\2\2\u00ae\u00af"+ - "\5\36\20\2\u00af\u00b0\5\34\17\2\u00b0\u00c0\3\2\2\2\u00b1\u00b2\7\30"+ - "\2\2\u00b2\u00b4\5\f\7\2\u00b3\u00b5\5\32\16\2\u00b4\u00b3\3\2\2\2\u00b5"+ - "\u00b6\3\2\2\2\u00b6\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00c0\3\2"+ - "\2\2\u00b8\u00b9\7\32\2\2\u00b9\u00ba\5\36\20\2\u00ba\u00bb\5\34\17\2"+ - "\u00bb\u00c0\3\2\2\2\u00bc\u00bd\5\36\20\2\u00bd\u00be\5\34\17\2\u00be"+ - "\u00c0\3\2\2\2\u00bfi\3\2\2\2\u00bfs\3\2\2\2\u00bf{\3\2\2\2\u00bf\u0083"+ - "\3\2\2\2\u00bf\u0095\3\2\2\2\u00bf\u009e\3\2\2\2\u00bf\u00a6\3\2\2\2\u00bf"+ - "\u00a9\3\2\2\2\u00bf\u00ab\3\2\2\2\u00bf\u00ad\3\2\2\2\u00bf\u00b1\3\2"+ - "\2\2\u00bf\u00b8\3\2\2\2\u00bf\u00bc\3\2\2\2\u00c0\t\3\2\2\2\u00c1\u00c4"+ - "\5\f\7\2\u00c2\u00c4\5\b\5\2\u00c3\u00c1\3\2\2\2\u00c3\u00c2\3\2\2\2\u00c4"+ - "\13\3\2\2\2\u00c5\u00c9\7\5\2\2\u00c6\u00c8\5\b\5\2\u00c7\u00c6\3\2\2"+ - "\2\u00c8\u00cb\3\2\2\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00cc"+ - "\3\2\2\2\u00cb\u00c9\3\2\2\2\u00cc\u00cd\7\6\2\2\u00cd\r\3\2\2\2\u00ce"+ - "\u00cf\7\r\2\2\u00cf\17\3\2\2\2\u00d0\u00d3\5\24\13\2\u00d1\u00d3\5\36"+ - "\20\2\u00d2\u00d0\3\2\2\2\u00d2\u00d1\3\2\2\2\u00d3\21\3\2\2\2\u00d4\u00d5"+ - "\5\36\20\2\u00d5\23\3\2\2\2\u00d6\u00d7\5\26\f\2\u00d7\u00dc\5\30\r\2"+ - "\u00d8\u00d9\7\f\2\2\u00d9\u00db\5\30\r\2\u00da\u00d8\3\2\2\2\u00db\u00de"+ - "\3\2\2\2\u00dc\u00da\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\25\3\2\2\2\u00de"+ - "\u00dc\3\2\2\2\u00df\u00e4\7Q\2\2\u00e0\u00e1\7\7\2\2\u00e1\u00e3\7\b"+ - "\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e6\3\2\2\2\u00e4\u00e2\3\2\2\2\u00e4"+ - "\u00e5\3\2\2\2\u00e5\27\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7\u00ea\7R\2\2"+ - "\u00e8\u00e9\7<\2\2\u00e9\u00eb\5\36\20\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb"+ - "\3\2\2\2\u00eb\31\3\2\2\2\u00ec\u00ed\7\31\2\2\u00ed\u00ee\7\t\2\2\u00ee"+ - "\u00ef\7Q\2\2\u00ef\u00f0\7R\2\2\u00f0\u00f1\7\n\2\2\u00f1\u00f2\5\f\7"+ - "\2\u00f2\33\3\2\2\2\u00f3\u00f4\t\2\2\2\u00f4\35\3\2\2\2\u00f5\u00f6\b"+ - "\20\1\2\u00f6\u00f7\5\"\22\2\u00f7\u00f8\t\3\2\2\u00f8\u00f9\5\36\20\3"+ - "\u00f9\u00fa\b\20\1\2\u00fa\u00ff\3\2\2\2\u00fb\u00fc\5 \21\2\u00fc\u00fd"+ - "\b\20\1\2\u00fd\u00ff\3\2\2\2\u00fe\u00f5\3\2\2\2\u00fe\u00fb\3\2\2\2"+ - "\u00ff\u0145\3\2\2\2\u0100\u0101\f\20\2\2\u0101\u0102\t\4\2\2\u0102\u0103"+ - "\5\36\20\21\u0103\u0104\b\20\1\2\u0104\u0144\3\2\2\2\u0105\u0106\f\17"+ - "\2\2\u0106\u0107\t\5\2\2\u0107\u0108\5\36\20\20\u0108\u0109\b\20\1\2\u0109"+ - "\u0144\3\2\2\2\u010a\u010b\f\16\2\2\u010b\u010c\t\6\2\2\u010c\u010d\5"+ - "\36\20\17\u010d\u010e\b\20\1\2\u010e\u0144\3\2\2\2\u010f\u0110\f\r\2\2"+ - "\u0110\u0111\t\7\2\2\u0111\u0112\5\36\20\16\u0112\u0113\b\20\1\2\u0113"+ - "\u0144\3\2\2\2\u0114\u0115\f\f\2\2\u0115\u0116\t\b\2\2\u0116\u0117\5\36"+ - "\20\r\u0117\u0118\b\20\1\2\u0118\u0144\3\2\2\2\u0119\u011a\f\n\2\2\u011a"+ - "\u011b\t\t\2\2\u011b\u011c\5\36\20\13\u011c\u011d\b\20\1\2\u011d\u0144"+ - "\3\2\2\2\u011e\u011f\f\t\2\2\u011f\u0120\7/\2\2\u0120\u0121\5\36\20\n"+ - "\u0121\u0122\b\20\1\2\u0122\u0144\3\2\2\2\u0123\u0124\f\b\2\2\u0124\u0125"+ - "\7\60\2\2\u0125\u0126\5\36\20\t\u0126\u0127\b\20\1\2\u0127\u0144\3\2\2"+ - "\2\u0128\u0129\f\7\2\2\u0129\u012a\7\61\2\2\u012a\u012b\5\36\20\b\u012b"+ - "\u012c\b\20\1\2\u012c\u0144\3\2\2\2\u012d\u012e\f\6\2\2\u012e\u012f\7"+ - 
"\62\2\2\u012f\u0130\5\36\20\7\u0130\u0131\b\20\1\2\u0131\u0144\3\2\2\2"+ - "\u0132\u0133\f\5\2\2\u0133\u0134\7\63\2\2\u0134\u0135\5\36\20\6\u0135"+ - "\u0136\b\20\1\2\u0136\u0144\3\2\2\2\u0137\u0138\f\4\2\2\u0138\u0139\7"+ - "\64\2\2\u0139\u013a\5\36\20\2\u013a\u013b\7\65\2\2\u013b\u013c\5\36\20"+ - "\4\u013c\u013d\b\20\1\2\u013d\u0144\3\2\2\2\u013e\u013f\f\13\2\2\u013f"+ - "\u0140\7\34\2\2\u0140\u0141\5\26\f\2\u0141\u0142\b\20\1\2\u0142\u0144"+ - "\3\2\2\2\u0143\u0100\3\2\2\2\u0143\u0105\3\2\2\2\u0143\u010a\3\2\2\2\u0143"+ - "\u010f\3\2\2\2\u0143\u0114\3\2\2\2\u0143\u0119\3\2\2\2\u0143\u011e\3\2"+ - "\2\2\u0143\u0123\3\2\2\2\u0143\u0128\3\2\2\2\u0143\u012d\3\2\2\2\u0143"+ - "\u0132\3\2\2\2\u0143\u0137\3\2\2\2\u0143\u013e\3\2\2\2\u0144\u0147\3\2"+ - "\2\2\u0145\u0143\3\2\2\2\u0145\u0146\3\2\2\2\u0146\37\3\2\2\2\u0147\u0145"+ - "\3\2\2\2\u0148\u0149\6\21\20\3\u0149\u014a\t\n\2\2\u014a\u016e\5\"\22"+ - "\2\u014b\u014c\6\21\21\3\u014c\u014d\5\"\22\2\u014d\u014e\t\n\2\2\u014e"+ - "\u016e\3\2\2\2\u014f\u0150\6\21\22\3\u0150\u016e\5\"\22\2\u0151\u0152"+ - "\6\21\23\3\u0152\u0153\t\13\2\2\u0153\u016e\b\21\1\2\u0154\u0155\6\21"+ - "\24\3\u0155\u0156\7N\2\2\u0156\u016e\b\21\1\2\u0157\u0158\6\21\25\3\u0158"+ - "\u0159\7O\2\2\u0159\u016e\b\21\1\2\u015a\u015b\6\21\26\3\u015b\u015c\7"+ - "P\2\2\u015c\u016e\b\21\1\2\u015d\u015e\6\21\27\3\u015e\u015f\5@!\2\u015f"+ - "\u0160\b\21\1\2\u0160\u016e\3\2\2\2\u0161\u0162\6\21\30\3\u0162\u0163"+ - "\5B\"\2\u0163\u0164\b\21\1\2\u0164\u016e\3\2\2\2\u0165\u0166\6\21\31\3"+ - "\u0166\u0167\t\f\2\2\u0167\u016e\5 \21\2\u0168\u0169\7\t\2\2\u0169\u016a"+ - "\5\26\f\2\u016a\u016b\7\n\2\2\u016b\u016c\5 \21\2\u016c\u016e\3\2\2\2"+ - "\u016d\u0148\3\2\2\2\u016d\u014b\3\2\2\2\u016d\u014f\3\2\2\2\u016d\u0151"+ - "\3\2\2\2\u016d\u0154\3\2\2\2\u016d\u0157\3\2\2\2\u016d\u015a\3\2\2\2\u016d"+ - "\u015d\3\2\2\2\u016d\u0161\3\2\2\2\u016d\u0165\3\2\2\2\u016d\u0168\3\2"+ - "\2\2\u016e!\3\2\2\2\u016f\u0173\5$\23\2\u0170\u0172\5&\24\2\u0171\u0170"+ - "\3\2\2\2\u0172\u0175\3\2\2\2\u0173\u0171\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+ - "\u0180\3\2\2\2\u0175\u0173\3\2\2\2\u0176\u0177\5\26\f\2\u0177\u017b\5"+ - "(\25\2\u0178\u017a\5&\24\2\u0179\u0178\3\2\2\2\u017a\u017d\3\2\2\2\u017b"+ - "\u0179\3\2\2\2\u017b\u017c\3\2\2\2\u017c\u0180\3\2\2\2\u017d\u017b\3\2"+ - "\2\2\u017e\u0180\5> \2\u017f\u016f\3\2\2\2\u017f\u0176\3\2\2\2\u017f\u017e"+ - "\3\2\2\2\u0180#\3\2\2\2\u0181\u0182\6\23\32\3\u0182\u0183\7\t\2\2\u0183"+ - "\u0184\5\36\20\2\u0184\u0185\7\n\2\2\u0185\u0186\b\23\1\2\u0186\u0195"+ - "\3\2\2\2\u0187\u0188\6\23\33\3\u0188\u0189\7\t\2\2\u0189\u018a\5 \21\2"+ - "\u018a\u018b\7\n\2\2\u018b\u0195\3\2\2\2\u018c\u0195\7L\2\2\u018d\u0195"+ - "\7M\2\2\u018e\u0195\7R\2\2\u018f\u0190\7R\2\2\u0190\u0195\5,\27\2\u0191"+ - "\u0192\7\27\2\2\u0192\u0193\7Q\2\2\u0193\u0195\5,\27\2\u0194\u0181\3\2"+ - "\2\2\u0194\u0187\3\2\2\2\u0194\u018c\3\2\2\2\u0194\u018d\3\2\2\2\u0194"+ - "\u018e\3\2\2\2\u0194\u018f\3\2\2\2\u0194\u0191\3\2\2\2\u0195%\3\2\2\2"+ - "\u0196\u0197\6\24\34\3\u0197\u019b\5(\25\2\u0198\u0199\6\24\35\3\u0199"+ - "\u019b\5*\26\2\u019a\u0196\3\2\2\2\u019a\u0198\3\2\2\2\u019b\'\3\2\2\2"+ - "\u019c\u019d\7\13\2\2\u019d\u019e\7T\2\2\u019e\u01a2\5,\27\2\u019f\u01a0"+ - "\7\13\2\2\u01a0\u01a2\t\r\2\2\u01a1\u019c\3\2\2\2\u01a1\u019f\3\2\2\2"+ - "\u01a2)\3\2\2\2\u01a3\u01a4\7\7\2\2\u01a4\u01a5\5\36\20\2\u01a5\u01a6"+ - "\7\b\2\2\u01a6+\3\2\2\2\u01a7\u01b0\7\t\2\2\u01a8\u01ad\5.\30\2\u01a9"+ - "\u01aa\7\f\2\2\u01aa\u01ac\5.\30\2\u01ab\u01a9\3\2\2\2\u01ac\u01af\3\2"+ - 
"\2\2\u01ad\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01b1\3\2\2\2\u01af"+ - "\u01ad\3\2\2\2\u01b0\u01a8\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01b2\3\2"+ - "\2\2\u01b2\u01b3\7\n\2\2\u01b3-\3\2\2\2\u01b4\u01b8\5\36\20\2\u01b5\u01b8"+ - "\5\60\31\2\u01b6\u01b8\5\64\33\2\u01b7\u01b4\3\2\2\2\u01b7\u01b5\3\2\2"+ - "\2\u01b7\u01b6\3\2\2\2\u01b8/\3\2\2\2\u01b9\u01c7\5\62\32\2\u01ba\u01c3"+ - "\7\t\2\2\u01bb\u01c0\5\62\32\2\u01bc\u01bd\7\f\2\2\u01bd\u01bf\5\62\32"+ - "\2\u01be\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2\u01c0\u01c1"+ - "\3\2\2\2\u01c1\u01c4\3\2\2\2\u01c2\u01c0\3\2\2\2\u01c3\u01bb\3\2\2\2\u01c3"+ - "\u01c4\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5\u01c7\7\n\2\2\u01c6\u01b9\3\2"+ - "\2\2\u01c6\u01ba\3\2\2\2\u01c7\u01c8\3\2\2\2\u01c8\u01cb\7\67\2\2\u01c9"+ - "\u01cc\5\f\7\2\u01ca\u01cc\5\36\20\2\u01cb\u01c9\3\2\2\2\u01cb\u01ca\3"+ - "\2\2\2\u01cc\61\3\2\2\2\u01cd\u01cf\5\26\f\2\u01ce\u01cd\3\2\2\2\u01ce"+ - "\u01cf\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\7R\2\2\u01d1\63\3\2\2\2"+ - "\u01d2\u01d7\5\66\34\2\u01d3\u01d7\58\35\2\u01d4\u01d7\5:\36\2\u01d5\u01d7"+ - "\5<\37\2\u01d6\u01d2\3\2\2\2\u01d6\u01d3\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d6"+ - "\u01d5\3\2\2\2\u01d7\65\3\2\2\2\u01d8\u01d9\7Q\2\2\u01d9\u01da\7\66\2"+ - "\2\u01da\u01db\7R\2\2\u01db\67\3\2\2\2\u01dc\u01dd\5\26\f\2\u01dd\u01de"+ - "\7\66\2\2\u01de\u01df\7\27\2\2\u01df9\3\2\2\2\u01e0\u01e1\7R\2\2\u01e1"+ - "\u01e2\7\66\2\2\u01e2\u01e3\7R\2\2\u01e3;\3\2\2\2\u01e4\u01e5\7\33\2\2"+ - "\u01e5\u01e6\7\66\2\2\u01e6\u01e7\7R\2\2\u01e7=\3\2\2\2\u01e8\u01e9\7"+ - "\27\2\2\u01e9\u01ee\7Q\2\2\u01ea\u01eb\7\7\2\2\u01eb\u01ec\5\36\20\2\u01ec"+ - "\u01ed\7\b\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ea\3\2\2\2\u01ef\u01f0\3\2"+ - "\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f9\3\2\2\2\u01f2"+ - "\u01f6\5(\25\2\u01f3\u01f5\5&\24\2\u01f4\u01f3\3\2\2\2\u01f5\u01f8\3\2"+ - "\2\2\u01f6\u01f4\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7\u01fa\3\2\2\2\u01f8"+ - "\u01f6\3\2\2\2\u01f9\u01f2\3\2\2\2\u01f9\u01fa\3\2\2\2\u01fa\u020f\3\2"+ - "\2\2\u01fb\u01fc\7\27\2\2\u01fc\u01fd\7Q\2\2\u01fd\u01fe\7\7\2\2\u01fe"+ - "\u01ff\7\b\2\2\u01ff\u0208\7\5\2\2\u0200\u0205\5\36\20\2\u0201\u0202\7"+ - "\f\2\2\u0202\u0204\5\36\20\2\u0203\u0201\3\2\2\2\u0204\u0207\3\2\2\2\u0205"+ - "\u0203\3\2\2\2\u0205\u0206\3\2\2\2\u0206\u0209\3\2\2\2\u0207\u0205\3\2"+ - "\2\2\u0208\u0200\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020b\3\2\2\2\u020a"+ - "\u020c\7\r\2\2\u020b\u020a\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u020d\3\2"+ - "\2\2\u020d\u020f\7\6\2\2\u020e\u01e8\3\2\2\2\u020e\u01fb\3\2\2\2\u020f"+ - "?\3\2\2\2\u0210\u0211\7\7\2\2\u0211\u0216\5\36\20\2\u0212\u0213\7\f\2"+ - "\2\u0213\u0215\5\36\20\2\u0214\u0212\3\2\2\2\u0215\u0218\3\2\2\2\u0216"+ - "\u0214\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u0219\3\2\2\2\u0218\u0216\3\2"+ - "\2\2\u0219\u021a\7\b\2\2\u021a\u021e\3\2\2\2\u021b\u021c\7\7\2\2\u021c"+ - "\u021e\7\b\2\2\u021d\u0210\3\2\2\2\u021d\u021b\3\2\2\2\u021eA\3\2\2\2"+ - "\u021f\u0220\7\7\2\2\u0220\u0225\5D#\2\u0221\u0222\7\f\2\2\u0222\u0224"+ - "\5D#\2\u0223\u0221\3\2\2\2\u0224\u0227\3\2\2\2\u0225\u0223\3\2\2\2\u0225"+ - "\u0226\3\2\2\2\u0226\u0228\3\2\2\2\u0227\u0225\3\2\2\2\u0228\u0229\7\b"+ - "\2\2\u0229\u022e\3\2\2\2\u022a\u022b\7\7\2\2\u022b\u022c\7\65\2\2\u022c"+ - "\u022e\7\b\2\2\u022d\u021f\3\2\2\2\u022d\u022a\3\2\2\2\u022eC\3\2\2\2"+ - "\u022f\u0230\5\36\20\2\u0230\u0231\7\65\2\2\u0231\u0232\5\36\20\2\u0232"+ - "E\3\2\2\2\62IObeqy\u0086\u008a\u008e\u0093\u00b6\u00bf\u00c3\u00c9\u00d2"+ - "\u00dc\u00e4\u00ea\u00fe\u0143\u0145\u016d\u0173\u017b\u017f\u0194\u019a"+ - 
"\u01a1\u01ad\u01b0\u01b7\u01c0\u01c3\u01c6\u01cb\u01ce\u01d6\u01f0\u01f6"+ - "\u01f9\u0205\u0208\u020b\u020e\u0216\u021d\u0225\u022d"; + "\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u0122\n\20\f\20\16\20\u0125\13\20"+ + "\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\5\21"+ + "\u0134\n\21\3\22\3\22\7\22\u0138\n\22\f\22\16\22\u013b\13\22\3\22\3\22"+ + "\3\22\7\22\u0140\n\22\f\22\16\22\u0143\13\22\3\22\5\22\u0146\n\22\3\23"+ + "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+ + "\3\23\3\23\3\23\5\23\u015a\n\23\3\24\3\24\3\24\5\24\u015f\n\24\3\25\3"+ + "\25\5\25\u0163\n\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\30"+ + "\3\30\3\31\3\31\3\31\3\31\3\31\3\31\6\31\u0176\n\31\r\31\16\31\u0177\3"+ + "\31\3\31\7\31\u017c\n\31\f\31\16\31\u017f\13\31\5\31\u0181\n\31\3\31\3"+ + "\31\3\31\3\31\3\31\3\31\3\31\3\31\7\31\u018b\n\31\f\31\16\31\u018e\13"+ + "\31\5\31\u0190\n\31\3\31\5\31\u0193\n\31\3\31\3\31\7\31\u0197\n\31\f\31"+ + "\16\31\u019a\13\31\5\31\u019c\n\31\3\32\3\32\3\32\3\32\7\32\u01a2\n\32"+ + "\f\32\16\32\u01a5\13\32\3\32\3\32\3\32\3\32\5\32\u01ab\n\32\3\33\3\33"+ + "\3\33\3\33\7\33\u01b1\n\33\f\33\16\33\u01b4\13\33\3\33\3\33\3\33\3\33"+ + "\3\33\5\33\u01bb\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\7\35\u01c5"+ + "\n\35\f\35\16\35\u01c8\13\35\5\35\u01ca\n\35\3\35\3\35\3\36\3\36\3\36"+ + "\5\36\u01d1\n\36\3\37\3\37\3\37\3\37\3\37\7\37\u01d8\n\37\f\37\16\37\u01db"+ + "\13\37\5\37\u01dd\n\37\3\37\5\37\u01e0\n\37\3\37\3\37\3\37\5\37\u01e5"+ + "\n\37\3 \5 \u01e8\n \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\5!\u01f9"+ + "\n!\3!\2\3\36\"\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62"+ + "\64\668:<>@\2\16\3\3\r\r\3\2\37!\3\2\"#\3\289\3\2$&\3\2\'*\3\2+.\3\2<"+ + "G\3\2:;\4\2\35\36\"#\3\2HK\3\2ST\u0233\2E\3\2\2\2\4P\3\2\2\2\6U\3\2\2"+ + "\2\b\u00bb\3\2\2\2\n\u00bf\3\2\2\2\f\u00c1\3\2\2\2\16\u00ca\3\2\2\2\20"+ + "\u00ce\3\2\2\2\22\u00d0\3\2\2\2\24\u00d2\3\2\2\2\26\u00db\3\2\2\2\30\u00e3"+ + "\3\2\2\2\32\u00e8\3\2\2\2\34\u00ef\3\2\2\2\36\u00f1\3\2\2\2 \u0133\3\2"+ + "\2\2\"\u0145\3\2\2\2$\u0159\3\2\2\2&\u015e\3\2\2\2(\u0162\3\2\2\2*\u0164"+ + "\3\2\2\2,\u0168\3\2\2\2.\u016b\3\2\2\2\60\u019b\3\2\2\2\62\u01aa\3\2\2"+ + "\2\64\u01ba\3\2\2\2\66\u01bc\3\2\2\28\u01c0\3\2\2\2:\u01d0\3\2\2\2<\u01df"+ + "\3\2\2\2>\u01e7\3\2\2\2@\u01f8\3\2\2\2BD\5\4\3\2CB\3\2\2\2DG\3\2\2\2E"+ + "C\3\2\2\2EF\3\2\2\2FK\3\2\2\2GE\3\2\2\2HJ\5\b\5\2IH\3\2\2\2JM\3\2\2\2"+ + "KI\3\2\2\2KL\3\2\2\2LN\3\2\2\2MK\3\2\2\2NO\7\2\2\3O\3\3\2\2\2PQ\5\26\f"+ + "\2QR\7R\2\2RS\5\6\4\2ST\5\f\7\2T\5\3\2\2\2Ua\7\t\2\2VW\5\26\f\2W^\7R\2"+ + "\2XY\7\f\2\2YZ\5\26\f\2Z[\7R\2\2[]\3\2\2\2\\X\3\2\2\2]`\3\2\2\2^\\\3\2"+ + "\2\2^_\3\2\2\2_b\3\2\2\2`^\3\2\2\2aV\3\2\2\2ab\3\2\2\2bc\3\2\2\2cd\7\n"+ + "\2\2d\7\3\2\2\2ef\7\16\2\2fg\7\t\2\2gh\5\36\20\2hi\7\n\2\2im\5\n\6\2j"+ + "k\7\20\2\2kn\5\n\6\2ln\6\5\2\2mj\3\2\2\2ml\3\2\2\2n\u00bc\3\2\2\2op\7"+ + "\21\2\2pq\7\t\2\2qr\5\36\20\2ru\7\n\2\2sv\5\n\6\2tv\5\16\b\2us\3\2\2\2"+ + "ut\3\2\2\2v\u00bc\3\2\2\2wx\7\22\2\2xy\5\f\7\2yz\7\21\2\2z{\7\t\2\2{|"+ + "\5\36\20\2|}\7\n\2\2}~\5\34\17\2~\u00bc\3\2\2\2\177\u0080\7\23\2\2\u0080"+ + "\u0082\7\t\2\2\u0081\u0083\5\20\t\2\u0082\u0081\3\2\2\2\u0082\u0083\3"+ + "\2\2\2\u0083\u0084\3\2\2\2\u0084\u0086\7\r\2\2\u0085\u0087\5\36\20\2\u0086"+ + "\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088\u008a\7\r"+ + "\2\2\u0089\u008b\5\22\n\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b"+ + "\u008c\3\2\2\2\u008c\u008f\7\n\2\2\u008d\u0090\5\n\6\2\u008e\u0090\5\16"+ + 
"\b\2\u008f\u008d\3\2\2\2\u008f\u008e\3\2\2\2\u0090\u00bc\3\2\2\2\u0091"+ + "\u0092\7\23\2\2\u0092\u0093\7\t\2\2\u0093\u0094\5\26\f\2\u0094\u0095\7"+ + "R\2\2\u0095\u0096\7\65\2\2\u0096\u0097\5\36\20\2\u0097\u0098\7\n\2\2\u0098"+ + "\u0099\5\n\6\2\u0099\u00bc\3\2\2\2\u009a\u009b\7\23\2\2\u009b\u009c\7"+ + "\t\2\2\u009c\u009d\7R\2\2\u009d\u009e\7\17\2\2\u009e\u009f\5\36\20\2\u009f"+ + "\u00a0\7\n\2\2\u00a0\u00a1\5\n\6\2\u00a1\u00bc\3\2\2\2\u00a2\u00a3\5\24"+ + "\13\2\u00a3\u00a4\5\34\17\2\u00a4\u00bc\3\2\2\2\u00a5\u00a6\7\24\2\2\u00a6"+ + "\u00bc\5\34\17\2\u00a7\u00a8\7\25\2\2\u00a8\u00bc\5\34\17\2\u00a9\u00aa"+ + "\7\26\2\2\u00aa\u00ab\5\36\20\2\u00ab\u00ac\5\34\17\2\u00ac\u00bc\3\2"+ + "\2\2\u00ad\u00ae\7\30\2\2\u00ae\u00b0\5\f\7\2\u00af\u00b1\5\32\16\2\u00b0"+ + "\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2"+ + "\2\2\u00b3\u00bc\3\2\2\2\u00b4\u00b5\7\32\2\2\u00b5\u00b6\5\36\20\2\u00b6"+ + "\u00b7\5\34\17\2\u00b7\u00bc\3\2\2\2\u00b8\u00b9\5\36\20\2\u00b9\u00ba"+ + "\5\34\17\2\u00ba\u00bc\3\2\2\2\u00bbe\3\2\2\2\u00bbo\3\2\2\2\u00bbw\3"+ + "\2\2\2\u00bb\177\3\2\2\2\u00bb\u0091\3\2\2\2\u00bb\u009a\3\2\2\2\u00bb"+ + "\u00a2\3\2\2\2\u00bb\u00a5\3\2\2\2\u00bb\u00a7\3\2\2\2\u00bb\u00a9\3\2"+ + "\2\2\u00bb\u00ad\3\2\2\2\u00bb\u00b4\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bc"+ + "\t\3\2\2\2\u00bd\u00c0\5\f\7\2\u00be\u00c0\5\b\5\2\u00bf\u00bd\3\2\2\2"+ + "\u00bf\u00be\3\2\2\2\u00c0\13\3\2\2\2\u00c1\u00c5\7\5\2\2\u00c2\u00c4"+ + "\5\b\5\2\u00c3\u00c2\3\2\2\2\u00c4\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5"+ + "\u00c6\3\2\2\2\u00c6\u00c8\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7\6"+ + "\2\2\u00c9\r\3\2\2\2\u00ca\u00cb\7\r\2\2\u00cb\17\3\2\2\2\u00cc\u00cf"+ + "\5\24\13\2\u00cd\u00cf\5\36\20\2\u00ce\u00cc\3\2\2\2\u00ce\u00cd\3\2\2"+ + "\2\u00cf\21\3\2\2\2\u00d0\u00d1\5\36\20\2\u00d1\23\3\2\2\2\u00d2\u00d3"+ + "\5\26\f\2\u00d3\u00d8\5\30\r\2\u00d4\u00d5\7\f\2\2\u00d5\u00d7\5\30\r"+ + "\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9"+ + "\3\2\2\2\u00d9\25\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00e0\7Q\2\2\u00dc"+ + "\u00dd\7\7\2\2\u00dd\u00df\7\b\2\2\u00de\u00dc\3\2\2\2\u00df\u00e2\3\2"+ + "\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1\3\2\2\2\u00e1\27\3\2\2\2\u00e2\u00e0"+ + "\3\2\2\2\u00e3\u00e6\7R\2\2\u00e4\u00e5\7<\2\2\u00e5\u00e7\5\36\20\2\u00e6"+ + "\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7\31\3\2\2\2\u00e8\u00e9\7\31\2"+ + "\2\u00e9\u00ea\7\t\2\2\u00ea\u00eb\7Q\2\2\u00eb\u00ec\7R\2\2\u00ec\u00ed"+ + "\7\n\2\2\u00ed\u00ee\5\f\7\2\u00ee\33\3\2\2\2\u00ef\u00f0\t\2\2\2\u00f0"+ + "\35\3\2\2\2\u00f1\u00f2\b\20\1\2\u00f2\u00f3\5 \21\2\u00f3\u0123\3\2\2"+ + "\2\u00f4\u00f5\f\20\2\2\u00f5\u00f6\t\3\2\2\u00f6\u0122\5\36\20\21\u00f7"+ + "\u00f8\f\17\2\2\u00f8\u00f9\t\4\2\2\u00f9\u0122\5\36\20\20\u00fa\u00fb"+ + "\f\16\2\2\u00fb\u00fc\t\5\2\2\u00fc\u0122\5\36\20\17\u00fd\u00fe\f\r\2"+ + "\2\u00fe\u00ff\t\6\2\2\u00ff\u0122\5\36\20\16\u0100\u0101\f\f\2\2\u0101"+ + "\u0102\t\7\2\2\u0102\u0122\5\36\20\r\u0103\u0104\f\n\2\2\u0104\u0105\t"+ + "\b\2\2\u0105\u0122\5\36\20\13\u0106\u0107\f\t\2\2\u0107\u0108\7/\2\2\u0108"+ + "\u0122\5\36\20\n\u0109\u010a\f\b\2\2\u010a\u010b\7\60\2\2\u010b\u0122"+ + "\5\36\20\t\u010c\u010d\f\7\2\2\u010d\u010e\7\61\2\2\u010e\u0122\5\36\20"+ + "\b\u010f\u0110\f\6\2\2\u0110\u0111\7\62\2\2\u0111\u0122\5\36\20\7\u0112"+ + "\u0113\f\5\2\2\u0113\u0114\7\63\2\2\u0114\u0122\5\36\20\6\u0115\u0116"+ + "\f\4\2\2\u0116\u0117\7\64\2\2\u0117\u0118\5\36\20\2\u0118\u0119\7\65\2"+ + "\2\u0119\u011a\5\36\20\4\u011a\u0122\3\2\2\2\u011b\u011c\f\3\2\2\u011c"+ + 
"\u011d\t\t\2\2\u011d\u0122\5\36\20\3\u011e\u011f\f\13\2\2\u011f\u0120"+ + "\7\34\2\2\u0120\u0122\5\26\f\2\u0121\u00f4\3\2\2\2\u0121\u00f7\3\2\2\2"+ + "\u0121\u00fa\3\2\2\2\u0121\u00fd\3\2\2\2\u0121\u0100\3\2\2\2\u0121\u0103"+ + "\3\2\2\2\u0121\u0106\3\2\2\2\u0121\u0109\3\2\2\2\u0121\u010c\3\2\2\2\u0121"+ + "\u010f\3\2\2\2\u0121\u0112\3\2\2\2\u0121\u0115\3\2\2\2\u0121\u011b\3\2"+ + "\2\2\u0121\u011e\3\2\2\2\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123"+ + "\u0124\3\2\2\2\u0124\37\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0127\t\n\2"+ + "\2\u0127\u0134\5\"\22\2\u0128\u0129\5\"\22\2\u0129\u012a\t\n\2\2\u012a"+ + "\u0134\3\2\2\2\u012b\u0134\5\"\22\2\u012c\u012d\t\13\2\2\u012d\u0134\5"+ + " \21\2\u012e\u012f\7\t\2\2\u012f\u0130\5\26\f\2\u0130\u0131\7\n\2\2\u0131"+ + "\u0132\5 \21\2\u0132\u0134\3\2\2\2\u0133\u0126\3\2\2\2\u0133\u0128\3\2"+ + "\2\2\u0133\u012b\3\2\2\2\u0133\u012c\3\2\2\2\u0133\u012e\3\2\2\2\u0134"+ + "!\3\2\2\2\u0135\u0139\5$\23\2\u0136\u0138\5&\24\2\u0137\u0136\3\2\2\2"+ + "\u0138\u013b\3\2\2\2\u0139\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u0146"+ + "\3\2\2\2\u013b\u0139\3\2\2\2\u013c\u013d\5\26\f\2\u013d\u0141\5(\25\2"+ + "\u013e\u0140\5&\24\2\u013f\u013e\3\2\2\2\u0140\u0143\3\2\2\2\u0141\u013f"+ + "\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u0146\3\2\2\2\u0143\u0141\3\2\2\2\u0144"+ + "\u0146\5\60\31\2\u0145\u0135\3\2\2\2\u0145\u013c\3\2\2\2\u0145\u0144\3"+ + "\2\2\2\u0146#\3\2\2\2\u0147\u0148\7\t\2\2\u0148\u0149\5\36\20\2\u0149"+ + "\u014a\7\n\2\2\u014a\u015a\3\2\2\2\u014b\u015a\t\f\2\2\u014c\u015a\7N"+ + "\2\2\u014d\u015a\7O\2\2\u014e\u015a\7P\2\2\u014f\u015a\7L\2\2\u0150\u015a"+ + "\7M\2\2\u0151\u015a\5\62\32\2\u0152\u015a\5\64\33\2\u0153\u015a\7R\2\2"+ + "\u0154\u0155\7R\2\2\u0155\u015a\58\35\2\u0156\u0157\7\27\2\2\u0157\u0158"+ + "\7Q\2\2\u0158\u015a\58\35\2\u0159\u0147\3\2\2\2\u0159\u014b\3\2\2\2\u0159"+ + "\u014c\3\2\2\2\u0159\u014d\3\2\2\2\u0159\u014e\3\2\2\2\u0159\u014f\3\2"+ + "\2\2\u0159\u0150\3\2\2\2\u0159\u0151\3\2\2\2\u0159\u0152\3\2\2\2\u0159"+ + "\u0153\3\2\2\2\u0159\u0154\3\2\2\2\u0159\u0156\3\2\2\2\u015a%\3\2\2\2"+ + "\u015b\u015f\5*\26\2\u015c\u015f\5,\27\2\u015d\u015f\5.\30\2\u015e\u015b"+ + "\3\2\2\2\u015e\u015c\3\2\2\2\u015e\u015d\3\2\2\2\u015f\'\3\2\2\2\u0160"+ + "\u0163\5*\26\2\u0161\u0163\5,\27\2\u0162\u0160\3\2\2\2\u0162\u0161\3\2"+ + "\2\2\u0163)\3\2\2\2\u0164\u0165\7\13\2\2\u0165\u0166\7T\2\2\u0166\u0167"+ + "\58\35\2\u0167+\3\2\2\2\u0168\u0169\7\13\2\2\u0169\u016a\t\r\2\2\u016a"+ + "-\3\2\2\2\u016b\u016c\7\7\2\2\u016c\u016d\5\36\20\2\u016d\u016e\7\b\2"+ + "\2\u016e/\3\2\2\2\u016f\u0170\7\27\2\2\u0170\u0175\7Q\2\2\u0171\u0172"+ + "\7\7\2\2\u0172\u0173\5\36\20\2\u0173\u0174\7\b\2\2\u0174\u0176\3\2\2\2"+ + "\u0175\u0171\3\2\2\2\u0176\u0177\3\2\2\2\u0177\u0175\3\2\2\2\u0177\u0178"+ + "\3\2\2\2\u0178\u0180\3\2\2\2\u0179\u017d\5(\25\2\u017a\u017c\5&\24\2\u017b"+ + "\u017a\3\2\2\2\u017c\u017f\3\2\2\2\u017d\u017b\3\2\2\2\u017d\u017e\3\2"+ + "\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2\2\u0180\u0179\3\2\2\2\u0180"+ + "\u0181\3\2\2\2\u0181\u019c\3\2\2\2\u0182\u0183\7\27\2\2\u0183\u0184\7"+ + "Q\2\2\u0184\u0185\7\7\2\2\u0185\u0186\7\b\2\2\u0186\u018f\7\5\2\2\u0187"+ + "\u018c\5\36\20\2\u0188\u0189\7\f\2\2\u0189\u018b\5\36\20\2\u018a\u0188"+ + "\3\2\2\2\u018b\u018e\3\2\2\2\u018c\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d"+ + "\u0190\3\2\2\2\u018e\u018c\3\2\2\2\u018f\u0187\3\2\2\2\u018f\u0190\3\2"+ + "\2\2\u0190\u0192\3\2\2\2\u0191\u0193\7\r\2\2\u0192\u0191\3\2\2\2\u0192"+ + "\u0193\3\2\2\2\u0193\u0194\3\2\2\2\u0194\u0198\7\6\2\2\u0195\u0197\5&"+ + 
"\24\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196\3\2\2\2\u0198"+ + "\u0199\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2\2\2\u019b\u016f\3\2"+ + "\2\2\u019b\u0182\3\2\2\2\u019c\61\3\2\2\2\u019d\u019e\7\7\2\2\u019e\u01a3"+ + "\5\36\20\2\u019f\u01a0\7\f\2\2\u01a0\u01a2\5\36\20\2\u01a1\u019f\3\2\2"+ + "\2\u01a2\u01a5\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a6"+ + "\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a6\u01a7\7\b\2\2\u01a7\u01ab\3\2\2\2\u01a8"+ + "\u01a9\7\7\2\2\u01a9\u01ab\7\b\2\2\u01aa\u019d\3\2\2\2\u01aa\u01a8\3\2"+ + "\2\2\u01ab\63\3\2\2\2\u01ac\u01ad\7\7\2\2\u01ad\u01b2\5\66\34\2\u01ae"+ + "\u01af\7\f\2\2\u01af\u01b1\5\66\34\2\u01b0\u01ae\3\2\2\2\u01b1\u01b4\3"+ + "\2\2\2\u01b2\u01b0\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b5\3\2\2\2\u01b4"+ + "\u01b2\3\2\2\2\u01b5\u01b6\7\b\2\2\u01b6\u01bb\3\2\2\2\u01b7\u01b8\7\7"+ + "\2\2\u01b8\u01b9\7\65\2\2\u01b9\u01bb\7\b\2\2\u01ba\u01ac\3\2\2\2\u01ba"+ + "\u01b7\3\2\2\2\u01bb\65\3\2\2\2\u01bc\u01bd\5\36\20\2\u01bd\u01be\7\65"+ + "\2\2\u01be\u01bf\5\36\20\2\u01bf\67\3\2\2\2\u01c0\u01c9\7\t\2\2\u01c1"+ + "\u01c6\5:\36\2\u01c2\u01c3\7\f\2\2\u01c3\u01c5\5:\36\2\u01c4\u01c2\3\2"+ + "\2\2\u01c5\u01c8\3\2\2\2\u01c6\u01c4\3\2\2\2\u01c6\u01c7\3\2\2\2\u01c7"+ + "\u01ca\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c9\u01c1\3\2\2\2\u01c9\u01ca\3\2"+ + "\2\2\u01ca\u01cb\3\2\2\2\u01cb\u01cc\7\n\2\2\u01cc9\3\2\2\2\u01cd\u01d1"+ + "\5\36\20\2\u01ce\u01d1\5<\37\2\u01cf\u01d1\5@!\2\u01d0\u01cd\3\2\2\2\u01d0"+ + "\u01ce\3\2\2\2\u01d0\u01cf\3\2\2\2\u01d1;\3\2\2\2\u01d2\u01e0\5> \2\u01d3"+ + "\u01dc\7\t\2\2\u01d4\u01d9\5> \2\u01d5\u01d6\7\f\2\2\u01d6\u01d8\5> \2"+ + "\u01d7\u01d5\3\2\2\2\u01d8\u01db\3\2\2\2\u01d9\u01d7\3\2\2\2\u01d9\u01da"+ + "\3\2\2\2\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01dc\u01d4\3\2\2\2\u01dc"+ + "\u01dd\3\2\2\2\u01dd\u01de\3\2\2\2\u01de\u01e0\7\n\2\2\u01df\u01d2\3\2"+ + "\2\2\u01df\u01d3\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1\u01e4\7\67\2\2\u01e2"+ + "\u01e5\5\f\7\2\u01e3\u01e5\5\36\20\2\u01e4\u01e2\3\2\2\2\u01e4\u01e3\3"+ + "\2\2\2\u01e5=\3\2\2\2\u01e6\u01e8\5\26\f\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8"+ + "\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01ea\7R\2\2\u01ea?\3\2\2\2\u01eb\u01ec"+ + "\7Q\2\2\u01ec\u01ed\7\66\2\2\u01ed\u01f9\7R\2\2\u01ee\u01ef\5\26\f\2\u01ef"+ + "\u01f0\7\66\2\2\u01f0\u01f1\7\27\2\2\u01f1\u01f9\3\2\2\2\u01f2\u01f3\7"+ + "R\2\2\u01f3\u01f4\7\66\2\2\u01f4\u01f9\7R\2\2\u01f5\u01f6\7\33\2\2\u01f6"+ + "\u01f7\7\66\2\2\u01f7\u01f9\7R\2\2\u01f8\u01eb\3\2\2\2\u01f8\u01ee\3\2"+ + "\2\2\u01f8\u01f2\3\2\2\2\u01f8\u01f5\3\2\2\2\u01f9A\3\2\2\2\62EK^amu\u0082"+ + "\u0086\u008a\u008f\u00b2\u00bb\u00bf\u00c5\u00ce\u00d8\u00e0\u00e6\u0121"+ + "\u0123\u0133\u0139\u0141\u0145\u0159\u015e\u0162\u0177\u017d\u0180\u018c"+ + "\u018f\u0192\u0198\u019b\u01a3\u01aa\u01b2\u01ba\u01c6\u01c9\u01d0\u01d9"+ + "\u01dc\u01df\u01e4\u01e7\u01f8"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java index a1279d611e3..d3a87f1a099 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java @@ -263,48 +263,6 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.

    */
   @Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); }
-  /**
-   * {@inheritDoc}
-   *
-   * <p>The default implementation returns the result of calling
-   * {@link #visitChildren} on {@code ctx}.</p>
-   */
-  @Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); }
   /**
    * {@inheritDoc}
    *
@@ -346,14 +304,35 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
    * <p>The default implementation returns the result of calling
    * {@link #visitChildren} on {@code ctx}.</p>
    */
-  @Override public T visitExprprec(PainlessParser.ExprprecContext ctx) { return visitChildren(ctx); }
+  @Override public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { return visitChildren(ctx); }
   /**
    * {@inheritDoc}
    *
    * <p>The default implementation returns the result of calling
    * {@link #visitChildren} on {@code ctx}.</p>
    */
-  @Override public T visitChainprec(PainlessParser.ChainprecContext ctx) { return visitChildren(ctx); }
+  @Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation returns the result of calling
+   * {@link #visitChildren} on {@code ctx}.</p>
+   */
+  @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation returns the result of calling
+   * {@link #visitChildren} on {@code ctx}.</p>
+   */
+  @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation returns the result of calling
+   * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -368,6 +347,20 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitRegex(PainlessParser.RegexContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -395,7 +388,14 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitSecondary(PainlessParser.SecondaryContext ctx) { return visitChildren(ctx); } + @Override public T visitPostfix(PainlessParser.PostfixContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitPostdot(PainlessParser.PostdotContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -417,69 +417,6 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitBraceaccess(PainlessParser.BraceaccessContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitArgument(PainlessParser.ArgumentContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitLambda(PainlessParser.LambdaContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitLamtype(PainlessParser.LamtypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitFuncref(PainlessParser.FuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitClassFuncref(PainlessParser.ClassFuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitConstructorFuncref(PainlessParser.ConstructorFuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitCapturingFuncref(PainlessParser.CapturingFuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -515,4 +452,60 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitMaptoken(PainlessParser.MaptokenContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitArgument(PainlessParser.ArgumentContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitLambda(PainlessParser.LambdaContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitLamtype(PainlessParser.LamtypeContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitCapturingfuncref(PainlessParser.CapturingfuncrefContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx) { return visitChildren(ctx); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java index 8a297651070..0f7fef6185a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java @@ -249,48 +249,6 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitRead(PainlessParser.ReadContext ctx); - /** - * Visit a parse tree produced by the {@code numeric} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNumeric(PainlessParser.NumericContext ctx); - /** - * Visit a parse tree produced by the {@code true} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrue(PainlessParser.TrueContext ctx); - /** - * Visit a parse tree produced by the {@code false} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFalse(PainlessParser.FalseContext ctx); - /** - * Visit a parse tree produced by the {@code null} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNull(PainlessParser.NullContext ctx); - /** - * Visit a parse tree produced by the {@code listinit} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitListinit(PainlessParser.ListinitContext ctx); - /** - * Visit a parse tree produced by the {@code mapinit} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMapinit(PainlessParser.MapinitContext ctx); /** * Visit a parse tree produced by the {@code operator} * labeled alternative in {@link PainlessParser#unary}. @@ -327,19 +285,40 @@ interface PainlessParserVisitor extends ParseTreeVisitor { */ T visitNewarray(PainlessParser.NewarrayContext ctx); /** - * Visit a parse tree produced by the {@code exprprec} + * Visit a parse tree produced by the {@code precedence} * labeled alternative in {@link PainlessParser#primary}. * @param ctx the parse tree * @return the visitor result */ - T visitExprprec(PainlessParser.ExprprecContext ctx); + T visitPrecedence(PainlessParser.PrecedenceContext ctx); /** - * Visit a parse tree produced by the {@code chainprec} + * Visit a parse tree produced by the {@code numeric} * labeled alternative in {@link PainlessParser#primary}. * @param ctx the parse tree * @return the visitor result */ - T visitChainprec(PainlessParser.ChainprecContext ctx); + T visitNumeric(PainlessParser.NumericContext ctx); + /** + * Visit a parse tree produced by the {@code true} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrue(PainlessParser.TrueContext ctx); + /** + * Visit a parse tree produced by the {@code false} + * labeled alternative in {@link PainlessParser#primary}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitFalse(PainlessParser.FalseContext ctx); + /** + * Visit a parse tree produced by the {@code null} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNull(PainlessParser.NullContext ctx); /** * Visit a parse tree produced by the {@code string} * labeled alternative in {@link PainlessParser#primary}. @@ -354,6 +333,20 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitRegex(PainlessParser.RegexContext ctx); + /** + * Visit a parse tree produced by the {@code listinit} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitListinit(PainlessParser.ListinitContext ctx); + /** + * Visit a parse tree produced by the {@code mapinit} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMapinit(PainlessParser.MapinitContext ctx); /** * Visit a parse tree produced by the {@code variable} * labeled alternative in {@link PainlessParser#primary}. @@ -376,86 +369,35 @@ interface PainlessParserVisitor extends ParseTreeVisitor { */ T visitNewobject(PainlessParser.NewobjectContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#secondary}. + * Visit a parse tree produced by {@link PainlessParser#postfix}. * @param ctx the parse tree * @return the visitor result */ - T visitSecondary(PainlessParser.SecondaryContext ctx); + T visitPostfix(PainlessParser.PostfixContext ctx); /** - * Visit a parse tree produced by the {@code callinvoke} - * labeled alternative in {@link PainlessParser#dot}. + * Visit a parse tree produced by {@link PainlessParser#postdot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPostdot(PainlessParser.PostdotContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#callinvoke}. * @param ctx the parse tree * @return the visitor result */ T visitCallinvoke(PainlessParser.CallinvokeContext ctx); /** - * Visit a parse tree produced by the {@code fieldaccess} - * labeled alternative in {@link PainlessParser#dot}. + * Visit a parse tree produced by {@link PainlessParser#fieldaccess}. * @param ctx the parse tree * @return the visitor result */ T visitFieldaccess(PainlessParser.FieldaccessContext ctx); /** - * Visit a parse tree produced by the {@code braceaccess} - * labeled alternative in {@link PainlessParser#brace}. + * Visit a parse tree produced by {@link PainlessParser#braceaccess}. * @param ctx the parse tree * @return the visitor result */ T visitBraceaccess(PainlessParser.BraceaccessContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#arguments}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitArguments(PainlessParser.ArgumentsContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#argument}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitArgument(PainlessParser.ArgumentContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#lambda}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitLambda(PainlessParser.LambdaContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#lamtype}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitLamtype(PainlessParser.LamtypeContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#funcref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFuncref(PainlessParser.FuncrefContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#classFuncref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitClassFuncref(PainlessParser.ClassFuncrefContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#constructorFuncref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitConstructorFuncref(PainlessParser.ConstructorFuncrefContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#capturingFuncref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCapturingFuncref(PainlessParser.CapturingFuncrefContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#localFuncref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx); /** * Visit a parse tree produced by the {@code newstandardarray} * labeled alternative in {@link PainlessParser#arrayinitializer}. @@ -488,4 +430,56 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitMaptoken(PainlessParser.MaptokenContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#arguments}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArguments(PainlessParser.ArgumentsContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#argument}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArgument(PainlessParser.ArgumentContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#lambda}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitLambda(PainlessParser.LambdaContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#lamtype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitLamtype(PainlessParser.LamtypeContext ctx); + /** + * Visit a parse tree produced by the {@code classfuncref} + * labeled alternative in {@link PainlessParser#funcref}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx); + /** + * Visit a parse tree produced by the {@code constructorfuncref} + * labeled alternative in {@link PainlessParser#funcref}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx); + /** + * Visit a parse tree produced by the {@code capturingfuncref} + * labeled alternative in {@link PainlessParser#funcref}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCapturingfuncref(PainlessParser.CapturingfuncrefContext ctx); + /** + * Visit a parse tree produced by the {@code localfuncref} + * labeled alternative in {@link PainlessParser#funcref}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 55e3445bace..61269419fdf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -30,18 +30,6 @@ import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.antlr.PainlessParser.ListinitContext; -import org.elasticsearch.painless.antlr.PainlessParser.ListinitializerContext; -import org.elasticsearch.painless.antlr.PainlessParser.MapinitContext; -import org.elasticsearch.painless.antlr.PainlessParser.MapinitializerContext; -import org.elasticsearch.painless.antlr.PainlessParser.MaptokenContext; -import org.elasticsearch.painless.antlr.PainlessParser.NewinitializedarrayContext; -import org.elasticsearch.painless.antlr.PainlessParser.NewstandardarrayContext; -import org.elasticsearch.painless.node.EListInit; -import org.elasticsearch.painless.node.EMapInit; -import org.elasticsearch.painless.node.SFunction.Reserved; -import org.elasticsearch.painless.node.SSource.MainMethodReserved; -import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.antlr.PainlessParser.AfterthoughtContext; @@ -55,13 +43,12 @@ import org.elasticsearch.painless.antlr.PainlessParser.BraceaccessContext; import org.elasticsearch.painless.antlr.PainlessParser.BreakContext; import org.elasticsearch.painless.antlr.PainlessParser.CallinvokeContext; import org.elasticsearch.painless.antlr.PainlessParser.CalllocalContext; -import org.elasticsearch.painless.antlr.PainlessParser.CapturingFuncrefContext; +import org.elasticsearch.painless.antlr.PainlessParser.CapturingfuncrefContext; import org.elasticsearch.painless.antlr.PainlessParser.CastContext; -import org.elasticsearch.painless.antlr.PainlessParser.ChainprecContext; -import org.elasticsearch.painless.antlr.PainlessParser.ClassFuncrefContext; +import org.elasticsearch.painless.antlr.PainlessParser.ClassfuncrefContext; import org.elasticsearch.painless.antlr.PainlessParser.CompContext; import org.elasticsearch.painless.antlr.PainlessParser.ConditionalContext; -import org.elasticsearch.painless.antlr.PainlessParser.ConstructorFuncrefContext; +import org.elasticsearch.painless.antlr.PainlessParser.ConstructorfuncrefContext; import org.elasticsearch.painless.antlr.PainlessParser.ContinueContext; import org.elasticsearch.painless.antlr.PainlessParser.DeclContext; import org.elasticsearch.painless.antlr.PainlessParser.DeclarationContext; @@ -74,11 +61,9 @@ import org.elasticsearch.painless.antlr.PainlessParser.EachContext; import org.elasticsearch.painless.antlr.PainlessParser.EmptyContext; import org.elasticsearch.painless.antlr.PainlessParser.ExprContext; import org.elasticsearch.painless.antlr.PainlessParser.ExpressionContext; -import org.elasticsearch.painless.antlr.PainlessParser.ExprprecContext; import org.elasticsearch.painless.antlr.PainlessParser.FalseContext; import org.elasticsearch.painless.antlr.PainlessParser.FieldaccessContext; import 
org.elasticsearch.painless.antlr.PainlessParser.ForContext; -import org.elasticsearch.painless.antlr.PainlessParser.FuncrefContext; import org.elasticsearch.painless.antlr.PainlessParser.FunctionContext; import org.elasticsearch.painless.antlr.PainlessParser.IfContext; import org.elasticsearch.painless.antlr.PainlessParser.IneachContext; @@ -86,19 +71,28 @@ import org.elasticsearch.painless.antlr.PainlessParser.InitializerContext; import org.elasticsearch.painless.antlr.PainlessParser.InstanceofContext; import org.elasticsearch.painless.antlr.PainlessParser.LambdaContext; import org.elasticsearch.painless.antlr.PainlessParser.LamtypeContext; -import org.elasticsearch.painless.antlr.PainlessParser.LocalFuncrefContext; +import org.elasticsearch.painless.antlr.PainlessParser.ListinitContext; +import org.elasticsearch.painless.antlr.PainlessParser.ListinitializerContext; +import org.elasticsearch.painless.antlr.PainlessParser.LocalfuncrefContext; +import org.elasticsearch.painless.antlr.PainlessParser.MapinitContext; +import org.elasticsearch.painless.antlr.PainlessParser.MapinitializerContext; +import org.elasticsearch.painless.antlr.PainlessParser.MaptokenContext; import org.elasticsearch.painless.antlr.PainlessParser.NewarrayContext; +import org.elasticsearch.painless.antlr.PainlessParser.NewinitializedarrayContext; import org.elasticsearch.painless.antlr.PainlessParser.NewobjectContext; +import org.elasticsearch.painless.antlr.PainlessParser.NewstandardarrayContext; import org.elasticsearch.painless.antlr.PainlessParser.NullContext; import org.elasticsearch.painless.antlr.PainlessParser.NumericContext; import org.elasticsearch.painless.antlr.PainlessParser.OperatorContext; import org.elasticsearch.painless.antlr.PainlessParser.ParametersContext; import org.elasticsearch.painless.antlr.PainlessParser.PostContext; +import org.elasticsearch.painless.antlr.PainlessParser.PostdotContext; +import org.elasticsearch.painless.antlr.PainlessParser.PostfixContext; import org.elasticsearch.painless.antlr.PainlessParser.PreContext; +import org.elasticsearch.painless.antlr.PainlessParser.PrecedenceContext; import org.elasticsearch.painless.antlr.PainlessParser.ReadContext; import org.elasticsearch.painless.antlr.PainlessParser.RegexContext; import org.elasticsearch.painless.antlr.PainlessParser.ReturnContext; -import org.elasticsearch.painless.antlr.PainlessParser.SecondaryContext; import org.elasticsearch.painless.antlr.PainlessParser.SingleContext; import org.elasticsearch.painless.antlr.PainlessParser.SourceContext; import org.elasticsearch.painless.antlr.PainlessParser.StatementContext; @@ -109,18 +103,17 @@ import org.elasticsearch.painless.antlr.PainlessParser.TrailerContext; import org.elasticsearch.painless.antlr.PainlessParser.TrapContext; import org.elasticsearch.painless.antlr.PainlessParser.TrueContext; import org.elasticsearch.painless.antlr.PainlessParser.TryContext; -import org.elasticsearch.painless.antlr.PainlessParser.UnaryContext; import org.elasticsearch.painless.antlr.PainlessParser.VariableContext; import org.elasticsearch.painless.antlr.PainlessParser.WhileContext; import org.elasticsearch.painless.node.AExpression; -import org.elasticsearch.painless.node.ALink; import org.elasticsearch.painless.node.ANode; import org.elasticsearch.painless.node.AStatement; +import org.elasticsearch.painless.node.EAssignment; import org.elasticsearch.painless.node.EBinary; import org.elasticsearch.painless.node.EBool; import org.elasticsearch.painless.node.EBoolean; +import 
org.elasticsearch.painless.node.ECallLocal; import org.elasticsearch.painless.node.ECapturingFunctionRef; -import org.elasticsearch.painless.node.EChain; import org.elasticsearch.painless.node.EComp; import org.elasticsearch.painless.node.EConditional; import org.elasticsearch.painless.node.EDecimal; @@ -128,20 +121,20 @@ import org.elasticsearch.painless.node.EExplicit; import org.elasticsearch.painless.node.EFunctionRef; import org.elasticsearch.painless.node.EInstanceof; import org.elasticsearch.painless.node.ELambda; +import org.elasticsearch.painless.node.EListInit; +import org.elasticsearch.painless.node.EMapInit; +import org.elasticsearch.painless.node.ENewArray; +import org.elasticsearch.painless.node.ENewObj; import org.elasticsearch.painless.node.ENull; import org.elasticsearch.painless.node.ENumeric; +import org.elasticsearch.painless.node.ERegex; +import org.elasticsearch.painless.node.EStatic; +import org.elasticsearch.painless.node.EString; import org.elasticsearch.painless.node.EUnary; -import org.elasticsearch.painless.node.LBrace; -import org.elasticsearch.painless.node.LCallInvoke; -import org.elasticsearch.painless.node.LCallLocal; -import org.elasticsearch.painless.node.LCast; -import org.elasticsearch.painless.node.LField; -import org.elasticsearch.painless.node.LNewArray; -import org.elasticsearch.painless.node.LNewObj; -import org.elasticsearch.painless.node.LRegex; -import org.elasticsearch.painless.node.LStatic; -import org.elasticsearch.painless.node.LString; -import org.elasticsearch.painless.node.LVariable; +import org.elasticsearch.painless.node.EVariable; +import org.elasticsearch.painless.node.PBrace; +import org.elasticsearch.painless.node.PCallInvoke; +import org.elasticsearch.painless.node.PField; import org.elasticsearch.painless.node.SBlock; import org.elasticsearch.painless.node.SBreak; import org.elasticsearch.painless.node.SCatch; @@ -153,10 +146,13 @@ import org.elasticsearch.painless.node.SEach; import org.elasticsearch.painless.node.SExpression; import org.elasticsearch.painless.node.SFor; import org.elasticsearch.painless.node.SFunction; +import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.elasticsearch.painless.node.SIf; import org.elasticsearch.painless.node.SIfElse; import org.elasticsearch.painless.node.SReturn; import org.elasticsearch.painless.node.SSource; +import org.elasticsearch.painless.node.SSource.MainMethodReserved; +import org.elasticsearch.painless.node.SSource.Reserved; import org.elasticsearch.painless.node.SThrow; import org.elasticsearch.painless.node.STry; import org.elasticsearch.painless.node.SWhile; @@ -172,7 +168,7 @@ import java.util.List; /** * Converts the ANTLR tree to a Painless tree. 
*/ -public final class Walker extends PainlessParserBaseVisitor { +public final class Walker extends PainlessParserBaseVisitor { public static SSource buildPainlessTree(String sourceName, String sourceText, CompilerSettings settings, Printer debugStream) { return new Walker(sourceName, sourceText, settings, debugStream).source; @@ -237,8 +233,13 @@ public final class Walker extends PainlessParserBaseVisitor { return new Location(sourceName, ctx.getStart().getStartIndex()); } + /** Returns name of next lambda */ + private String nextLambda() { + return "lambda$" + syntheticCounter++; + } + @Override - public Object visitSource(SourceContext ctx) { + public ANode visitSource(SourceContext ctx) { reserved.push(new MainMethodReserved()); List functions = new ArrayList<>(); @@ -258,7 +259,7 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitFunction(FunctionContext ctx) { + public ANode visitFunction(FunctionContext ctx) { reserved.push(new FunctionReserved()); String rtnType = ctx.decltype().getText(); @@ -284,13 +285,13 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitParameters(ParametersContext ctx) { + public ANode visitParameters(ParametersContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } @Override - public Object visitIf(IfContext ctx) { - AExpression expression = (AExpression)visitExpression(ctx.expression()); + public ANode visitIf(IfContext ctx) { + AExpression expression = (AExpression)visit(ctx.expression()); SBlock ifblock = (SBlock)visit(ctx.trailer(0)); if (ctx.trailer().size() > 1) { @@ -303,10 +304,10 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitWhile(WhileContext ctx) { + public ANode visitWhile(WhileContext ctx) { reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - AExpression expression = (AExpression)visitExpression(ctx.expression()); + AExpression expression = (AExpression)visit(ctx.expression()); if (ctx.trailer() != null) { SBlock block = (SBlock)visit(ctx.trailer()); @@ -315,26 +316,26 @@ public final class Walker extends PainlessParserBaseVisitor { } else if (ctx.empty() != null) { return new SWhile(location(ctx), expression, null); } else { - throw location(ctx).createError(new IllegalStateException(" Illegal tree structure.")); + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } } @Override - public Object visitDo(DoContext ctx) { + public ANode visitDo(DoContext ctx) { reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - AExpression expression = (AExpression)visitExpression(ctx.expression()); + AExpression expression = (AExpression)visit(ctx.expression()); SBlock block = (SBlock)visit(ctx.block()); return new SDo(location(ctx), block, expression); } @Override - public Object visitFor(ForContext ctx) { + public ANode visitFor(ForContext ctx) { reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - ANode initializer = ctx.initializer() == null ? null : (ANode)visit(ctx.initializer()); - AExpression expression = ctx.expression() == null ? null : (AExpression)visitExpression(ctx.expression()); + ANode initializer = ctx.initializer() == null ? null : visit(ctx.initializer()); + AExpression expression = ctx.expression() == null ? null : (AExpression)visit(ctx.expression()); AExpression afterthought = ctx.afterthought() == null ? 
null : (AExpression)visit(ctx.afterthought()); if (ctx.trailer() != null) { @@ -349,52 +350,52 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitEach(EachContext ctx) { + public ANode visitEach(EachContext ctx) { reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); String type = ctx.decltype().getText(); String name = ctx.ID().getText(); - AExpression expression = (AExpression)visitExpression(ctx.expression()); + AExpression expression = (AExpression)visit(ctx.expression()); SBlock block = (SBlock)visit(ctx.trailer()); return new SEach(location(ctx), type, name, expression, block); } - + @Override - public Object visitIneach(IneachContext ctx) { + public ANode visitIneach(IneachContext ctx) { reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); String name = ctx.ID().getText(); - AExpression expression = (AExpression)visitExpression(ctx.expression()); + AExpression expression = (AExpression)visit(ctx.expression()); SBlock block = (SBlock)visit(ctx.trailer()); return new SEach(location(ctx), "def", name, expression, block); } @Override - public Object visitDecl(DeclContext ctx) { + public ANode visitDecl(DeclContext ctx) { return visit(ctx.declaration()); } @Override - public Object visitContinue(ContinueContext ctx) { + public ANode visitContinue(ContinueContext ctx) { return new SContinue(location(ctx)); } @Override - public Object visitBreak(BreakContext ctx) { + public ANode visitBreak(BreakContext ctx) { return new SBreak(location(ctx)); } @Override - public Object visitReturn(ReturnContext ctx) { - AExpression expression = (AExpression)visitExpression(ctx.expression()); + public ANode visitReturn(ReturnContext ctx) { + AExpression expression = (AExpression)visit(ctx.expression()); return new SReturn(location(ctx), expression); } @Override - public Object visitTry(TryContext ctx) { + public ANode visitTry(TryContext ctx) { SBlock block = (SBlock)visit(ctx.block()); List catches = new ArrayList<>(); @@ -406,21 +407,21 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitThrow(ThrowContext ctx) { - AExpression expression = (AExpression)visitExpression(ctx.expression()); + public ANode visitThrow(ThrowContext ctx) { + AExpression expression = (AExpression)visit(ctx.expression()); return new SThrow(location(ctx), expression); } @Override - public Object visitExpr(ExprContext ctx) { - AExpression expression = (AExpression)visitExpression(ctx.expression()); + public ANode visitExpr(ExprContext ctx) { + AExpression expression = (AExpression)visit(ctx.expression()); return new SExpression(location(ctx), expression); } @Override - public Object visitTrailer(TrailerContext ctx) { + public ANode visitTrailer(TrailerContext ctx) { if (ctx.block() != null) { return visit(ctx.block()); } else if (ctx.statement() != null) { @@ -434,7 +435,7 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitBlock(BlockContext ctx) { + public ANode visitBlock(BlockContext ctx) { if (ctx.statement().isEmpty()) { return null; } else { @@ -449,34 +450,34 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitEmpty(EmptyContext ctx) { + public ANode visitEmpty(EmptyContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } @Override - public Object visitInitializer(InitializerContext ctx) { + public ANode visitInitializer(InitializerContext ctx) { if (ctx.declaration() != 
null) { return visit(ctx.declaration()); } else if (ctx.expression() != null) { - return visitExpression(ctx.expression()); + return visit(ctx.expression()); } else { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } } @Override - public Object visitAfterthought(AfterthoughtContext ctx) { - return visitExpression(ctx.expression()); + public ANode visitAfterthought(AfterthoughtContext ctx) { + return visit(ctx.expression()); } @Override - public Object visitDeclaration(DeclarationContext ctx) { + public ANode visitDeclaration(DeclarationContext ctx) { String type = ctx.decltype().getText(); List declarations = new ArrayList<>(); for (DeclvarContext declvar : ctx.declvar()) { String name = declvar.ID().getText(); - AExpression expression = declvar.expression() == null ? null : (AExpression)visitExpression(declvar.expression()); + AExpression expression = declvar.expression() == null ? null : (AExpression)visit(declvar.expression()); declarations.add(new SDeclaration(location(declvar), type, name, expression)); } @@ -485,17 +486,17 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitDecltype(DecltypeContext ctx) { + public ANode visitDecltype(DecltypeContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } @Override - public Object visitDeclvar(DeclvarContext ctx) { + public ANode visitDeclvar(DeclvarContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } @Override - public Object visitTrap(TrapContext ctx) { + public ANode visitTrap(TrapContext ctx) { String type = ctx.TYPE().getText(); String name = ctx.ID().getText(); SBlock block = (SBlock)visit(ctx.block()); @@ -504,32 +505,19 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitDelimiter(DelimiterContext ctx) { + public ANode visitDelimiter(DelimiterContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - private Object visitExpression(ExpressionContext ctx) { - Object expression = visit(ctx); - - if (expression instanceof List) { - @SuppressWarnings("unchecked") - List links = (List)expression; - - return new EChain(location(ctx), links, false, false, null, null); - } else { - return expression; - } - } - @Override - public Object visitSingle(SingleContext ctx) { + public ANode visitSingle(SingleContext ctx) { return visit(ctx.unary()); } @Override - public Object visitBinary(BinaryContext ctx) { - AExpression left = (AExpression)visitExpression(ctx.expression(0)); - AExpression right = (AExpression)visitExpression(ctx.expression(1)); + public ANode visitBinary(BinaryContext ctx) { + AExpression left = (AExpression)visit(ctx.expression(0)); + AExpression right = (AExpression)visit(ctx.expression(1)); final Operation operation; if (ctx.MUL() != null) { @@ -559,16 +547,16 @@ public final class Walker extends PainlessParserBaseVisitor { } else if (ctx.BWOR() != null) { operation = Operation.BWOR; } else { - throw location(ctx).createError(new IllegalStateException("Unexpected state.")); + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } return new EBinary(location(ctx), operation, left, right); } @Override - public Object visitComp(CompContext ctx) { - AExpression left = (AExpression)visitExpression(ctx.expression(0)); - AExpression right = (AExpression)visitExpression(ctx.expression(1)); + public ANode visitComp(CompContext ctx) 
{ + AExpression left = (AExpression)visit(ctx.expression(0)); + AExpression right = (AExpression)visit(ctx.expression(1)); final Operation operation; if (ctx.LT() != null) { @@ -588,16 +576,24 @@ public final class Walker extends PainlessParserBaseVisitor { } else if (ctx.NER() != null) { operation = Operation.NER; } else { - throw location(ctx).createError(new IllegalStateException("Unexpected state.")); + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } return new EComp(location(ctx), operation, left, right); } @Override - public Object visitBool(BoolContext ctx) { - AExpression left = (AExpression)visitExpression(ctx.expression(0)); - AExpression right = (AExpression)visitExpression(ctx.expression(1)); + public ANode visitInstanceof(InstanceofContext ctx) { + AExpression expr = (AExpression)visit(ctx.expression()); + String type = ctx.decltype().getText(); + + return new EInstanceof(location(ctx), expr, type); + } + + @Override + public ANode visitBool(BoolContext ctx) { + AExpression left = (AExpression)visit(ctx.expression(0)); + AExpression right = (AExpression)visit(ctx.expression(1)); final Operation operation; if (ctx.BOOLAND() != null) { @@ -605,25 +601,26 @@ public final class Walker extends PainlessParserBaseVisitor { } else if (ctx.BOOLOR() != null) { operation = Operation.OR; } else { - throw location(ctx).createError(new IllegalStateException("Unexpected state.")); + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } return new EBool(location(ctx), operation, left, right); } @Override - public Object visitConditional(ConditionalContext ctx) { - AExpression condition = (AExpression)visitExpression(ctx.expression(0)); - AExpression left = (AExpression)visitExpression(ctx.expression(1)); - AExpression right = (AExpression)visitExpression(ctx.expression(2)); + public ANode visitConditional(ConditionalContext ctx) { + AExpression condition = (AExpression)visit(ctx.expression(0)); + AExpression left = (AExpression)visit(ctx.expression(1)); + AExpression right = (AExpression)visit(ctx.expression(2)); return new EConditional(location(ctx), condition, left, right); } @Override - public Object visitAssignment(AssignmentContext ctx) { - @SuppressWarnings("unchecked") - List links = (List)visit(ctx.chain()); + public ANode visitAssignment(AssignmentContext ctx) { + AExpression lhs = (AExpression)visit(ctx.expression(0)); + AExpression rhs = (AExpression)visit(ctx.expression(1)); + final Operation operation; if (ctx.ASSIGN() != null) { @@ -654,28 +651,13 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - AExpression expression = (AExpression)visitExpression(ctx.expression()); - - return new EChain(location(ctx), links, false, false, operation, expression); - } - - private Object visitUnary(UnaryContext ctx) { - Object expression = visit(ctx); - - if (expression instanceof List) { - @SuppressWarnings("unchecked") - List links = (List)expression; - - return new EChain(location(ctx), links, false, false, null, null); - } else { - return expression; - } + return new EAssignment(location(ctx), lhs, rhs, false, false, operation); } @Override - public Object visitPre(PreContext ctx) { - @SuppressWarnings("unchecked") - List links = (List)visit(ctx.chain()); + public ANode visitPre(PreContext ctx) { + AExpression expression = (AExpression)visit(ctx.chain()); + final Operation operation; if (ctx.INCR() != null) { @@ 
-686,13 +668,13 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - return new EChain(location(ctx), links, true, false, operation, null); + return new EAssignment(location(ctx), expression, null, true, false, operation); } @Override - public Object visitPost(PostContext ctx) { - @SuppressWarnings("unchecked") - List links = (List)visit(ctx.chain()); + public ANode visitPost(PostContext ctx) { + AExpression expression = (AExpression)visit(ctx.chain()); + final Operation operation; if (ctx.INCR() != null) { @@ -703,17 +685,79 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - return new EChain(location(ctx), links, false, true, operation, null); + return new EAssignment(location(ctx), expression, null, false, true, operation); } @Override - public Object visitRead(ReadContext ctx) { + public ANode visitRead(ReadContext ctx) { return visit(ctx.chain()); } @Override - public Object visitNumeric(NumericContext ctx) { - final boolean negate = ctx.parent instanceof OperatorContext && ((OperatorContext)ctx.parent).SUB() != null; + public ANode visitOperator(OperatorContext ctx) { + AExpression expression = (AExpression)visit(ctx.unary()); + + final Operation operation; + + if (ctx.BOOLNOT() != null) { + operation = Operation.NOT; + } else if (ctx.BWNOT() != null) { + operation = Operation.BWNOT; + } else if (ctx.ADD() != null) { + operation = Operation.ADD; + } else if (ctx.SUB() != null) { + if (ctx.unary() instanceof ReadContext && ((ReadContext)ctx.unary()).chain() instanceof DynamicContext && + ((DynamicContext)((ReadContext)ctx.unary()).chain()).primary() instanceof NumericContext && + ((DynamicContext)((ReadContext)ctx.unary()).chain()).postfix().isEmpty()) { + + return expression; + } + + operation = Operation.SUB; + } else { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + return new EUnary(location(ctx), operation, expression); + } + + @Override + public ANode visitCast(CastContext ctx) { + String type = ctx.decltype().getText(); + AExpression child = (AExpression)visit(ctx.unary()); + + return new EExplicit(location(ctx), type, child); + } + + @Override + public ANode visitDynamic(DynamicContext ctx) { + AExpression primary = (AExpression)visit(ctx.primary()); + + return buildPostfixChain(primary, null, ctx.postfix()); + } + + @Override + public ANode visitStatic(StaticContext ctx) { + String type = ctx.decltype().getText(); + + return buildPostfixChain(new EStatic(location(ctx), type), ctx.postdot(), ctx.postfix()); + } + + @Override + public ANode visitNewarray(NewarrayContext ctx) { + return visit(ctx.arrayinitializer()); + } + + @Override + public ANode visitPrecedence(PrecedenceContext ctx) { + return visit(ctx.expression()); + } + + @Override + public ANode visitNumeric(NumericContext ctx) { + final boolean negate = ((DynamicContext)ctx.parent).postfix().isEmpty() && + ctx.parent.parent.parent instanceof OperatorContext && + ((OperatorContext)ctx.parent.parent.parent).SUB() != null; if (ctx.DECIMAL() != null) { return new EDecimal(location(ctx), (negate ? 
"-" : "") + ctx.DECIMAL().getText()); @@ -729,197 +773,135 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitTrue(TrueContext ctx) { + public ANode visitTrue(TrueContext ctx) { return new EBoolean(location(ctx), true); } @Override - public Object visitFalse(FalseContext ctx) { + public ANode visitFalse(FalseContext ctx) { return new EBoolean(location(ctx), false); } @Override - public Object visitNull(NullContext ctx) { + public ANode visitNull(NullContext ctx) { return new ENull(location(ctx)); } @Override - public Object visitListinit(ListinitContext ctx) { - return visit(ctx.listinitializer()); - } - - @Override - public Object visitMapinit(MapinitContext ctx) { - return visit(ctx.mapinitializer()); - } - - @Override - public Object visitOperator(OperatorContext ctx) { - if (ctx.SUB() != null && ctx.unary() instanceof NumericContext) { - return visit(ctx.unary()); - } else { - AExpression expression = (AExpression)visitUnary(ctx.unary()); - final Operation operation; - - if (ctx.BOOLNOT() != null) { - operation = Operation.NOT; - } else if (ctx.BWNOT() != null) { - operation = Operation.BWNOT; - } else if (ctx.ADD() != null) { - operation = Operation.ADD; - } else if (ctx.SUB() != null) { - operation = Operation.SUB; - } else { - throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); - } - - return new EUnary(location(ctx), operation, expression); - } - } - - @Override - public Object visitCast(CastContext ctx) { - String type = ctx.decltype().getText(); - Object child = visit(ctx.unary()); - - if (child instanceof List) { - @SuppressWarnings("unchecked") - List links = (List)child; - links.add(new LCast(location(ctx), type)); - - return links; - } else { - return new EExplicit(location(ctx), type, (AExpression)child); - } - } - - @Override - public Object visitDynamic(DynamicContext ctx) { - Object child = visit(ctx.primary()); - - if (child instanceof List) { - @SuppressWarnings("unchecked") - List links = (List)child; - - for (SecondaryContext secondary : ctx.secondary()) { - links.add((ALink)visit(secondary)); - } - - return links; - } else if (!ctx.secondary().isEmpty()) { - throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); - } else { - return child; - } - } - - @Override - public Object visitStatic(StaticContext ctx) { - String type = ctx.decltype().getText(); - List links = new ArrayList<>(); - - links.add(new LStatic(location(ctx), type)); - links.add((ALink)visit(ctx.dot())); - - for (SecondaryContext secondary : ctx.secondary()) { - links.add((ALink)visit(secondary)); - } - - return links; - } - - @Override - public Object visitNewarray(NewarrayContext ctx) { - return visit(ctx.arrayinitializer()); - } - - @Override - public Object visitExprprec(ExprprecContext ctx) { - return visit(ctx.expression()); - } - - @Override - public Object visitChainprec(ChainprecContext ctx) { - return visit(ctx.unary()); - } - - @Override - public Object visitString(StringContext ctx) { + public ANode visitString(StringContext ctx) { String string = ctx.STRING().getText().substring(1, ctx.STRING().getText().length() - 1); - List links = new ArrayList<>(); - links.add(new LString(location(ctx), string)); - return links; + return new EString(location(ctx), string); } @Override - public Object visitRegex(RegexContext ctx) { + public ANode visitRegex(RegexContext ctx) { String text = ctx.REGEX().getText(); int lastSlash = text.lastIndexOf('/'); String pattern = text.substring(1, 
lastSlash); String flags = text.substring(lastSlash + 1); - List links = new ArrayList<>(); - links.add(new LRegex(location(ctx), pattern, flags)); - return links; + return new ERegex(location(ctx), pattern, flags); } @Override - public Object visitVariable(VariableContext ctx) { - String name = ctx.ID().getText(); - List links = new ArrayList<>(); - links.add(new LVariable(location(ctx), name)); + public ANode visitListinit(ListinitContext ctx) { + return visit(ctx.listinitializer()); + } + @Override + public ANode visitMapinit(MapinitContext ctx) { + return visit(ctx.mapinitializer()); + } + + @Override + public ANode visitVariable(VariableContext ctx) { + String name = ctx.ID().getText(); reserved.peek().markReserved(name); - return links; + return new EVariable(location(ctx), name); } @Override - public Object visitCalllocal(CalllocalContext ctx) { + public ANode visitCalllocal(CalllocalContext ctx) { String name = ctx.ID().getText(); - @SuppressWarnings("unchecked") - List arguments = (List)visit(ctx.arguments()); - List links = new ArrayList<>(); - links.add(new LCallLocal(location(ctx), name, arguments)); + List arguments = collectArguments(ctx.arguments()); - return links; + return new ECallLocal(location(ctx), name, arguments); } @Override - public Object visitNewobject(NewobjectContext ctx) { + public ANode visitNewobject(NewobjectContext ctx) { String type = ctx.TYPE().getText(); - @SuppressWarnings("unchecked") - List arguments = (List)visit(ctx.arguments()); + List arguments = collectArguments(ctx.arguments()); - List links = new ArrayList<>(); - links.add(new LNewObj(location(ctx), type, arguments)); + return new ENewObj(location(ctx), type, arguments); + } - return links; + private AExpression buildPostfixChain(AExpression primary, PostdotContext postdot, List postfixes) { + AExpression prefix = primary; + + if (postdot != null) { + prefix = visitPostdot(postdot, prefix); + } + + for (PostfixContext postfix : postfixes) { + prefix = visitPostfix(postfix, prefix); + } + + return prefix; } @Override - public Object visitSecondary(SecondaryContext ctx) { - if (ctx.dot() != null) { - return visit(ctx.dot()); - } else if (ctx.brace() != null) { - return visit(ctx.brace()); + public ANode visitPostfix(PostfixContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + public AExpression visitPostfix(PostfixContext ctx, AExpression prefix) { + if (ctx.callinvoke() != null) { + return visitCallinvoke(ctx.callinvoke(), prefix); + } else if (ctx.fieldaccess() != null) { + return visitFieldaccess(ctx.fieldaccess(), prefix); + } else if (ctx.braceaccess() != null) { + return visitBraceaccess(ctx.braceaccess(), prefix); } else { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } } @Override - public Object visitCallinvoke(CallinvokeContext ctx) { - String name = ctx.DOTID().getText(); - @SuppressWarnings("unchecked") - List arguments = (List)visit(ctx.arguments()); + public ANode visitPostdot(PostdotContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } - return new LCallInvoke(location(ctx), name, arguments); + public AExpression visitPostdot(PostdotContext ctx, AExpression prefix) { + if (ctx.callinvoke() != null) { + return visitCallinvoke(ctx.callinvoke(), prefix); + } else if (ctx.fieldaccess() != null) { + return visitFieldaccess(ctx.fieldaccess(), prefix); + } else { + throw location(ctx).createError(new IllegalStateException("Illegal tree 
structure.")); + } } @Override - public Object visitFieldaccess(FieldaccessContext ctx) { + public ANode visitCallinvoke(CallinvokeContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + public AExpression visitCallinvoke(CallinvokeContext ctx, AExpression prefix) { + String name = ctx.DOTID().getText(); + List arguments = collectArguments(ctx.arguments()); + + return new PCallInvoke(location(ctx), prefix, name, arguments); + } + + @Override + public ANode visitFieldaccess(FieldaccessContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + public AExpression visitFieldaccess(FieldaccessContext ctx, AExpression prefix) { final String value; if (ctx.DOTID() != null) { @@ -930,18 +912,79 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - return new LField(location(ctx), value); + return new PField(location(ctx), prefix, value); } @Override - public Object visitBraceaccess(BraceaccessContext ctx) { - AExpression expression = (AExpression)visitExpression(ctx.expression()); + public ANode visitBraceaccess(BraceaccessContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } - return new LBrace(location(ctx), expression); + public AExpression visitBraceaccess(BraceaccessContext ctx, AExpression prefix) { + AExpression expression = (AExpression)visit(ctx.expression()); + + return new PBrace(location(ctx), prefix, expression); } @Override - public Object visitArguments(ArgumentsContext ctx) { + public ANode visitNewstandardarray(NewstandardarrayContext ctx) { + String type = ctx.TYPE().getText(); + List expressions = new ArrayList<>(); + + for (ExpressionContext expression : ctx.expression()) { + expressions.add((AExpression)visit(expression)); + } + + return buildPostfixChain(new ENewArray(location(ctx), type, expressions, false), ctx.postdot(), ctx.postfix()); + } + + @Override + public ANode visitNewinitializedarray(NewinitializedarrayContext ctx) { + String type = ctx.TYPE().getText(); + List expressions = new ArrayList<>(); + + for (ExpressionContext expression : ctx.expression()) { + expressions.add((AExpression)visit(expression)); + } + + return buildPostfixChain(new ENewArray(location(ctx), type, expressions, true), null, ctx.postfix()); + } + + @Override + public ANode visitListinitializer(ListinitializerContext ctx) { + List values = new ArrayList<>(); + + for (ExpressionContext expression : ctx.expression()) { + values.add((AExpression)visit(expression)); + } + + return new EListInit(location(ctx), values); + } + + @Override + public ANode visitMapinitializer(MapinitializerContext ctx) { + List keys = new ArrayList<>(); + List values = new ArrayList<>(); + + for (MaptokenContext maptoken : ctx.maptoken()) { + keys.add((AExpression)visit(maptoken.expression(0))); + values.add((AExpression)visit(maptoken.expression(1))); + } + + return new EMapInit(location(ctx), keys, values); + } + + @Override + public ANode visitMaptoken(MaptokenContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + public ANode visitArguments(ArgumentsContext ctx) { + throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); + } + + private List collectArguments(ArgumentsContext ctx) { List arguments = new ArrayList<>(); for (ArgumentContext argument : 
ctx.argument()) { @@ -952,9 +995,9 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitArgument(ArgumentContext ctx) { + public ANode visitArgument(ArgumentContext ctx) { if (ctx.expression() != null) { - return visitExpression(ctx.expression()); + return visit(ctx.expression()); } else if (ctx.lambda() != null) { return visit(ctx.lambda()); } else if (ctx.funcref() != null) { @@ -965,7 +1008,7 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitLambda(LambdaContext ctx) { + public ANode visitLambda(LambdaContext ctx) { reserved.push(new FunctionReserved()); List paramTypes = new ArrayList<>(); @@ -984,7 +1027,7 @@ public final class Walker extends PainlessParserBaseVisitor { if (ctx.expression() != null) { // single expression - AExpression expression = (AExpression) visitExpression(ctx.expression()); + AExpression expression = (AExpression)visit(ctx.expression()); statements.add(new SReturn(location(ctx), expression)); } else { for (StatementContext statement : ctx.block().statement()) { @@ -998,138 +1041,42 @@ public final class Walker extends PainlessParserBaseVisitor { } @Override - public Object visitLamtype(LamtypeContext ctx) { + public ANode visitLamtype(LamtypeContext ctx) { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } @Override - public Object visitFuncref(FuncrefContext ctx) { - if (ctx.classFuncref() != null) { - return visit(ctx.classFuncref()); - } else if (ctx.constructorFuncref() != null) { - return visit(ctx.constructorFuncref()); - } else if (ctx.capturingFuncref() != null) { - return visit(ctx.capturingFuncref()); - } else if (ctx.localFuncref() != null) { - return visit(ctx.localFuncref()); - } else { - throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); - } - } - - @Override - public Object visitClassFuncref(ClassFuncrefContext ctx) { + public ANode visitClassfuncref(ClassfuncrefContext ctx) { return new EFunctionRef(location(ctx), ctx.TYPE().getText(), ctx.ID().getText()); } @Override - public Object visitConstructorFuncref(ConstructorFuncrefContext ctx) { + public ANode visitConstructorfuncref(ConstructorfuncrefContext ctx) { if (!ctx.decltype().LBRACE().isEmpty()) { // array constructors are special: we need to make a synthetic method // taking integer as argument and returning a new instance, and return a ref to that. 
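            // As a sketch (lambda$N stands for the synthetic name returned by nextLambda()):
            // for a reference such as int[]::new, the synthetic method built below behaves
            // roughly like
            //
            //     int[] lambda$N(int size) { return new int[size]; }
            //
            // and the constructor reference is then compiled as an ordinary local
            // function reference, "this::lambda$N".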
Location location = location(ctx); String arrayType = ctx.decltype().getText(); SReturn code = new SReturn(location, - new EChain(location, - new LNewArray(location, arrayType, Arrays.asList( - new EChain(location, - new LVariable(location, "size"))), false))); + new ENewArray(location, arrayType, Arrays.asList( + new EVariable(location, "size")), false)); String name = nextLambda(); globals.addSyntheticMethod(new SFunction(new FunctionReserved(), location, arrayType, name, - Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true)); + Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true)); + return new EFunctionRef(location(ctx), "this", name); } + return new EFunctionRef(location(ctx), ctx.decltype().getText(), ctx.NEW().getText()); } @Override - public Object visitCapturingFuncref(CapturingFuncrefContext ctx) { + public ANode visitCapturingfuncref(CapturingfuncrefContext ctx) { return new ECapturingFunctionRef(location(ctx), ctx.ID(0).getText(), ctx.ID(1).getText()); } @Override - public Object visitLocalFuncref(LocalFuncrefContext ctx) { + public ANode visitLocalfuncref(LocalfuncrefContext ctx) { return new EFunctionRef(location(ctx), ctx.THIS().getText(), ctx.ID().getText()); } - - @Override - public Object visitNewstandardarray(NewstandardarrayContext ctx) { - String type = ctx.TYPE().getText(); - List expressions = new ArrayList<>(); - - for (ExpressionContext expression : ctx.expression()) { - expressions.add((AExpression)visitExpression(expression)); - } - - List links = new ArrayList<>(); - links.add(new LNewArray(location(ctx), type, expressions, false)); - - if (ctx.dot() != null) { - links.add((ALink)visit(ctx.dot())); - - for (SecondaryContext secondary : ctx.secondary()) { - links.add((ALink)visit(secondary)); - } - } else if (!ctx.secondary().isEmpty()) { - throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); - } - - return links; - } - - @Override - public Object visitNewinitializedarray(NewinitializedarrayContext ctx) { - String type = ctx.TYPE().getText(); - List expressions = new ArrayList<>(); - - for (ExpressionContext expression : ctx.expression()) { - expressions.add((AExpression)visitExpression(expression)); - } - - List links = new ArrayList<>(); - links.add(new LNewArray(location(ctx), type, expressions, true)); - - return links; - } - - @Override - public Object visitListinitializer(ListinitializerContext ctx) { - List values = new ArrayList<>(); - - for (ExpressionContext expression : ctx.expression()) { - values.add((AExpression)visitExpression(expression)); - } - - return new EListInit(location(ctx), values); - } - - @Override - public Object visitMapinitializer(MapinitializerContext ctx) { - List keys = new ArrayList<>(); - List values = new ArrayList<>(); - - for (MaptokenContext maptoken : ctx.maptoken()) { - keys.add((AExpression)visitExpression(maptoken.expression(0))); - values.add((AExpression)visitExpression(maptoken.expression(1))); - } - - return new EMapInit(location(ctx), keys, values); - } - - @Override - public Object visitMaptoken(MaptokenContext ctx) { - throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); - } - - @Override - public Object visitInstanceof(InstanceofContext ctx) { - AExpression expr = (AExpression)visitExpression(ctx.expression()); - String type = ctx.decltype().getText(); - return new EInstanceof(location(ctx), expr, type); - } - - /** Returns name of next lambda */ - private String nextLambda() { - return "lambda$" + 
syntheticCounter++; - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index eb5f73334db..739e3de6d21 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -19,39 +19,47 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.Locals; -import org.objectweb.asm.Label; -import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.Location; + +import java.util.Objects; /** - * The superclass for all E* (expression) nodes. + * The superclass for all E* (expression) and P* (postfix) nodes. */ public abstract class AExpression extends ANode { + /** + * Prefix is the predecessor to this node in a variable chain. + * This is used to analyze and write variable chains in a + * more natural order since the parent node of a variable + * chain will want the data from the final postfix to be + * analyzed. + */ + AExpression prefix; + /** * Set to false when an expression will not be read from such as * a basic assignment. Note this variable is always set by the parent * as input. */ - protected boolean read = true; + boolean read = true; /** * Set to true when an expression can be considered a stand alone * statement. Used to prevent extraneous bytecode. This is always * set by the node as output. */ - protected boolean statement = false; + boolean statement = false; /** * Set to the expected type this node needs to be. Note this variable * is always set by the parent as input and should never be read from. */ - protected Type expected = null; + Type expected = null; /** * Set to the actual type this node is. Note this variable is always @@ -59,19 +67,19 @@ public abstract class AExpression extends ANode { * node itself. Also, actual can always be read after a cast is * called on this node to get the type of the node after the cast. */ - protected Type actual = null; + Type actual = null; /** * Set by {@link EExplicit} if a cast made on an expression node should be * explicit. */ - protected boolean explicit = false; + boolean explicit = false; /** * Set to true if a cast is allowed to boxed/unboxed. This is used * for method arguments because casting may be required. */ - protected boolean internal = false; + boolean internal = false; /** * Set to the value of the constant this expression node represents if @@ -79,40 +87,30 @@ public abstract class AExpression extends ANode { * this node will be replaced by an {@link EConstant} during casting * if it's not already one. */ - protected Object constant = null; + Object constant = null; /** * Set to true by {@link ENull} to represent a null value. */ - protected boolean isNull = false; + boolean isNull = false; /** - * If an expression represents a branch statement, represents the jump should - * the expression evaluate to a true value. It should always be the case that only - * one of tru and fals are non-null or both are null. Only used during the writing phase. + * Standard constructor with location used for error tracking. 
*/ - protected Label tru = null; - - /** - * If an expression represents a branch statement, represents the jump should - * the expression evaluate to a false value. It should always be the case that only - * one of tru and fals are non-null or both are null. Only used during the writing phase. - */ - protected Label fals = null; - - public AExpression(Location location) { + AExpression(Location location) { super(location); + + prefix = null; } /** - * Checks for errors and collects data for the writing phase. + * This constructor is used by variable/method chains when postfixes are specified. */ - abstract void analyze(Locals locals); + AExpression(Location location, AExpression prefix) { + super(location); - /** - * Writes ASM based on the data collected during the analysis phase. - */ - abstract void write(MethodWriter writer, Globals globals); + this.prefix = Objects.requireNonNull(prefix); + } /** * Inserts {@link ECast} nodes into the tree for implicit casts. Also replaces @@ -120,7 +118,7 @@ public abstract class AExpression extends ANode { * @return The new child node for the parent node calling this method. */ AExpression cast(Locals locals) { - final Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); + Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); if (cast == null) { if (constant == null || this instanceof EConstant) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java deleted file mode 100644 index f8081d0020b..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.node; - -import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - -/** - * The superclass for all L* (link) nodes. - */ -public abstract class ALink extends ANode { - - /** - * Size is set to a value based on this link's size on the stack. This is - * used during the writing phase to dup stack values from this link as - * necessary during certain store operations. - */ - final int size; - - /** - * Set to false only if the link is not going to be read from. - */ - boolean load = true; - - /** - * Set to true only if the link is going to be written to and - * is the final link in a chain. - */ - boolean store = false; - - /** - * Set to true if this link represents a statik type to be accessed. 
- */ - boolean statik = false; - - /** - * Set by the parent chain to type of the previous link or null if - * there was no previous link. - */ - Type before = null; - - /** - * Set by the link to be the type after the link has been loaded/stored. - */ - Type after = null; - - /** - * Set to true if this link could be a stand-alone statement. - */ - boolean statement = false; - - /** - * Used by {@link LString} to set the value of the String constant. Also - * used by shortcuts to represent a constant key. - */ - String string = null; - - ALink(Location location, int size) { - super(location); - - this.size = size; - } - - /** - * Checks for errors and collects data for the writing phase. - * @return Possibly returns a different {@link ALink} node if a type is - * def or a shortcut is used. Otherwise, returns itself. This will be - * updated into the {@link EChain} node's list of links. - */ - abstract ALink analyze(Locals locals); - - /** - * Write values before a load/store occurs such as an array index. - */ - abstract void write(MethodWriter writer, Globals globals); - - /** - * Write a load for the specific link type. - */ - abstract void load(MethodWriter writer, Globals globals); - - /** - * Write a store for the specific link type. - */ - abstract void store(MethodWriter writer, Globals globals); - - /** - * Used to copy link data from one to another during analysis in the case of replacement. - */ - final ALink copy(ALink link) { - load = link.load; - store = link.store; - statik = link.statik; - before = link.before; - after = link.after; - statement = link.statement; - string = link.string; - - return this; - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java index 55d62108cba..da4e00f0183 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java @@ -19,24 +19,31 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; /** - * The superclass for all other nodes. + * The superclass for all nodes. */ public abstract class ANode { + /** * The identifier of the script and character offset used for debugging and errors. */ final Location location; + /** + * Standard constructor with location used for error tracking. + */ ANode(Location location) { this.location = Objects.requireNonNull(location); } - + /** * Adds all variable names referenced to the variable set. *
@@ -44,8 +51,18 @@ public abstract class ANode { * @param variables set of variables referenced (any scope) */ abstract void extractVariables(Set variables); - - public RuntimeException createError(RuntimeException exception) { + + /** + * Checks for errors and collects data for the writing phase. + */ + abstract void analyze(Locals locals); + + /** + * Writes ASM based on the data collected during the analysis phase. + */ + abstract void write(MethodWriter writer, Globals globals); + + RuntimeException createError(RuntimeException exception) { return location.createError(exception); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index 23210472b70..42ffe0a1582 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -19,12 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; -import org.objectweb.asm.Label; import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; /** * The superclass for all S* (statement) nodes. @@ -110,17 +107,10 @@ public abstract class AStatement extends ANode { */ Label brake = null; + /** + * Standard constructor with location used for error tracking. + */ AStatement(Location location) { super(location); } - - /** - * Checks for errors and collects data for the writing phase. - */ - abstract void analyze(Locals locals); - - /** - * Writes ASM based on the data collected during the analysis phase. - */ - abstract void write(MethodWriter writer, Globals globals); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java new file mode 100644 index 00000000000..71b8ccd4da1 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.util.Objects; + +/** + * The super class for an expression that can store a value in local memory. 
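+ * <p>
+ * A rough sketch of the intended flow: for a compound assignment such as
+ * {@code x[i] += 1}, {@link EAssignment} treats the left-hand side as an AStoreable;
+ * it calls {@code setup} to push the array and index, dups {@code accessElementCount()}
+ * stack slots, {@code load}s the current value, applies the operation, and then
+ * {@code store}s the result back.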
+ */ +abstract class AStoreable extends AExpression { + + /** + * Set to true when this node is an lhs-expression and will be storing + * a value from an rhs-expression. + */ + boolean write = false; + + /** + * Standard constructor with location used for error tracking. + */ + AStoreable(Location location) { + super(location); + + prefix = null; + } + + /** + * This constructor is used by variable/method chains when postfixes are specified. + */ + AStoreable(Location location, AExpression prefix) { + super(location); + + this.prefix = Objects.requireNonNull(prefix); + } + + /** + * Returns a value based on the number of elements previously placed on the + * stack to load/store a certain piece of a variable/method chain. This is + * used during the writing phase to dup stack values from this storeable as + * necessary during certain store operations. + *
+ * Examples: + * {@link EVariable} returns 0 because it requires nothing extra to perform + * a load/store + * {@link PSubField} returns 1 because it requires the name of the field as + * an index on the stack to perform a load/store + * {@link PSubBrace} returns 2 because it requires both the variable slot and + * an index into the array on the stack to perform a + * load/store + */ + abstract int accessElementCount(); + + /** + * Returns true if this node or a sub-node of this node can be optimized with + * rhs actual type to avoid an unnecessary cast. + */ + abstract boolean isDefOptimized(); + + /** + * If this node or a sub-node of this node uses dynamic calls then + * actual will be set to this value. This is used for an optimization + * during assignment to def type targets. + */ + abstract void updateActual(Type actual); + + /** + * Called before a storeable node is loaded or stored. Used to load prefixes and + * push load/store constants onto the stack if necessary. + */ + abstract void setup(MethodWriter writer, Globals globals); + + /** + * Called to load a storable used for compound assignments. + */ + abstract void load(MethodWriter writer, Globals globals); + + /** + * Called to store a storabable to local memory. + */ + abstract void store(MethodWriter writer, Globals globals); +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java new file mode 100644 index 00000000000..50b56505eb8 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -0,0 +1,325 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.DefBootstrap; +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.Operation; + +import java.util.Objects; +import java.util.Set; + +/** + * Represents an assignment with the lhs and rhs as child nodes. 
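+ * <p>
+ * As a rough sketch (omitting {@code Location} arguments), the Walker above builds
+ * {@code x = y} as {@code new EAssignment(x, y, false, false, null)},
+ * {@code x += y} as {@code new EAssignment(x, y, false, false, Operation.ADD)},
+ * {@code ++x} as {@code new EAssignment(x, null, true, false, Operation.INCR)}, and
+ * {@code x++} as {@code new EAssignment(x, null, false, true, Operation.INCR)}.
+ * During analysis the increment and decrement forms are rewritten into an add or
+ * subtract of a constant one of the appropriate numeric type.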
+ */ +public final class EAssignment extends AExpression { + + private AExpression lhs; + private AExpression rhs; + private final boolean pre; + private final boolean post; + private Operation operation; + + private boolean cat = false; + private Type promote = null; + private Type shiftDistance; // for shifts, the RHS is promoted independently + private Cast there = null; + private Cast back = null; + + public EAssignment(Location location, AExpression lhs, AExpression rhs, boolean pre, boolean post, Operation operation) { + super(location); + + this.lhs = Objects.requireNonNull(lhs); + this.rhs = rhs; + this.pre = pre; + this.post = post; + this.operation = operation; + } + + @Override + void extractVariables(Set variables) { + lhs.extractVariables(variables); + rhs.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + analyzeLHS(locals); + analyzeIncrDecr(); + + if (operation != null) { + analyzeCompound(locals); + } else if (rhs != null) { + analyzeSimple(locals); + } else { + throw new IllegalStateException("Illegal tree structure."); + } + } + + private void analyzeLHS(Locals locals) { + if (lhs instanceof AStoreable) { + AStoreable lhs = (AStoreable)this.lhs; + + lhs.read = read; + lhs.write = true; + lhs.analyze(locals); + } else { + throw new IllegalArgumentException("Left-hand side cannot be assigned a value."); + } + } + + private void analyzeIncrDecr() { + if (pre && post) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } else if (pre || post) { + if (rhs != null) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + Sort sort = lhs.actual.sort; + + if (operation == Operation.INCR) { + if (sort == Sort.DOUBLE) { + rhs = new EConstant(location, 1D); + } else if (sort == Sort.FLOAT) { + rhs = new EConstant(location, 1F); + } else if (sort == Sort.LONG) { + rhs = new EConstant(location, 1L); + } else { + rhs = new EConstant(location, 1); + } + + operation = Operation.ADD; + } else if (operation == Operation.DECR) { + if (sort == Sort.DOUBLE) { + rhs = new EConstant(location, 1D); + } else if (sort == Sort.FLOAT) { + rhs = new EConstant(location, 1F); + } else if (sort == Sort.LONG) { + rhs = new EConstant(location, 1L); + } else { + rhs = new EConstant(location, 1); + } + + operation = Operation.SUB; + } else { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + } + } + + private void analyzeCompound(Locals locals) { + rhs.analyze(locals); + + boolean shift = false; + + if (operation == Operation.MUL) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.DIV) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.REM) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.ADD) { + promote = AnalyzerCaster.promoteAdd(lhs.actual, rhs.actual); + } else if (operation == Operation.SUB) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.LSH) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation == Operation.RSH) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation == Operation.USH) { + promote = 
AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation == Operation.BWAND) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else if (operation == Operation.XOR) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else if (operation == Operation.BWOR) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + if (promote == null || (shift && shiftDistance == null)) { + throw createError(new ClassCastException("Cannot apply compound assignment " + + "[" + operation.symbol + "=] to types [" + lhs.actual + "] and [" + rhs.actual + "].")); + } + + cat = operation == Operation.ADD && promote.sort == Sort.STRING; + + if (cat) { + if (rhs instanceof EBinary && ((EBinary)rhs).operation == Operation.ADD && rhs.actual.sort == Sort.STRING) { + ((EBinary)rhs).cat = true; + } + + rhs.expected = rhs.actual; + } else if (shift) { + if (promote.sort == Sort.DEF) { + // shifts are promoted independently, but for the def type, we need object. + rhs.expected = promote; + } else if (shiftDistance.sort == Sort.LONG) { + rhs.expected = Definition.INT_TYPE; + rhs.explicit = true; + } else { + rhs.expected = shiftDistance; + } + } else { + rhs.expected = promote; + } + + rhs = rhs.cast(locals); + + there = AnalyzerCaster.getLegalCast(location, lhs.actual, promote, false, false); + back = AnalyzerCaster.getLegalCast(location, promote, lhs.actual, true, false); + + this.statement = true; + this.actual = read ? lhs.actual : Definition.VOID_TYPE; + } + + private void analyzeSimple(Locals locals) { + AStoreable lhs = (AStoreable)this.lhs; + + // If the lhs node is a def optimized node we update the actual type to remove the need for a cast. + if (lhs.isDefOptimized()) { + rhs.analyze(locals); + rhs.expected = rhs.actual; + lhs.updateActual(rhs.actual); + // Otherwise, we must adapt the rhs type to the lhs type with a cast. + } else { + rhs.expected = lhs.actual; + rhs.analyze(locals); + } + + rhs = rhs.cast(locals); + + this.statement = true; + this.actual = read ? lhs.actual : Definition.VOID_TYPE; + } + + /** + * Handles writing byte code for variable/method chains for all given possibilities + * including String concatenation, compound assignment, regular assignment, and simple + * reads. Includes proper duplication for chained assignments and assignments that are + * also read from. + */ + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + // For the case where the assignment represents a String concatenation + // we must, depending on the Java version, write a StringBuilder or + // track types going onto the stack. This must be done before the + // lhs is read because we need the StringBuilder to be placed on the + // stack ahead of any potential concatenation arguments. + int catElementStackSize = 0; + + if (cat) { + catElementStackSize = writer.writeNewStrings(); + } + + // Cast the lhs to a storeable to perform the necessary operations to store the rhs. + AStoreable lhs = (AStoreable)this.lhs; + lhs.setup(writer, globals); // call the setup method on the lhs to prepare for a load/store operation + + if (cat) { + // Handle the case where we are doing a compound assignment + // representing a String concatenation. 
+ + writer.writeDup(lhs.accessElementCount(), catElementStackSize); // dup the top element and insert it + // before concat helper on stack + lhs.load(writer, globals); // read the current lhs's value + writer.writeAppendStrings(lhs.actual); // append the lhs's value using the StringBuilder + + rhs.write(writer, globals); // write the bytecode for the rhs + + if (!(rhs instanceof EBinary) || ((EBinary)rhs).cat) { + writer.writeAppendStrings(rhs.actual); // append the rhs's value unless it's also a concatenation + } + + writer.writeToStrings(); // put the value for string concat onto the stack + writer.writeCast(back); // if necessary, cast the String to the lhs actual type + + if (lhs.read) { + writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // if this lhs is also read + // from dup the value onto the stack + } + + lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + } else if (operation != null) { + // Handle the case where we are doing a compound assignment that + // does not represent a String concatenation. + + writer.writeDup(lhs.accessElementCount(), 0); // if necessary, dup the previous lhs's value + // to be both loaded from and stored to + lhs.load(writer, globals); // load the current lhs's value + + if (lhs.read && post) { + writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also + // read from and is a post increment + } + + writer.writeCast(there); // if necessary cast the current lhs's value + // to the promotion type between the lhs and rhs types + rhs.write(writer, globals); // write the bytecode for the rhs + + // XXX: fix these types, but first we need def compound assignment tests. + // its tricky here as there are possibly explicit casts, too. + // write the operation instruction for compound assignment + if (promote.sort == Sort.DEF) { + writer.writeDynamicBinaryInstruction(location, promote, + Definition.DEF_TYPE, Definition.DEF_TYPE, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT); + } else { + writer.writeBinaryInstruction(location, promote, operation); + } + + writer.writeCast(back); // if necessary cast the promotion type value back to the lhs's type + + if (lhs.read && !post) { + writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also + // read from and is not a post increment + } + + lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + } else { + // Handle the case for a simple write. 
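The compound branch just above is easier to follow with a concrete case. For a short lhs the promoted type is int, so the there cast widens, the operation runs, and the back cast narrows again; when the assignment is also read, the old value is duplicated for a post-increment and the new value otherwise. A minimal Java sketch of those semantics (illustrative only, not the generated bytecode):

public final class CompoundAssignmentSketch {
    public static void main(String[] args) {
        short s = 1;
        // "s += 5": there cast (short to int), add, back cast (int to short)
        s = (short) ((int) s + 5);
        System.out.println(s);                // 6

        int x = 1;
        int post = x++;                       // a read post-increment yields the old value (dup before the operation)
        int pre = ++x;                        // a read pre-increment yields the new value (dup after the operation)
        System.out.println(post + " " + pre); // 1 3
    }
}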
+ + rhs.write(writer, globals); // write the bytecode for the rhs + + if (lhs.read) { + writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also read from + } + + lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index ca6332cd65e..ca2858feb8d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -22,18 +22,17 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; - -import java.util.Objects; -import java.util.Set; - +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.WriterConstants; -import org.elasticsearch.painless.Locals; + +import java.util.Objects; +import java.util.Set; /** * Represents a binary math expression. @@ -41,13 +40,13 @@ import org.elasticsearch.painless.Locals; public final class EBinary extends AExpression { final Operation operation; - AExpression left; - AExpression right; - Type promote; // promoted type - Type shiftDistance; // for shifts, the RHS is promoted independently + private AExpression left; + private AExpression right; + private Type promote = null; // promoted type + private Type shiftDistance = null; // for shifts, the rhs is promoted independently boolean cat = false; - boolean originallyExplicit = false; // record whether there was originally an explicit cast + private boolean originallyExplicit = false; // record whether there was originally an explicit cast public EBinary(Location location, Operation operation, AExpression left, AExpression right) { super(location); @@ -56,7 +55,7 @@ public final class EBinary extends AExpression { this.left = Objects.requireNonNull(left); this.right = Objects.requireNonNull(right); } - + @Override void extractVariables(Set variables) { left.extractVariables(variables); @@ -66,6 +65,7 @@ public final class EBinary extends AExpression { @Override void analyze(Locals locals) { originallyExplicit = explicit; + if (operation == Operation.MUL) { analyzeMul(locals); } else if (operation == Operation.DIV) { @@ -153,9 +153,11 @@ public final class EBinary extends AExpression { } actual = promote; + if (promote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } @@ -182,8 +184,8 @@ public final class EBinary extends AExpression { } else { throw createError(new IllegalStateException("Illegal tree structure.")); } - } catch (ArithmeticException e) { - throw createError(e); + } catch (ArithmeticException exception) { + throw createError(exception); } } } @@ -204,6 +206,7 @@ public final class EBinary extends AExpression { if (promote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } @@ 
-230,8 +233,8 @@ public final class EBinary extends AExpression { } else { throw createError(new IllegalStateException("Illegal tree structure.")); } - } catch (ArithmeticException e) { - throw createError(e); + } catch (ArithmeticException exception) { + throw createError(exception); } } } @@ -266,6 +269,7 @@ public final class EBinary extends AExpression { } else if (sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } @@ -311,6 +315,7 @@ public final class EBinary extends AExpression { if (promote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } @@ -349,7 +354,6 @@ public final class EBinary extends AExpression { left = left.cast(variables); right = right.cast(variables); - // It'd be nice to be able to do constant folding here but we can't because constants aren't flowing through EChain promote = Definition.BOOLEAN_TYPE; actual = Definition.BOOLEAN_TYPE; } @@ -372,11 +376,13 @@ public final class EBinary extends AExpression { if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } } else { left.expected = lhspromote; + if (rhspromote.sort == Sort.LONG) { right.expected = Definition.INT_TYPE; right.explicit = true; @@ -419,12 +425,14 @@ public final class EBinary extends AExpression { if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } } else { left.expected = lhspromote; - if (rhspromote.sort == Sort.LONG) { + + if (rhspromote.sort == Sort.LONG) { right.expected = Definition.INT_TYPE; right.explicit = true; } else { @@ -466,12 +474,14 @@ public final class EBinary extends AExpression { if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } } else { left.expected = lhspromote; - if (rhspromote.sort == Sort.LONG) { + + if (rhspromote.sort == Sort.LONG) { right.expected = Definition.INT_TYPE; right.explicit = true; } else { @@ -511,6 +521,7 @@ public final class EBinary extends AExpression { if (promote.sort == Sort.DEF) { left.expected = left.actual; right.expected = right.actual; + if (expected != null) { actual = expected; } @@ -628,25 +639,31 @@ public final class EBinary extends AExpression { left.write(writer, globals); - if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) { + if (!(left instanceof EBinary) || !((EBinary)left).cat) { writer.writeAppendStrings(left.actual); } right.write(writer, globals); - if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) { + if (!(right instanceof EBinary) || !((EBinary)right).cat) { writer.writeAppendStrings(right.actual); } if (!cat) { writer.writeToStrings(); } - } else if (operation == Operation.FIND) { - writeBuildMatcher(writer, globals); - writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_FIND); - } else if (operation == Operation.MATCH) { - writeBuildMatcher(writer, globals); - writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_MATCHES); + } else if (operation == Operation.FIND || operation == Operation.MATCH) { + right.write(writer, globals); + left.write(writer, 
globals); + writer.invokeVirtual(Definition.PATTERN_TYPE.type, WriterConstants.PATTERN_MATCHER); + + if (operation == Operation.FIND) { + writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_FIND); + } else if (operation == Operation.MATCH) { + writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_MATCHES); + } else { + throw new IllegalStateException("Illegal tree structure."); + } } else { left.write(writer, globals); right.write(writer, globals); @@ -663,13 +680,5 @@ public final class EBinary extends AExpression { writer.writeBinaryInstruction(location, actual, operation); } } - - writer.writeBranch(tru, fals); - } - - private void writeBuildMatcher(MethodWriter writer, Globals globals) { - right.write(writer, globals); - left.write(writer, globals); - writer.invokeVirtual(Definition.PATTERN_TYPE.type, WriterConstants.PATTERN_MATCHER); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index d707cc811f9..706a00c4925 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -30,15 +30,16 @@ import java.util.Objects; import java.util.Set; import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Opcodes; /** * Represents a boolean expression. */ public final class EBool extends AExpression { - final Operation operation; - AExpression left; - AExpression right; + private final Operation operation; + private AExpression left; + private AExpression right; public EBool(Location location, Operation operation, AExpression left, AExpression right) { super(location); @@ -47,7 +48,7 @@ public final class EBool extends AExpression { this.left = Objects.requireNonNull(left); this.right = Objects.requireNonNull(right); } - + @Override void extractVariables(Set variables) { left.extractVariables(variables); @@ -79,72 +80,38 @@ public final class EBool extends AExpression { @Override void write(MethodWriter writer, Globals globals) { - if (tru != null || fals != null) { - if (operation == Operation.AND) { - Label localfals = fals == null ? new Label() : fals; + if (operation == Operation.AND) { + Label fals = new Label(); + Label end = new Label(); - left.fals = localfals; - right.tru = tru; - right.fals = fals; + left.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); + right.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); - left.write(writer, globals); - right.write(writer, globals); + writer.push(true); + writer.goTo(end); + writer.mark(fals); + writer.push(false); + writer.mark(end); + } else if (operation == Operation.OR) { + Label tru = new Label(); + Label fals = new Label(); + Label end = new Label(); - if (fals == null) { - writer.mark(localfals); - } - } else if (operation == Operation.OR) { - Label localtru = tru == null ? 
new Label() : tru; + left.write(writer, globals); + writer.ifZCmp(Opcodes.IFNE, tru); + right.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); - left.tru = localtru; - right.tru = tru; - right.fals = fals; - - left.write(writer, globals); - right.write(writer, globals); - - if (tru == null) { - writer.mark(localtru); - } - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } + writer.mark(tru); + writer.push(true); + writer.goTo(end); + writer.mark(fals); + writer.push(false); + writer.mark(end); } else { - if (operation == Operation.AND) { - Label localfals = new Label(); - Label end = new Label(); - - left.fals = localfals; - right.fals = localfals; - - left.write(writer, globals); - right.write(writer, globals); - - writer.push(true); - writer.goTo(end); - writer.mark(localfals); - writer.push(false); - writer.mark(end); - } else if (operation == Operation.OR) { - Label localtru = new Label(); - Label localfals = new Label(); - Label end = new Label(); - - left.tru = localtru; - right.fals = localfals; - - left.write(writer, globals); - right.write(writer, globals); - - writer.mark(localtru); - writer.push(true); - writer.goTo(end); - writer.mark(localfals); - writer.push(false); - writer.mark(end); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } + throw createError(new IllegalStateException("Illegal tree structure.")); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index dc25bb0ed44..17c72b43a4f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -37,12 +37,18 @@ public final class EBoolean extends AExpression { this.constant = constant; } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + // Do nothing. + } @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from constant [" + constant + "].")); + } + actual = Definition.BOOLEAN_TYPE; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java similarity index 59% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallLocal.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index 7aa46f004d6..c50cb3d6eed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -35,20 +35,20 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; /** * Represents a user-defined call. 
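Before the ECallLocal rename continues below, the EBool.write rewrite above deserves a concrete reading: the old tru/fals label plumbing is gone, each operand is evaluated to a boolean and tested with ifZCmp, and the node itself pushes true or false. A rough Java equivalent of the emitted control flow (a sketch, not the actual bytecode):

public final class ShortCircuitSketch {
    static boolean and(boolean a, boolean b) {
        if (!a) {
            return false;   // ifZCmp(IFEQ, fals) on the left operand
        }
        if (!b) {
            return false;   // ifZCmp(IFEQ, fals) on the right operand
        }
        return true;        // push(true); the fals label pushes false
    }

    static boolean or(boolean a, boolean b) {
        if (a) {
            return true;    // ifZCmp(IFNE, tru) on the left operand
        }
        if (!b) {
            return false;   // ifZCmp(IFEQ, fals) on the right operand
        }
        return true;        // falls through to the tru label, which pushes true
    }

    public static void main(String[] args) {
        System.out.println(and(true, false) + " " + or(false, true)); // false true
    }
}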
*/ -public class LCallLocal extends ALink { +public final class ECallLocal extends AExpression { - final String name; - final List arguments; + private final String name; + private final List arguments; - Method method = null; + private Method method = null; - public LCallLocal(Location location, String name, List arguments) { - super(location, -1); + public ECallLocal(Location location, String name, List arguments) { + super(location); this.name = Objects.requireNonNull(name); this.arguments = Objects.requireNonNull(arguments); } - + @Override void extractVariables(Set variables) { for (AExpression argument : arguments) { @@ -57,42 +57,29 @@ public class LCallLocal extends ALink { } @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Illegal call [" + name + "] against an existing target.")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot assign a value to a call [" + name + "].")); - } - + void analyze(Locals locals) { MethodKey methodKey = new MethodKey(name, arguments.size()); method = locals.getMethod(methodKey); - if (method != null) { - for (int argument = 0; argument < arguments.size(); ++argument) { - AExpression expression = arguments.get(argument); - - expression.expected = method.arguments.get(argument); - expression.internal = true; - expression.analyze(locals); - arguments.set(argument, expression.cast(locals)); - } - - statement = true; - after = method.rtn; - - return this; + if (method == null) { + throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); } - throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); + for (int argument = 0; argument < arguments.size(); ++argument) { + AExpression expression = arguments.get(argument); + + expression.expected = method.arguments.get(argument); + expression.internal = true; + expression.analyze(locals); + arguments.set(argument, expression.cast(locals)); + } + + statement = true; + actual = method.rtn; } @Override void write(MethodWriter writer, Globals globals) { - // Do nothing. 
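The rewritten analyze above resolves a user-defined call purely by name and argument count (MethodKey) and fails fast when nothing matches, which is why the error message reports both pieces. A self-contained sketch of that kind of name/arity lookup (a hypothetical helper for illustration, not Painless internals):

import java.util.HashMap;
import java.util.Map;

public final class CallLookupSketch {
    private final Map<String, String> returnTypes = new HashMap<>();

    void define(String name, int arity, String returnType) {
        returnTypes.put(name + "/" + arity, returnType);
    }

    String resolve(String name, int arity) {
        String rtn = returnTypes.get(name + "/" + arity);
        if (rtn == null) {
            throw new IllegalArgumentException(
                    "Unknown call [" + name + "] with [" + arity + "] arguments.");
        }
        return rtn;
    }

    public static void main(String[] args) {
        CallLookupSketch lookup = new CallLookupSketch();
        lookup.define("twice", 1, "int");
        System.out.println(lookup.resolve("twice", 1)); // int
    }
}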
- } - - @Override - void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); for (AExpression argument : arguments) { @@ -101,9 +88,4 @@ public class LCallLocal extends ALink { writer.invokeStatic(CLASS_TYPE, method.method); } - - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index 7bf8e195d6d..bf85665e4df 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -23,29 +23,29 @@ import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - import java.lang.invoke.LambdaMetafactory; import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; + /** * Represents a capturing function reference. */ -public class ECapturingFunctionRef extends AExpression implements ILambda { - public final String variable; - public final String call; - +public final class ECapturingFunctionRef extends AExpression implements ILambda { + private final String variable; + private final String call; + private FunctionRef ref; - Variable captured; - String defPointer; + private Variable captured; + private String defPointer; public ECapturingFunctionRef(Location location, String variable, String call) { super(location); @@ -53,7 +53,7 @@ public class ECapturingFunctionRef extends AExpression implements ILambda { this.variable = Objects.requireNonNull(variable); this.call = Objects.requireNonNull(call); } - + @Override void extractVariables(Set variables) { variables.add(variable); @@ -106,27 +106,27 @@ public class ECapturingFunctionRef extends AExpression implements ILambda { Type samMethodType = Type.getMethodType(ref.samMethodType.toMethodDescriptorString()); Type interfaceType = Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString()); if (ref.needsBridges()) { - writer.invokeDynamic(ref.invokedName, - invokedType, - LAMBDA_BOOTSTRAP_HANDLE, - samMethodType, - ref.implMethodASM, - samMethodType, - LambdaMetafactory.FLAG_BRIDGES, - 1, + writer.invokeDynamic(ref.invokedName, + invokedType, + LAMBDA_BOOTSTRAP_HANDLE, + samMethodType, + ref.implMethodASM, + samMethodType, + LambdaMetafactory.FLAG_BRIDGES, + 1, interfaceType); } else { - writer.invokeDynamic(ref.invokedName, - invokedType, - LAMBDA_BOOTSTRAP_HANDLE, - samMethodType, - ref.implMethodASM, - samMethodType, + writer.invokeDynamic(ref.invokedName, + invokedType, + LAMBDA_BOOTSTRAP_HANDLE, + samMethodType, + ref.implMethodASM, + samMethodType, 0); } } } - + @Override public String getPointer() { return defPointer; diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index c7dda568ff5..e575af24127 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -30,27 +30,23 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.MethodWriter; /** - * Represents an implicit cast in most cases, though it will replace - * explicit casts in the tree for simplicity. (Internal only.) + * Represents a cast that is inserted into the tree replacing other casts. (Internal only.) */ final class ECast extends AExpression { - final String type; - AExpression child; - - Cast cast = null; + private AExpression child; + private final Cast cast; ECast(Location location, AExpression child, Cast cast) { super(location); - this.type = null; this.child = Objects.requireNonNull(child); this.cast = Objects.requireNonNull(cast); } - + @Override void extractVariables(Set variables) { - child.extractVariables(variables); + throw new IllegalStateException("Illegal tree structure."); } @Override @@ -63,6 +59,5 @@ final class ECast extends AExpression { child.write(writer, globals); writer.writeDebugInfo(location); writer.writeCast(cast); - writer.writeBranch(tru, fals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java deleted file mode 100644 index 44fe019990f..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java +++ /dev/null @@ -1,405 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.node; - -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Operation; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Set; - -/** - * Represents the entirety of a variable/method chain for read/write operations. 
- */ -public final class EChain extends AExpression { - - final List links; - final boolean pre; - final boolean post; - Operation operation; - AExpression expression; - - boolean cat = false; - Type promote = null; - Type shiftDistance; // for shifts, the RHS is promoted independently - Cast there = null; - Cast back = null; - - /** Creates a new RHS-only EChain */ - public EChain(Location location, ALink link) { - this(location, Arrays.asList(link), false, false, null, null); - } - - public EChain(Location location, List links, - boolean pre, boolean post, Operation operation, AExpression expression) { - super(location); - - this.links = Objects.requireNonNull(links); - this.pre = pre; - this.post = post; - this.operation = operation; - this.expression = expression; - } - - @Override - void extractVariables(Set variables) { - for (ALink link : links) { - link.extractVariables(variables); - } - if (expression != null) { - expression.extractVariables(variables); - } - } - - @Override - void analyze(Locals locals) { - analyzeLinks(locals); - analyzeIncrDecr(); - - if (operation != null) { - analyzeCompound(locals); - } else if (expression != null) { - analyzeWrite(locals); - } else { - analyzeRead(); - } - } - - private void analyzeLinks(Locals variables) { - ALink previous = null; - int index = 0; - - while (index < links.size()) { - ALink current = links.get(index); - - if (previous != null) { - current.before = previous.after; - - if (index == 1) { - current.statik = previous.statik; - } - } - - if (index == links.size() - 1) { - current.load = read; - current.store = expression != null || pre || post; - } - - ALink analyzed = current.analyze(variables); - - if (analyzed == null) { - links.remove(index); - } else { - if (analyzed != current) { - links.set(index, analyzed); - } - - previous = analyzed; - ++index; - } - } - - if (links.get(0).statik) { - links.remove(0); - } - } - - private void analyzeIncrDecr() { - ALink last = links.get(links.size() - 1); - - if (pre && post) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } else if (pre || post) { - if (expression != null) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - Sort sort = last.after.sort; - - if (operation == Operation.INCR) { - if (sort == Sort.DOUBLE) { - expression = new EConstant(location, 1D); - } else if (sort == Sort.FLOAT) { - expression = new EConstant(location, 1F); - } else if (sort == Sort.LONG) { - expression = new EConstant(location, 1L); - } else { - expression = new EConstant(location, 1); - } - - operation = Operation.ADD; - } else if (operation == Operation.DECR) { - if (sort == Sort.DOUBLE) { - expression = new EConstant(location, 1D); - } else if (sort == Sort.FLOAT) { - expression = new EConstant(location, 1F); - } else if (sort == Sort.LONG) { - expression = new EConstant(location, 1L); - } else { - expression = new EConstant(location, 1); - } - - operation = Operation.SUB; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - } - - private void analyzeCompound(Locals variables) { - ALink last = links.get(links.size() - 1); - - expression.analyze(variables); - boolean shift = false; - - if (operation == Operation.MUL) { - promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); - } else if (operation == Operation.DIV) { - promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); - } else if (operation == Operation.REM) { - promote = 
AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); - } else if (operation == Operation.ADD) { - promote = AnalyzerCaster.promoteAdd(last.after, expression.actual); - } else if (operation == Operation.SUB) { - promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); - } else if (operation == Operation.LSH) { - promote = AnalyzerCaster.promoteNumeric(last.after, false); - shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false); - shift = true; - } else if (operation == Operation.RSH) { - promote = AnalyzerCaster.promoteNumeric(last.after, false); - shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false); - shift = true; - } else if (operation == Operation.USH) { - promote = AnalyzerCaster.promoteNumeric(last.after, false); - shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false); - shift = true; - } else if (operation == Operation.BWAND) { - promote = AnalyzerCaster.promoteXor(last.after, expression.actual); - } else if (operation == Operation.XOR) { - promote = AnalyzerCaster.promoteXor(last.after, expression.actual); - } else if (operation == Operation.BWOR) { - promote = AnalyzerCaster.promoteXor(last.after, expression.actual); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - if (promote == null || (shift && shiftDistance == null)) { - throw createError(new ClassCastException("Cannot apply compound assignment " + - "[" + operation.symbol + "=] to types [" + last.after + "] and [" + expression.actual + "].")); - } - - cat = operation == Operation.ADD && promote.sort == Sort.STRING; - - if (cat) { - if (expression instanceof EBinary && ((EBinary)expression).operation == Operation.ADD && - expression.actual.sort == Sort.STRING) { - ((EBinary)expression).cat = true; - } - - expression.expected = expression.actual; - } else if (shift) { - if (promote.sort == Sort.DEF) { - // shifts are promoted independently, but for the def type, we need object. - expression.expected = promote; - } else if (shiftDistance.sort == Sort.LONG) { - expression.expected = Definition.INT_TYPE; - expression.explicit = true; - } else { - expression.expected = shiftDistance; - } - } else { - expression.expected = promote; - } - - expression = expression.cast(variables); - - there = AnalyzerCaster.getLegalCast(location, last.after, promote, false, false); - back = AnalyzerCaster.getLegalCast(location, promote, last.after, true, false); - - this.statement = true; - this.actual = read ? last.after : Definition.VOID_TYPE; - } - - private void analyzeWrite(Locals variables) { - ALink last = links.get(links.size() - 1); - - // If the store node is a def node, we remove the cast to def from the expression - // and promote the real type to it: - if (last instanceof IDefLink) { - expression.analyze(variables); - last.after = expression.expected = expression.actual; - } else { - // otherwise we adapt the type of the expression to the store type - expression.expected = last.after; - expression.analyze(variables); - } - - expression = expression.cast(variables); - - this.statement = true; - this.actual = read ? 
last.after : Definition.VOID_TYPE; - } - - private void analyzeRead() { - ALink last = links.get(links.size() - 1); - - // If the load node is a def node, we adapt its after type to use _this_ expected output type: - if (last instanceof IDefLink && this.expected != null) { - last.after = this.expected; - } - - constant = last.string; - statement = last.statement; - actual = last.after; - } - - /** - * Handles writing byte code for variable/method chains for all given possibilities - * including String concatenation, compound assignment, regular assignment, and simple - * reads. Includes proper duplication for chained assignments and assignments that are - * also read from. - * - * Example given 'x[0] += 5;' where x is an array of shorts and x[0] is 1. - * Note this example has two links -- x (LVariable) and [0] (LBrace). - * The following steps occur: - * 1. call link{x}.write(...) -- no op [...] - * 2. call link{x}.load(...) -- loads the address of the x array onto the stack [..., address(x)] - * 3. call writer.dup(...) -- dup's the address of the x array onto the stack for later use with store [..., address(x), address(x)] - * 4. call link{[0]}.write(...) -- load the array index value of the constant int 0 onto the stack [..., address(x), address(x), int(0)] - * 5. call link{[0]}.load(...) -- load the short value from x[0] onto the stack [..., address(x), short(1)] - * 6. call writer.writeCast(there) -- casts the short on the stack to an int so it can be added with the rhs [..., address(x), int(1)] - * 7. call expression.write(...) -- puts the expression's value of the constant int 5 onto the stack [..., address(x), int(1), int(5)] - * 8. call writer.writeBinaryInstruction(operation) -- writes the int addition instruction [..., address(x), int(6)] - * 9. call writer.writeCast(back) -- convert the value on the stack back into a short [..., address(x), short(6)] - * 10. call link{[0]}.store(...) -- store the value on the stack into the 0th index of the array x [...] - */ - @Override - void write(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - - // For the case where the chain represents a String concatenation - // we must, depending on the Java version, write a StringBuilder or - // track types going onto the stack. This must be done before the - // links in the chain are read because we need the StringBuilder to - // be placed on the stack ahead of any potential concatenation arguments. - int catElementStackSize = 0; - if (cat) { - catElementStackSize = writer.writeNewStrings(); - } - - ALink last = links.get(links.size() - 1); - - // Go through all the links in the chain first calling write - // and then load, except for the final link which may be a store. - // See individual links for more information on what each of the - // write, load, and store methods do. - for (ALink link : links) { - link.write(writer, globals); // call the write method on the link to prepare for a load/store operation - - if (link == last && link.store) { - if (cat) { - // Handle the case where we are doing a compound assignment - // representing a String concatenation. 
- - writer.writeDup(link.size, catElementStackSize); // dup the top element and insert it before concat helper on stack - link.load(writer, globals); // read the current link's value - writer.writeAppendStrings(link.after); // append the link's value using the StringBuilder - - expression.write(writer, globals); // write the bytecode for the rhs expression - - if (!(expression instanceof EBinary) || - ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) { - writer.writeAppendStrings(expression.actual); // append the expression's value unless it's also a concatenation - } - - writer.writeToStrings(); // put the value for string concat onto the stack - writer.writeCast(back); // if necessary, cast the String to the lhs actual type - - if (link.load) { - writer.writeDup(link.after.sort.size, link.size); // if this link is also read from dup the value onto the stack - } - - link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array - } else if (operation != null) { - // Handle the case where we are doing a compound assignment that - // does not represent a String concatenation. - - writer.writeDup(link.size, 0); // if necessary, dup the previous link's value to be both loaded from and stored to - link.load(writer, globals); // load the current link's value - - if (link.load && post) { - writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also - // read from and is a post increment - } - - writer.writeCast(there); // if necessary cast the current link's value - // to the promotion type between the lhs and rhs types - expression.write(writer, globals); // write the bytecode for the rhs expression - // XXX: fix these types, but first we need def compound assignment tests. - // its tricky here as there are possibly explicit casts, too. - // write the operation instruction for compound assignment - if (promote.sort == Sort.DEF) { - writer.writeDynamicBinaryInstruction(location, promote, - Definition.DEF_TYPE, Definition.DEF_TYPE, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT); - } else { - writer.writeBinaryInstruction(location, promote, operation); - } - - writer.writeCast(back); // if necessary cast the promotion type value back to the link's type - - if (link.load && !post) { - writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also - // read from and is not a post increment - } - - link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array - } else { - // Handle the case for a simple write. - - expression.write(writer, globals); // write the bytecode for the rhs expression - - if (link.load) { - writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also read from - } - - link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array - } - } else { - // Handle the case for a simple read. 
- - link.load(writer, globals); // read the link's value onto the stack - } - } - - writer.writeBranch(tru, fals); // if this is a branch node, write the bytecode to make an appropiate jump - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index d76fbbe9065..4b4e902774a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -43,10 +43,11 @@ import static org.elasticsearch.painless.WriterConstants.EQUALS; */ public final class EComp extends AExpression { - final Operation operation; - AExpression left; - AExpression right; - Type promotedType; + private final Operation operation; + private AExpression left; + private AExpression right; + + private Type promotedType; public EComp(Location location, Operation operation, AExpression left, AExpression right) { super(location); @@ -55,7 +56,7 @@ public final class EComp extends AExpression { this.left = Objects.requireNonNull(left); this.right = Objects.requireNonNull(right); } - + @Override void extractVariables(Set variables) { left.extractVariables(variables); @@ -449,25 +450,21 @@ public final class EComp extends AExpression { void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - boolean branch = tru != null || fals != null; - left.write(writer, globals); if (!right.isNull) { right.write(writer, globals); } - Label jump = tru != null ? tru : fals != null ? fals : new Label(); + Label jump = new Label(); Label end = new Label(); - boolean eq = (operation == Operation.EQ || operation == Operation.EQR) && (tru != null || fals == null) || - (operation == Operation.NE || operation == Operation.NER) && fals != null; - boolean ne = (operation == Operation.NE || operation == Operation.NER) && (tru != null || fals == null) || - (operation == Operation.EQ || operation == Operation.EQR) && fals != null; - boolean lt = operation == Operation.LT && (tru != null || fals == null) || operation == Operation.GTE && fals != null; - boolean lte = operation == Operation.LTE && (tru != null || fals == null) || operation == Operation.GT && fals != null; - boolean gt = operation == Operation.GT && (tru != null || fals == null) || operation == Operation.LTE && fals != null; - boolean gte = operation == Operation.GTE && (tru != null || fals == null) || operation == Operation.LT && fals != null; + boolean eq = (operation == Operation.EQ || operation == Operation.EQR); + boolean ne = (operation == Operation.NE || operation == Operation.NER); + boolean lt = operation == Operation.LT; + boolean lte = operation == Operation.LTE; + boolean gt = operation == Operation.GT; + boolean gte = operation == Operation.GTE; boolean writejump = true; @@ -478,8 +475,8 @@ public final class EComp extends AExpression { case CHAR: throw createError(new IllegalStateException("Illegal tree structure.")); case BOOL: - if (eq) writer.ifZCmp(MethodWriter.EQ, jump); - else if (ne) writer.ifZCmp(MethodWriter.NE, jump); + if (eq) writer.ifCmp(promotedType.type, MethodWriter.EQ, jump); + else if (ne) writer.ifCmp(promotedType.type, MethodWriter.NE, jump); else { throw createError(new IllegalStateException("Illegal tree structure.")); } @@ -503,10 +500,11 @@ public final class EComp extends AExpression { case DEF: org.objectweb.asm.Type booleanType = org.objectweb.asm.Type.getType(boolean.class); 
org.objectweb.asm.Type descriptor = org.objectweb.asm.Type.getMethodType(booleanType, left.actual.type, right.actual.type); + if (eq) { if (right.isNull) { writer.ifNull(jump); - } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + } else if (!left.isNull && operation == Operation.EQ) { writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); writejump = false; } else { @@ -515,7 +513,7 @@ public final class EComp extends AExpression { } else if (ne) { if (right.isNull) { writer.ifNonNull(jump); - } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + } else if (!left.isNull && operation == Operation.NE) { writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); writer.ifZCmp(MethodWriter.EQ, jump); } else { @@ -537,22 +535,13 @@ public final class EComp extends AExpression { throw createError(new IllegalStateException("Illegal tree structure.")); } - if (branch && !writejump) { - writer.ifZCmp(MethodWriter.NE, jump); - } - break; default: if (eq) { if (right.isNull) { writer.ifNull(jump); - } else if (operation == Operation.EQ || operation == Operation.NE) { + } else if (operation == Operation.EQ) { writer.invokeStatic(OBJECTS_TYPE, EQUALS); - - if (branch) { - writer.ifZCmp(MethodWriter.NE, jump); - } - writejump = false; } else { writer.ifCmp(promotedType.type, MethodWriter.EQ, jump); @@ -560,7 +549,7 @@ public final class EComp extends AExpression { } else if (ne) { if (right.isNull) { writer.ifNonNull(jump); - } else if (operation == Operation.EQ || operation == Operation.NE) { + } else if (operation == Operation.NE) { writer.invokeStatic(OBJECTS_TYPE, EQUALS); writer.ifZCmp(MethodWriter.EQ, jump); } else { @@ -571,7 +560,7 @@ public final class EComp extends AExpression { } } - if (!branch && writejump) { + if (writejump) { writer.push(false); writer.goTo(end); writer.mark(jump); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index e05419e1c52..1f9fe8bdfcb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -31,15 +31,16 @@ import java.util.Objects; import java.util.Set; import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Opcodes; /** * Respresents a conditional expression. 
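Before the EConditional changes below, note what the EComp.write simplification above amounts to: a comparison no longer fuses with an enclosing branch; it always leaves a boolean on the stack using a single conditional jump plus the push(false)/push(true) pattern. In Java terms the shape for x < y is roughly (a sketch):

public final class ComparisonSketch {
    static boolean lessThan(int x, int y) {
        if (x < y) {      // the ifCmp(..., jump) call jumps when the comparison holds
            return true;  // mark(jump); push(true)
        }
        return false;     // push(false); goTo(end)
    }

    public static void main(String[] args) {
        System.out.println(lessThan(1, 2)); // true
    }
}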
*/ public final class EConditional extends AExpression { - AExpression condition; - AExpression left; - AExpression right; + private AExpression condition; + private AExpression left; + private AExpression right; public EConditional(Location location, AExpression condition, AExpression left, AExpression right) { super(location); @@ -48,7 +49,7 @@ public final class EConditional extends AExpression { this.left = Objects.requireNonNull(left); this.right = Objects.requireNonNull(right); } - + @Override void extractVariables(Set variables) { condition.extractVariables(variables); @@ -93,17 +94,15 @@ public final class EConditional extends AExpression { void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - Label localfals = new Label(); + Label fals = new Label(); Label end = new Label(); - condition.fals = localfals; - left.tru = right.tru = tru; - left.fals = right.fals = fals; - condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); + left.write(writer, globals); writer.goTo(end); - writer.mark(localfals); + writer.mark(fals); right.write(writer, globals); writer.mark(end); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index c5e10a340d6..722a339c822 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -30,8 +30,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.MethodWriter; /** - * Represents a constant. Note this replaces any other expression - * node with a constant value set during a cast. (Internal only.) + * Represents a constant inserted into the tree replacing + * other constants during constant folding. (Internal only.) 
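Before the EConstant hunk continues below, the new EConditional.write shown above is simply the ternary pattern: one IFEQ on the condition, the left expression, and a jump over the right. A Java sketch of the same control flow:

public final class ConditionalSketch {
    static int choose(boolean condition, int left, int right) {
        if (condition) {  // condition.write(...); ifZCmp(Opcodes.IFEQ, fals)
            return left;  // left.write(...); goTo(end)
        }
        return right;     // mark(fals); right.write(...); mark(end)
    }

    public static void main(String[] args) {
        System.out.println(choose(true, 1, 2)); // 1
    }
}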
*/ final class EConstant extends AExpression { @@ -40,9 +40,11 @@ final class EConstant extends AExpression { this.constant = constant; } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + throw new IllegalStateException("Illegal tree structure."); + } @Override void analyze(Locals locals) { @@ -82,22 +84,9 @@ final class EConstant extends AExpression { case CHAR: writer.push((char)constant); break; case SHORT: writer.push((short)constant); break; case BYTE: writer.push((byte)constant); break; - case BOOL: - if (tru != null && (boolean)constant) { - writer.goTo(tru); - } else if (fals != null && !(boolean)constant) { - writer.goTo(fals); - } else if (tru == null && fals == null) { - writer.push((boolean)constant); - } - - break; + case BOOL: writer.push((boolean)constant); break; default: throw createError(new IllegalStateException("Illegal tree structure.")); } - - if (sort != Sort.BOOL) { - writer.writeBranch(tru, fals); - } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index e93b63c3bcd..643861477e7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -33,19 +33,23 @@ import java.util.Set; */ public final class EDecimal extends AExpression { - final String value; + private final String value; public EDecimal(Location location, String value) { super(location); this.value = Objects.requireNonNull(value); } - + @Override void extractVariables(Set variables) {} @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from constant [" + value + "].")); + } + if (value.endsWith("f") || value.endsWith("F")) { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index 71ad952baff..527bcc9a0bb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -33,8 +33,8 @@ import java.util.Set; */ public final class EExplicit extends AExpression { - final String type; - AExpression child; + private final String type; + private AExpression child; public EExplicit(Location location, String type, AExpression child) { super(location); @@ -42,7 +42,7 @@ public final class EExplicit extends AExpression { this.type = Objects.requireNonNull(type); this.child = Objects.requireNonNull(child); } - + @Override void extractVariables(Set variables) { child.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 380bfd6c43f..17ab7c014fa 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -20,30 +20,30 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.FunctionRef; -import org.elasticsearch.painless.Globals; -import 
org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.FunctionRef; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Type; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - import java.lang.invoke.LambdaMetafactory; import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; + /** * Represents a function reference. */ -public class EFunctionRef extends AExpression implements ILambda { - public final String type; - public final String call; +public final class EFunctionRef extends AExpression implements ILambda { + private final String type; + private final String call; private FunctionRef ref; - String defPointer; + private String defPointer; public EFunctionRef(Location location, String type, String call) { super(location); @@ -51,7 +51,7 @@ public class EFunctionRef extends AExpression implements ILambda { this.type = Objects.requireNonNull(type); this.call = Objects.requireNonNull(call); } - + @Override void extractVariables(Set variables) {} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 5f08ff521d8..ba8a3454ee2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -20,12 +20,12 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import java.lang.invoke.MethodType; import java.util.Objects; import java.util.Set; @@ -34,19 +34,20 @@ import java.util.Set; *

* Unlike java's, this works for primitive types too. */ -public class EInstanceof extends AExpression { - AExpression expression; - final String type; - Class resolvedType; - Class expressionType; - boolean primitiveExpression; +public final class EInstanceof extends AExpression { + private AExpression expression; + private final String type; + + private Class resolvedType; + private Class expressionType; + private boolean primitiveExpression; public EInstanceof(Location location, AExpression expression, String type) { super(location); this.expression = Objects.requireNonNull(expression); this.type = Objects.requireNonNull(type); } - + @Override void extractVariables(Set variables) { expression.extractVariables(variables); @@ -54,20 +55,29 @@ public class EInstanceof extends AExpression { @Override void analyze(Locals locals) { - Definition.Type raw = Definition.getType(type); - // map to wrapped type for primitive types - resolvedType = MethodType.methodType(raw.clazz).wrap().returnType(); - expression.analyze(locals); - actual = Definition.BOOLEAN_TYPE; - - Definition.Type expressionRaw = expression.actual; - if (expressionRaw == null) { - expressionRaw = Definition.DEF_TYPE; + final Type type; + + // ensure the specified type is part of the definition + try { + type = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { + throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - // record if the expression returns a primitive - primitiveExpression = expressionRaw.clazz.isPrimitive(); + // map to wrapped type for primitive types - expressionType = MethodType.methodType(expressionRaw.clazz).wrap().returnType(); + resolvedType = type.sort.primitive ? type.sort.boxed : type.clazz; + + // analyze and cast the expression + expression.analyze(locals); + expression.expected = expression.actual; + expression = expression.cast(locals); + + // record if the expression returns a primitive + primitiveExpression = expression.actual.sort.primitive; + // map to wrapped type for primitive types + expressionType = expression.actual.sort.primitive ? expression.actual.sort.boxed : type.clazz; + + actual = Definition.BOOLEAN_TYPE; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 0cbb2ed1b33..802da80860d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; @@ -29,7 +30,6 @@ import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.objectweb.asm.Opcodes; -import org.objectweb.asm.Type; import java.lang.invoke.LambdaMetafactory; import java.util.ArrayList; @@ -47,7 +47,7 @@ import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE * This can currently only be the direct argument of a call (method/constructor). * When the argument is of a known type, it uses * - * Java's lambda translation. However, if its a def call, then we don't have + * Java's lambda translation. 
However, if it's a def call, then we don't have + * enough information, and have to defer this until link time. In that case a placeholder + * and all captures are pushed onto the stack and folded into the signature of the parent call. *

@@ -64,24 +64,25 @@ import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE *
* {@code sort(list, lambda$0(capture))} */ -public class ELambda extends AExpression implements ILambda { - final String name; - final FunctionReserved reserved; - final List paramTypeStrs; - final List paramNameStrs; - final List statements; +public final class ELambda extends AExpression implements ILambda { + + private final String name; + private final FunctionReserved reserved; + private final List paramTypeStrs; + private final List paramNameStrs; + private final List statements; // desugared synthetic method (lambda body) - SFunction desugared; + private SFunction desugared; // captured variables - List captures; + private List captures; // static parent, static lambda - FunctionRef ref; + private FunctionRef ref; // dynamic parent, deferred until link time - String defPointer; + private String defPointer; - public ELambda(String name, FunctionReserved reserved, - Location location, List paramTypes, List paramNames, + public ELambda(String name, FunctionReserved reserved, + Location location, List paramTypes, List paramNames, List statements) { super(location); this.name = Objects.requireNonNull(name); @@ -90,7 +91,7 @@ public class ELambda extends AExpression implements ILambda { this.paramNameStrs = Collections.unmodifiableList(paramNames); this.statements = Collections.unmodifiableList(statements); } - + @Override void extractVariables(Set variables) { for (AStatement statement : statements) { @@ -100,7 +101,7 @@ public class ELambda extends AExpression implements ILambda { @Override void analyze(Locals locals) { - final Definition.Type returnType; + final Type returnType; final List actualParamTypeStrs; Method interfaceMethod; // inspect the target first, set interface method if we know it. @@ -114,12 +115,12 @@ public class ELambda extends AExpression implements ILambda { // we know the method statically, infer return type and any unknown/def types interfaceMethod = expected.struct.getFunctionalMethod(); if (interfaceMethod == null) { - throw createError(new IllegalArgumentException("Cannot pass lambda to [" + expected.name + + throw createError(new IllegalArgumentException("Cannot pass lambda to [" + expected.name + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) - throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + + throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + "] in [" + expected.clazz + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.rtn == Definition.VOID_TYPE) { @@ -159,14 +160,14 @@ public class ELambda extends AExpression implements ILambda { } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); - + // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, returnType.name, name, + desugared = new SFunction(reserved, location, returnType.name, name, paramTypes, paramNames, statements, true); - desugared.generate(); - desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, + desugared.generateSignature(); + desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); - + // setup method reference to synthetic method if (expected == null) { ref = null; @@ -195,8 +196,10 @@ public class ELambda extends AExpression implements ILambda { } // convert 
MethodTypes to asm Type for the constant pool. String invokedType = ref.invokedType.toMethodDescriptorString(); - Type samMethodType = Type.getMethodType(ref.samMethodType.toMethodDescriptorString()); - Type interfaceType = Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString()); + org.objectweb.asm.Type samMethodType = + org.objectweb.asm.Type.getMethodType(ref.samMethodType.toMethodDescriptorString()); + org.objectweb.asm.Type interfaceType = + org.objectweb.asm.Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString()); if (ref.needsBridges()) { writer.invokeDynamic(ref.invokedName, invokedType, @@ -235,8 +238,8 @@ public class ELambda extends AExpression implements ILambda { } @Override - public Type[] getCaptures() { - Type[] types = new Type[captures.size()]; + public org.objectweb.asm.Type[] getCaptures() { + org.objectweb.asm.Type[] types = new org.objectweb.asm.Type[captures.size()]; for (int i = 0; i < types.length; i++) { types[i] = captures.get(i).type.type; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index d6e4616c1d8..dd93ec0ee72 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -33,11 +33,11 @@ import java.util.Set; /** * Represents a list initialization shortcut. */ -public class EListInit extends AExpression { - final List values; +public final class EListInit extends AExpression { + private final List values; - Method constructor = null; - Method method = null; + private Method constructor = null; + private Method method = null; public EListInit(Location location, List values) { super(location); @@ -54,6 +54,10 @@ public class EListInit extends AExpression { @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from list initializer.")); + } + try { actual = Definition.getType("ArrayList"); } catch (IllegalArgumentException exception) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index d36ac50c24e..0dcba39d457 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -33,12 +33,12 @@ import java.util.Set; /** * Represents a map initialization shortcut. 
*/ -public class EMapInit extends AExpression { - final List keys; - final List values; +public final class EMapInit extends AExpression { + private final List keys; + private final List values; - Method constructor = null; - Method method = null; + private Method constructor = null; + private Method method = null; public EMapInit(Location location, List keys, List values) { super(location); @@ -60,6 +60,10 @@ public class EMapInit extends AExpression { @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from map initializer.")); + } + try { actual = Definition.getType("HashMap"); } catch (IllegalArgumentException exception) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java similarity index 69% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index f94f344c481..3334b05e74e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -33,14 +33,14 @@ import java.util.Set; /** * Represents an array instantiation. */ -public final class LNewArray extends ALink { +public final class ENewArray extends AExpression { - final String type; - final List arguments; - final boolean initialize; + private final String type; + private final List arguments; + private final boolean initialize; - public LNewArray(Location location, String type, List arguments, boolean initialize) { - super(location, -1); + public ENewArray(Location location, String type, List arguments, boolean initialize) { + super(location); this.type = Objects.requireNonNull(type); this.arguments = Objects.requireNonNull(arguments); @@ -55,13 +55,9 @@ public final class LNewArray extends ALink { } @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Cannot create a new array with a target already defined.")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot assign a value to a new array.")); - } else if (!load) { - throw createError(new IllegalArgumentException("A newly created array must be read.")); + void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("A newly created array must be read from.")); } final Type type; @@ -81,23 +77,16 @@ public final class LNewArray extends ALink { arguments.set(argument, expression.cast(locals)); } - after = Definition.getType(type.struct, initialize ? 1 : arguments.size()); - - return this; + actual = Definition.getType(type.struct, initialize ? 1 : arguments.size()); } @Override void write(MethodWriter writer, Globals globals) { - // Do nothing. 
-    }
-
-    @Override
-    void load(MethodWriter writer, Globals globals) {
         writer.writeDebugInfo(location);
         if (initialize) {
             writer.push(arguments.size());
-            writer.newArray(Definition.getType(after.struct, 0).type);
+            writer.newArray(Definition.getType(actual.struct, 0).type);
             for (int index = 0; index < arguments.size(); ++index) {
                 AExpression argument = arguments.get(index);
@@ -105,7 +94,7 @@ public final class LNewArray extends ALink {
                 writer.dup();
                 writer.push(index);
                 argument.write(writer, globals);
-                writer.arrayStore(Definition.getType(after.struct, 0).type);
+                writer.arrayStore(Definition.getType(actual.struct, 0).type);
             }
         } else {
             for (AExpression argument : arguments) {
@@ -113,15 +102,10 @@ public final class LNewArray extends ALink {
             }
             if (arguments.size() > 1) {
-                writer.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions());
+                writer.visitMultiANewArrayInsn(actual.type.getDescriptor(), actual.type.getDimensions());
             } else {
-                writer.newArray(Definition.getType(after.struct, 0).type);
+                writer.newArray(Definition.getType(actual.struct, 0).type);
             }
         }
     }
-
-    @Override
-    void store(MethodWriter writer, Globals globals) {
-        throw createError(new IllegalStateException("Illegal tree structure."));
-    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java
similarity index 79%
rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java
index 0ac2762f336..7a19491fd09 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java
@@ -35,20 +35,20 @@ import java.util.Set;
 /**
  * Represents an object instantiation.
  */
-public final class LNewObj extends ALink {
+public final class ENewObj extends AExpression {
-    final String type;
-    final List arguments;
+    private final String type;
+    private final List arguments;
-    Method constructor;
+    private Method constructor;
-    public LNewObj(Location location, String type, List arguments) {
-        super(location, -1);
+    public ENewObj(Location location, String type, List arguments) {
+        super(location);
         this.type = Objects.requireNonNull(type);
         this.arguments = Objects.requireNonNull(arguments);
     }
-
+
     @Override
     void extractVariables(Set variables) {
         for (AExpression argument : arguments) {
@@ -57,13 +57,7 @@ public final class LNewObj extends ALink {
     }
     @Override
-    ALink analyze(Locals locals) {
-        if (before != null) {
-            throw createError(new IllegalArgumentException("Illegal new call with a target already defined."));
-        } else if (store) {
-            throw createError(new IllegalArgumentException("Cannot assign a value to a new call."));
-        }
-
+    void analyze(Locals locals) {
         final Type type;
         try {
@@ -94,25 +88,19 @@ public final class LNewObj extends ALink {
             }
             statement = true;
-            after = type;
+            actual = type;
         } else {
             throw createError(new IllegalArgumentException("Unknown new call on type [" + struct.name + "]."));
         }
-
-        return this;
     }
     @Override
     void write(MethodWriter writer, Globals globals) {
-        // Do nothing.
- } - - @Override - void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - writer.newInstance(after.type); - if (load) { + writer.newInstance(actual.type); + + if (read) { writer.dup(); } @@ -122,9 +110,4 @@ public final class LNewObj extends ALink { writer.invokeConstructor(constructor.owner.type, constructor.method); } - - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index ad6562c0e14..b07344e86b1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -37,12 +37,18 @@ public final class ENull extends AExpression { public ENull(Location location) { super(location); } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + // Do nothing. + } @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from null constant.")); + } + isNull = true; if (expected != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index e9a28a1e06a..102a8491226 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -35,8 +35,8 @@ import org.elasticsearch.painless.MethodWriter; */ public final class ENumeric extends AExpression { - final String value; - int radix; + private final String value; + private int radix; public ENumeric(Location location, String value, int radix) { super(location); @@ -44,12 +44,18 @@ public final class ENumeric extends AExpression { this.value = Objects.requireNonNull(value); this.radix = radix; } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + // Do nothing. + } @Override void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from constant [" + value + "].")); + } + if (value.endsWith("d") || value.endsWith("D")) { if (radix != 10) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LRegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java similarity index 64% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LRegex.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java index c737c761cf2..8b9e1bcdc9c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LRegex.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java @@ -35,61 +35,53 @@ import org.elasticsearch.painless.WriterConstants; /** * Represents a regex constant. All regexes are constants. 
*/ -public final class LRegex extends ALink { +public final class ERegex extends AExpression { + private final String pattern; private final int flags; private Constant constant; - public LRegex(Location location, String pattern, String flagsString) { - super(location, 1); + public ERegex(Location location, String pattern, String flagsString) { + super(location); + this.pattern = pattern; + int flags = 0; + for (int c = 0; c < flagsString.length(); c++) { flags |= flagForChar(flagsString.charAt(c)); } + this.flags = flags; - try { - // Compile the pattern early after parsing so we can throw an error to the user with the location - Pattern.compile(pattern, flags); - } catch (PatternSyntaxException e) { - throw createError(e); - } - } - - @Override - void extractVariables(Set variables) {} - - @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Illegal Regex constant [" + pattern + "].")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot write to Regex constant [" + pattern + "].")); - } else if (!load) { - throw createError(new IllegalArgumentException("Regex constant may only be read [" + pattern + "].")); - } - - constant = new Constant(location, Definition.PATTERN_TYPE.type, "regexAt$" + location.getOffset(), this::initializeConstant); - after = Definition.PATTERN_TYPE; - - return this; } @Override - void write(MethodWriter writer, Globals globals) { + void extractVariables(Set variables) { // Do nothing. } @Override - void load(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - writer.getStatic(WriterConstants.CLASS_TYPE, constant.name, Definition.PATTERN_TYPE.type); - globals.addConstantInitializer(constant); + void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Regex constant may only be read [" + pattern + "].")); + } + + try { + Pattern.compile(pattern, flags); + } catch (PatternSyntaxException exception) { + throw createError(exception); + } + + constant = new Constant(location, Definition.PATTERN_TYPE.type, "regexAt$" + location.getOffset(), this::initializeConstant); + actual = Definition.PATTERN_TYPE; } @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); + void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + writer.getStatic(WriterConstants.CLASS_TYPE, constant.name, Definition.PATTERN_TYPE.type); + globals.addConstantInitializer(constant); } private void initializeConstant(MethodWriter writer) { @@ -100,15 +92,16 @@ public final class LRegex extends ALink { private int flagForChar(char c) { switch (c) { - case 'c': return Pattern.CANON_EQ; - case 'i': return Pattern.CASE_INSENSITIVE; - case 'l': return Pattern.LITERAL; - case 'm': return Pattern.MULTILINE; - case 's': return Pattern.DOTALL; - case 'U': return Pattern.UNICODE_CHARACTER_CLASS; - case 'u': return Pattern.UNICODE_CASE; - case 'x': return Pattern.COMMENTS; - default: throw new IllegalArgumentException("Unknown flag [" + c + "]"); + case 'c': return Pattern.CANON_EQ; + case 'i': return Pattern.CASE_INSENSITIVE; + case 'l': return Pattern.LITERAL; + case 'm': return Pattern.MULTILINE; + case 's': return Pattern.DOTALL; + case 'U': return Pattern.UNICODE_CHARACTER_CLASS; + case 'u': return Pattern.UNICODE_CASE; + case 'x': return Pattern.COMMENTS; + default: + throw new IllegalArgumentException("Unknown flag [" + c + "]"); } } } diff 
--git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java similarity index 62% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index d9077a4446f..8e6846c4ef9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -32,47 +32,32 @@ import org.elasticsearch.painless.Locals; /** * Represents a static type target. */ -public final class LStatic extends ALink { +public final class EStatic extends AExpression { - final String type; + private final String type; - public LStatic(Location location, String type) { - super(location, 0); + public EStatic(Location location, String type) { + super(location); this.type = Objects.requireNonNull(type); } - - @Override - void extractVariables(Set variables) {} @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Illegal static type [" + type + "] after target already defined.")); - } + void extractVariables(Set variables) { + // Do nothing. + } + @Override + void analyze(Locals locals) { try { - after = Definition.getType(type); - statik = true; + actual = Definition.getType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } - - return this; } @Override void write(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - @Override - void load(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); + // Do nothing. } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java similarity index 59% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java index ab45c0f3a8e..84062ffccac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java @@ -31,44 +31,30 @@ import java.util.Set; /** * Represents a string constant. 
*/ -public final class LString extends ALink { +public final class EString extends AExpression { - public LString(Location location, String string) { - super(location, -1); + public EString(Location location, String string) { + super(location); - this.string = Objects.requireNonNull(string); - } - - @Override - void extractVariables(Set variables) {} - - @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Illegal String constant [" + string + "].")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot write to read-only String constant [" + string + "].")); - } else if (!load) { - throw createError(new IllegalArgumentException("Must read String constant [" + string + "].")); - } - - after = Definition.STRING_TYPE; - - return this; + this.constant = Objects.requireNonNull(string); } @Override - void write(MethodWriter writer, Globals globals) { + void extractVariables(Set variables) { // Do nothing. } @Override - void load(MethodWriter writer, Globals globals) { - writer.push(string); + void analyze(Locals locals) { + if (!read) { + throw createError(new IllegalArgumentException("Must read from constant [" + constant + "].")); + } + + actual = Definition.STRING_TYPE; } @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); + void write(MethodWriter writer, Globals globals) { + throw new IllegalStateException("Illegal tree structure."); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index a635ec811a3..129e6c67f6e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -34,16 +34,18 @@ import java.util.Objects; import java.util.Set; import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Opcodes; /** * Represents a unary math expression. 
*/ public final class EUnary extends AExpression { - final Operation operation; - AExpression child; - Type promote; - boolean originallyExplicit = false; // record whether there was originally an explicit cast + private final Operation operation; + private AExpression child; + + private Type promote; + private boolean originallyExplicit = false; // record whether there was originally an explicit cast public EUnary(Location location, Operation operation, AExpression child) { super(location); @@ -51,7 +53,7 @@ public final class EUnary extends AExpression { this.operation = Objects.requireNonNull(operation); this.child = Objects.requireNonNull(child); } - + @Override void extractVariables(Set variables) { child.extractVariables(variables); @@ -60,6 +62,7 @@ public final class EUnary extends AExpression { @Override void analyze(Locals locals) { originallyExplicit = explicit; + if (operation == Operation.NOT) { analyzeNot(locals); } else if (operation == Operation.BWNOT) { @@ -191,33 +194,29 @@ public final class EUnary extends AExpression { writer.writeDebugInfo(location); if (operation == Operation.NOT) { - if (tru == null && fals == null) { - Label localfals = new Label(); - Label end = new Label(); + Label fals = new Label(); + Label end = new Label(); - child.fals = localfals; - child.write(writer, globals); + child.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); - writer.push(false); - writer.goTo(end); - writer.mark(localfals); - writer.push(true); - writer.mark(end); - } else { - child.tru = fals; - child.fals = tru; - child.write(writer, globals); - } + writer.push(false); + writer.goTo(end); + writer.mark(fals); + writer.push(true); + writer.mark(end); } else { Sort sort = promote.sort; child.write(writer, globals); - // def calls adopt the wanted return value. if there was a narrowing cast, - // we need to flag that so that its done at runtime. + // Def calls adopt the wanted return value. If there was a narrowing cast, + // we need to flag that so that it's done at runtime. 
int defFlags = 0; + if (originallyExplicit) { defFlags |= DefBootstrap.OPERATOR_EXPLICIT_CAST; } + if (operation == Operation.BWNOT) { if (sort == Sort.DEF) { org.objectweb.asm.Type descriptor = org.objectweb.asm.Type.getMethodType(actual.type, child.actual.type); @@ -244,12 +243,10 @@ public final class EUnary extends AExpression { if (sort == Sort.DEF) { org.objectweb.asm.Type descriptor = org.objectweb.asm.Type.getMethodType(actual.type, child.actual.type); writer.invokeDefCall("plus", descriptor, DefBootstrap.UNARY_OPERATOR, defFlags); - } + } } else { throw createError(new IllegalStateException("Illegal tree structure.")); } - - writer.writeBranch(tru, fals); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java similarity index 65% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java index 4f016349c24..11c29dc7514 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java @@ -19,11 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; import java.util.Objects; @@ -32,52 +33,66 @@ import java.util.Set; /** * Represents a variable load/store. */ -public final class LVariable extends ALink { +public final class EVariable extends AStoreable { - final String name; + private final String name; - Variable variable; + private Variable variable = null; - public LVariable(Location location, String name) { - super(location, 0); + public EVariable(Location location, String name) { + super(location); this.name = Objects.requireNonNull(name); } - + @Override void extractVariables(Set variables) { variables.add(name); } @Override - ALink analyze(Locals locals) { - if (before != null) { - throw createError(new IllegalArgumentException("Illegal variable [" + name + "] access with target already defined.")); - } - + void analyze(Locals locals) { variable = locals.getVariable(location, name); - if (store && variable.readonly) { + if (write && variable.readonly) { throw createError(new IllegalArgumentException("Variable [" + variable.name + "] is read-only.")); } - after = variable.type; - - return this; + actual = variable.type; } @Override void write(MethodWriter writer, Globals globals) { + writer.visitVarInsn(actual.type.getOpcode(Opcodes.ILOAD), variable.getSlot()); + } + + @Override + int accessElementCount() { + return 0; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw new IllegalArgumentException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { // Do nothing. 
} @Override void load(MethodWriter writer, Globals globals) { - writer.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), variable.getSlot()); + writer.visitVarInsn(actual.type.getOpcode(Opcodes.ILOAD), variable.getSlot()); } @Override void store(MethodWriter writer, Globals globals) { - writer.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); + writer.visitVarInsn(actual.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java deleted file mode 100644 index 16b9b8c5d44..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.node; - -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -/** - * Represents an array load/store or defers to possible shortcuts. 
- */ -public final class LBrace extends ALink { - - AExpression index; - - public LBrace(Location location, AExpression index) { - super(location, 2); - - this.index = Objects.requireNonNull(index); - } - - @Override - void extractVariables(Set variables) { - index.extractVariables(variables); - } - - @Override - ALink analyze(Locals locals) { - if (before == null) { - throw createError(new IllegalArgumentException("Illegal array access made without target.")); - } - - Sort sort = before.sort; - - if (sort == Sort.ARRAY) { - index.expected = Definition.INT_TYPE; - index.analyze(locals); - index = index.cast(locals); - - after = Definition.getType(before.struct, before.dimensions - 1); - - return this; - } else if (sort == Sort.DEF) { - return new LDefArray(location, index).copy(this).analyze(locals); - } else if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(location, index).copy(this).analyze(locals); - } else if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(location, index).copy(this).analyze(locals); - } - - throw createError(new IllegalArgumentException("Illegal array access on type [" + before.name + "].")); - } - - @Override - void write(MethodWriter writer, Globals globals) { - index.write(writer, globals); - } - - @Override - void load(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - writer.arrayLoad(after.type); - } - - @Override - void store(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - writer.arrayStore(after.type); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallInvoke.java deleted file mode 100644 index 1056af2aaca..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCallInvoke.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.node; - -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - -import java.lang.invoke.MethodType; -import java.util.List; -import java.util.Objects; -import java.util.Set; - -/** - * Represents a method call or defers to a def call. 
- */ -public final class LCallInvoke extends ALink { - - final String name; - final List arguments; - - Method method = null; - - boolean box = false; // true for primitive types - - public LCallInvoke(Location location, String name, List arguments) { - super(location, -1); - - this.name = Objects.requireNonNull(name); - this.arguments = Objects.requireNonNull(arguments); - } - - @Override - void extractVariables(Set variables) { - for (AExpression argument : arguments) { - argument.extractVariables(variables); - } - } - - @Override - ALink analyze(Locals locals) { - if (before == null) { - throw createError(new IllegalArgumentException("Illegal call [" + name + "] made without target.")); - } else if (before.sort == Sort.ARRAY) { - throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type.")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot assign a value to a call [" + name + "].")); - } - - MethodKey methodKey = new MethodKey(name, arguments.size()); - Struct struct = before.struct; - if (before.clazz.isPrimitive()) { - Class wrapper = MethodType.methodType(before.clazz).wrap().returnType(); - Type boxed = Definition.getType(wrapper.getSimpleName()); - struct = boxed.struct; - box = true; - } - method = statik ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); - - if (method != null) { - for (int argument = 0; argument < arguments.size(); ++argument) { - AExpression expression = arguments.get(argument); - - expression.expected = method.arguments.get(argument); - expression.internal = true; - expression.analyze(locals); - arguments.set(argument, expression.cast(locals)); - } - - statement = true; - after = method.rtn; - - return this; - } else if (before.sort == Sort.DEF) { - ALink link = new LDefCall(location, name, arguments); - link.copy(this); - - return link.analyze(locals); - } - - throw createError(new IllegalArgumentException( - "Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); - } - - @Override - void write(MethodWriter writer, Globals globals) { - // Do nothing. - } - - @Override - void load(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - - if (box) { - writer.box(before.type); - } - - for (AExpression argument : arguments) { - argument.write(writer, globals); - } - - method.write(writer); - } - - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java deleted file mode 100644 index 30d0a033746..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.node; - -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -/** - * Represents a field load/store or defers to a possible shortcuts. - */ -public final class LField extends ALink { - - final String value; - - Field field; - - public LField(Location location, String value) { - super(location, 1); - - this.value = Objects.requireNonNull(value); - } - - @Override - void extractVariables(Set variables) {} - - @Override - ALink analyze(Locals locals) { - if (before == null) { - throw createError(new IllegalArgumentException("Illegal field [" + value + "] access made without target.")); - } - - Sort sort = before.sort; - - if (sort == Sort.ARRAY) { - return new LArrayLength(location, value).copy(this).analyze(locals); - } else if (sort == Sort.DEF) { - return new LDefField(location, value).copy(this).analyze(locals); - } - - Struct struct = before.struct; - field = statik ? struct.staticMembers.get(value) : struct.members.get(value); - - if (field != null) { - if (store && java.lang.reflect.Modifier.isFinal(field.modifiers)) { - throw createError(new IllegalArgumentException( - "Cannot write to read-only field [" + value + "] for type [" + struct.name + "].")); - } - - after = field.type; - - return this; - } else { - boolean shortcut = - struct.methods.containsKey(new Definition.MethodKey("get" + - Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) || - struct.methods.containsKey(new Definition.MethodKey("is" + - Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) || - struct.methods.containsKey(new Definition.MethodKey("set" + - Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); - - if (shortcut) { - return new LShortcut(location, value).copy(this).analyze(locals); - } else { - EConstant index = new EConstant(location, value); - index.analyze(locals); - - if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(location, index).copy(this).analyze(locals); - } - - if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(location, index).copy(this).analyze(locals); - } - } - } - - throw createError(new IllegalArgumentException("Unknown field [" + value + "] for type [" + struct.name + "].")); - } - - @Override - void write(MethodWriter writer, Globals globals) { - // Do nothing. 
- } - - @Override - void load(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - - if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, field.type.type); - } else { - writer.getField(field.owner.type, field.javaName, field.type.type); - } - } - - @Override - void store(MethodWriter writer, Globals globals) { - writer.writeDebugInfo(location); - - if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.putStatic(field.owner.type, field.javaName, field.type.type); - } else { - writer.putField(field.owner.type, field.javaName, field.type.type); - } - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java new file mode 100644 index 00000000000..9732ab72886 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Represents an array load/store and defers to a child subnode. 
+ */ +public final class PBrace extends AStoreable { + + private AExpression index; + + private AStoreable sub = null; + + public PBrace(Location location, AExpression prefix, AExpression index) { + super(location, prefix); + + this.index = Objects.requireNonNull(index); + } + + @Override + void extractVariables(Set variables) { + prefix.extractVariables(variables); + index.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + prefix.analyze(locals); + prefix.expected = prefix.actual; + prefix = prefix.cast(locals); + + Sort sort = prefix.actual.sort; + + if (sort == Sort.ARRAY) { + sub = new PSubBrace(location, prefix.actual, index); + } else if (sort == Sort.DEF) { + sub = new PSubDefArray(location, index); + } else if (Map.class.isAssignableFrom(prefix.actual.clazz)) { + sub = new PSubMapShortcut(location, prefix.actual.struct, index); + } else if (List.class.isAssignableFrom(prefix.actual.clazz)) { + sub = new PSubListShortcut(location, prefix.actual.struct, index); + } else { + throw createError(new IllegalArgumentException("Illegal array access on type [" + prefix.actual.name + "].")); + } + + sub.write = write; + sub.read = read; + sub.expected = expected; + sub.explicit = explicit; + sub.analyze(locals); + actual = sub.actual; + } + + @Override + void write(MethodWriter writer, Globals globals) { + prefix.write(writer, globals); + sub.write(writer, globals); + } + + @Override + boolean isDefOptimized() { + return sub.isDefOptimized(); + } + + @Override + void updateActual(Type actual) { + sub.updateActual(actual); + this.actual = actual; + } + + @Override + int accessElementCount() { + return sub.accessElementCount(); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + prefix.write(writer, globals); + sub.setup(writer, globals); + } + + @Override + void load(MethodWriter writer, Globals globals) { + sub.load(writer, globals); + } + + @Override + void store(MethodWriter writer, Globals globals) { + sub.store(writer, globals); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java new file mode 100644 index 00000000000..e8cfb1eba2e --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.util.List; +import java.util.Objects; +import java.util.Set; + +/** + * Represents a method call and defers to a child subnode. + */ +public final class PCallInvoke extends AExpression { + + private final String name; + private final List arguments; + + private AExpression sub = null; + + public PCallInvoke(Location location, AExpression prefix, String name, List arguments) { + super(location, prefix); + + this.name = Objects.requireNonNull(name); + this.arguments = Objects.requireNonNull(arguments); + } + + @Override + void extractVariables(Set variables) { + prefix.extractVariables(variables); + + for (AExpression argument : arguments) { + argument.extractVariables(variables); + } + } + + @Override + void analyze(Locals locals) { + prefix.analyze(locals); + prefix.expected = prefix.actual; + prefix = prefix.cast(locals); + + if (prefix.actual.sort == Sort.ARRAY) { + throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type.")); + } + + Struct struct = prefix.actual.struct; + + if (prefix.actual.sort.primitive) { + struct = Definition.getType(prefix.actual.sort.boxed.getSimpleName()).struct; + } + + MethodKey methodKey = new MethodKey(name, arguments.size()); + Method method = prefix instanceof EStatic ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); + + if (method != null) { + sub = new PSubCallInvoke(location, method, prefix.actual, arguments); + } else if (prefix.actual.sort == Sort.DEF) { + sub = new PSubDefCall(location, name, arguments); + } else { + throw createError(new IllegalArgumentException( + "Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); + } + + sub.expected = expected; + sub.explicit = explicit; + sub.analyze(locals); + actual = sub.actual; + + statement = true; + } + + @Override + void write(MethodWriter writer, Globals globals) { + prefix.write(writer, globals); + sub.write(writer, globals); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java new file mode 100644 index 00000000000..21a3def3189 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Represents a field load/store and defers to a child subnode. + */ +public final class PField extends AStoreable { + + private final String value; + + private AStoreable sub = null; + + public PField(Location location, AExpression prefix, String value) { + super(location, prefix); + + this.value = Objects.requireNonNull(value); + } + + @Override + void extractVariables(Set variables) { + prefix.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + prefix.analyze(locals); + prefix.expected = prefix.actual; + prefix = prefix.cast(locals); + + Sort sort = prefix.actual.sort; + + if (sort == Sort.ARRAY) { + sub = new PSubArrayLength(location,prefix.actual.name, value); + } else if (sort == Sort.DEF) { + sub = new PSubDefField(location, value); + } else { + Struct struct = prefix.actual.struct; + Field field = prefix instanceof EStatic ? struct.staticMembers.get(value) : struct.members.get(value); + + if (field != null) { + sub = new PSubField(location, field); + } else { + Method getter = struct.methods.get( + new Definition.MethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + + if (getter == null) { + getter = struct.methods.get( + new Definition.MethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + } + + Method setter = struct.methods.get( + new Definition.MethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + + if (getter != null || setter != null) { + sub = new PSubShortcut(location, value, prefix.actual.name, getter, setter); + } else { + EConstant index = new EConstant(location, value); + index.analyze(locals); + + if (Map.class.isAssignableFrom(prefix.actual.clazz)) { + sub = new PSubMapShortcut(location, struct, index); + } + + if (List.class.isAssignableFrom(prefix.actual.clazz)) { + sub = new PSubListShortcut(location, struct, index); + } + } + } + } + + if (sub == null) { + throw createError(new IllegalArgumentException("Unknown field [" + value + "] for type [" + prefix.actual.name + "].")); + } + + sub.write = write; + sub.read = read; + sub.expected = expected; + sub.explicit = explicit; + sub.analyze(locals); + actual = sub.actual; + } + + @Override + void write(MethodWriter writer, Globals globals) { + prefix.write(writer, globals); + sub.write(writer, globals); + } + + @Override + boolean isDefOptimized() { + return sub.isDefOptimized(); + } + + @Override + void updateActual(Type actual) { + sub.updateActual(actual); + this.actual = actual; + } + + @Override + int accessElementCount() { + return sub.accessElementCount(); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + prefix.write(writer, globals); + sub.setup(writer, globals); + } + + @Override + void load(MethodWriter writer, Globals 
globals) { + sub.load(writer, globals); + } + + @Override + void store(MethodWriter writer, Globals globals) { + sub.store(writer, globals); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java similarity index 57% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java index a2970ca2e53..610bd2df77d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java @@ -20,9 +20,10 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import java.util.Objects; @@ -31,49 +32,69 @@ import java.util.Set; /** * Represents an array length field load. */ -public final class LArrayLength extends ALink { +final class PSubArrayLength extends AStoreable { - final String value; + private final String type; + private final String value; - LArrayLength(Location location, String value) { - super(location, -1); + PSubArrayLength(Location location, String type, String value) { + super(location); + this.type = Objects.requireNonNull(type); this.value = Objects.requireNonNull(value); } - - @Override - void extractVariables(Set variables) {} @Override - ALink analyze(Locals locals) { + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void analyze(Locals locals) { if ("length".equals(value)) { - if (!load) { - throw createError(new IllegalArgumentException("Must read array field [length].")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot write to read-only array field [length].")); + if (write) { + throw createError(new IllegalArgumentException("Cannot write to read-only field [length] for an array.")); } - after = Definition.INT_TYPE; + actual = Definition.INT_TYPE; } else { - throw createError(new IllegalArgumentException("Illegal field access [" + value + "].")); + throw createError(new IllegalArgumentException("Field [" + value + "] does not exist for type [" + type + "].")); } - - return this; } @Override void write(MethodWriter writer, Globals globals) { - // Do nothing. 
- } - - @Override - void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); writer.arrayLength(); } + @Override + int accessElementCount() { + throw new IllegalStateException("Illegal tree structure."); + } + + @Override + boolean isDefOptimized() { + throw new IllegalStateException("Illegal tree structure."); + } + + @Override + void updateActual(Type actual) { + throw new IllegalStateException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + throw new IllegalStateException("Illegal tree structure."); + } + + @Override + void load(MethodWriter writer, Globals globals) { + throw new IllegalStateException("Illegal tree structure."); + } + @Override void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); + throw new IllegalStateException("Illegal tree structure."); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java similarity index 62% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java index 828e0a1cd8a..45b3ef88cd1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java @@ -20,64 +20,81 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Locals; -import org.objectweb.asm.Type; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** - * Represents an array load/store or shortcut on a def type. (Internal only.) + * Represents an array load/store. 
*/ -final class LDefArray extends ALink implements IDefLink { +final class PSubBrace extends AStoreable { - AExpression index; + private final Type type; + private AExpression index; - LDefArray(Location location, AExpression index) { - super(location, 2); + PSubBrace(Location location, Type type, AExpression index) { + super(location); + this.type = Objects.requireNonNull(type); this.index = Objects.requireNonNull(index); } - + @Override void extractVariables(Set variables) { index.extractVariables(variables); } @Override - ALink analyze(Locals locals) { + void analyze(Locals locals) { + index.expected = Definition.INT_TYPE; index.analyze(locals); - index.expected = index.actual; index = index.cast(locals); - after = Definition.DEF_TYPE; - - return this; + actual = Definition.getType(type.struct, type.dimensions - 1); } @Override void write(MethodWriter writer, Globals globals) { + if (!write) { + setup(writer, globals); + load(writer, globals); + } + } + + @Override + int accessElementCount() { + return 2; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void setup(MethodWriter writer, Globals globals) { index.write(writer, globals); } @Override void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - - Type methodType = Type.getMethodType(after.type, Definition.DEF_TYPE.type, index.actual.type); - writer.invokeDefCall("arrayLoad", methodType, DefBootstrap.ARRAY_LOAD); + writer.arrayLoad(actual.type); } @Override void store(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - - Type methodType = Type.getMethodType(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, index.actual.type, after.type); - writer.invokeDefCall("arrayStore", methodType, DefBootstrap.ARRAY_STORE); + writer.arrayStore(actual.type); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java similarity index 50% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java index 86dbdda24c4..02dd89bb602 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java @@ -19,67 +19,66 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Definition.Cast; - -import java.util.Set; - -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import java.util.List; +import java.util.Objects; +import java.util.Set; + /** - * Represents a cast made in a variable/method chain. + * Represents a method call. 
*/ -public final class LCast extends ALink { +final class PSubCallInvoke extends AExpression { - final String type; + private final Method method; + private final Type box; + private final List arguments; - Cast cast = null; + public PSubCallInvoke(Location location, Method method, Type box, List arguments) { + super(location); - public LCast(Location location, String type) { - super(location, -1); - - this.type = type; + this.method = Objects.requireNonNull(method); + this.box = box; + this.arguments = Objects.requireNonNull(arguments); } - - @Override - void extractVariables(Set variables) {} @Override - ALink analyze(Locals locals) { - if (before == null) { - throw createError(new IllegalStateException("Illegal cast without a target.")); - } else if (store) { - throw createError(new IllegalArgumentException("Cannot assign a value to a cast.")); + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void analyze(Locals locals) { + for (int argument = 0; argument < arguments.size(); ++argument) { + AExpression expression = arguments.get(argument); + + expression.expected = method.arguments.get(argument); + expression.internal = true; + expression.analyze(locals); + arguments.set(argument, expression.cast(locals)); } - try { - after = Definition.getType(type); - } catch (IllegalArgumentException exception) { - throw createError(new IllegalArgumentException("Not a type [" + type + "].")); - } - - cast = AnalyzerCaster.getLegalCast(location, before, after, true, false); - - return cast != null ? this : null; + statement = true; + actual = method.rtn; } @Override void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - writer.writeCast(cast); - } - @Override - void load(MethodWriter writer, Globals globals) { - // Do nothing. - } + if (box.sort.primitive) { + writer.box(box.type); + } - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); + for (AExpression argument : arguments) { + argument.write(writer, globals); + } + + method.write(writer); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java new file mode 100644 index 00000000000..2153897a000 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.painless.node;
+
+import org.elasticsearch.painless.DefBootstrap;
+import org.elasticsearch.painless.Definition;
+import org.elasticsearch.painless.Definition.Type;
+import org.elasticsearch.painless.Globals;
+import org.elasticsearch.painless.Locals;
+import org.elasticsearch.painless.Location;
+import org.elasticsearch.painless.MethodWriter;
+
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * Represents an array load/store or shortcut on a def type. (Internal only.)
+ */
+final class PSubDefArray extends AStoreable {
+
+    private AExpression index;
+
+    PSubDefArray(Location location, AExpression index) {
+        super(location);
+
+        this.index = Objects.requireNonNull(index);
+    }
+
+    @Override
+    void extractVariables(Set<String> variables) {
+        throw createError(new IllegalStateException("Illegal tree structure."));
+    }
+
+    @Override
+    void analyze(Locals locals) {
+        index.analyze(locals);
+        index.expected = index.actual;
+        index = index.cast(locals);
+
+        actual = expected == null || explicit ? Definition.DEF_TYPE : expected;
+    }
+
+    @Override
+    void write(MethodWriter writer, Globals globals) {
+        index.write(writer, globals);
+
+        writer.writeDebugInfo(location);
+
+        org.objectweb.asm.Type methodType =
+            org.objectweb.asm.Type.getMethodType(actual.type, Definition.DEF_TYPE.type, index.actual.type);
+        writer.invokeDefCall("arrayLoad", methodType, DefBootstrap.ARRAY_LOAD);
+    }
+
+    @Override
+    int accessElementCount() {
+        return 2;
+    }
+
+    @Override
+    boolean isDefOptimized() {
+        return true;
+    }
+
+    @Override
+    void updateActual(Type actual) {
+        this.actual = actual;
+    }
+
+    @Override
+    void setup(MethodWriter writer, Globals globals) {
+        index.write(writer, globals);
+    }
+
+    @Override
+    void load(MethodWriter writer, Globals globals) {
+        writer.writeDebugInfo(location);
+
+        org.objectweb.asm.Type methodType =
+            org.objectweb.asm.Type.getMethodType(actual.type, Definition.DEF_TYPE.type, index.actual.type);
+        writer.invokeDefCall("arrayLoad", methodType, DefBootstrap.ARRAY_LOAD);
+    }
+
+    @Override
+    void store(MethodWriter writer, Globals globals) {
+        writer.writeDebugInfo(location);
+
+        org.objectweb.asm.Type methodType =
+            org.objectweb.asm.Type.getMethodType(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, index.actual.type, actual.type);
+        writer.invokeDefCall("arrayStore", methodType, DefBootstrap.ARRAY_STORE);
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
similarity index 81%
rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java
rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
index dfdaef2369f..7f1d0dd1e95 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
@@ -19,11 +19,11 @@
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Globals;
-import org.elasticsearch.painless.Location;
-import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.Locals;
+import org.elasticsearch.painless.Location;
 import org.elasticsearch.painless.MethodWriter;
 import org.objectweb.asm.Type;
 
@@ -35,31 +35,31 @@
 import java.util.Set;
 
 /**
  * Represents a method call made on a def
type. (Internal only.) */ -final class LDefCall extends ALink implements IDefLink { +final class PSubDefCall extends AExpression { - final String name; - final List arguments; - StringBuilder recipe; - List pointers = new ArrayList<>(); + private final String name; + private final List arguments; - LDefCall(Location location, String name, List arguments) { - super(location, -1); + private StringBuilder recipe = null; + private List pointers = new ArrayList<>(); + + PSubDefCall(Location location, String name, List arguments) { + super(location); this.name = Objects.requireNonNull(name); this.arguments = Objects.requireNonNull(arguments); } - + @Override void extractVariables(Set variables) { - for (AExpression argument : arguments) { - argument.extractVariables(variables); - } + throw createError(new IllegalStateException("Illegal tree structure.")); } @Override - ALink analyze(Locals locals) { + void analyze(Locals locals) { recipe = new StringBuilder(); int totalCaptures = 0; + for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); @@ -79,19 +79,11 @@ final class LDefCall extends ALink implements IDefLink { arguments.set(argument, expression.cast(locals)); } - statement = true; - after = Definition.DEF_TYPE; - - return this; + actual = expected == null || explicit ? Definition.DEF_TYPE : expected; } @Override void write(MethodWriter writer, Globals globals) { - // Do nothing. - } - - @Override - void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); List parameterTypes = new ArrayList<>(); @@ -102,26 +94,24 @@ final class LDefCall extends ALink implements IDefLink { // append each argument for (AExpression argument : arguments) { parameterTypes.add(argument.actual.type); + if (argument instanceof ILambda) { ILambda lambda = (ILambda) argument; + for (Type capture : lambda.getCaptures()) { parameterTypes.add(capture); } } + argument.write(writer, globals); } // create method type from return value and arguments - Type methodType = Type.getMethodType(after.type, parameterTypes.toArray(new Type[0])); + Type methodType = Type.getMethodType(actual.type, parameterTypes.toArray(new Type[0])); List args = new ArrayList<>(); args.add(recipe.toString()); args.addAll(pointers); writer.invokeDefCall(name, methodType, DefBootstrap.METHOD_CALL, args.toArray()); } - - @Override - void store(MethodWriter writer, Globals globals) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java similarity index 58% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java index c51a0a02647..8b098fa99f6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java @@ -19,43 +19,66 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.DefBootstrap; +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; import 
org.elasticsearch.painless.Locals; -import org.objectweb.asm.Type; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a field load/store or shortcut on a def type. (Internal only.) */ -final class LDefField extends ALink implements IDefLink { +final class PSubDefField extends AStoreable { - final String value; + private final String value; - LDefField(Location location, String value) { - super(location, 1); + PSubDefField(Location location, String value) { + super(location); this.value = Objects.requireNonNull(value); } @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } @Override - ALink analyze(Locals locals) { - after = Definition.DEF_TYPE; - - return this; + void analyze(Locals locals) { + actual = expected == null || explicit ? Definition.DEF_TYPE : expected; } @Override void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + org.objectweb.asm.Type methodType = + org.objectweb.asm.Type.getMethodType(actual.type, Definition.DEF_TYPE.type); + writer.invokeDefCall(value, methodType, DefBootstrap.LOAD); + } + + @Override + int accessElementCount() { + return 1; + } + + @Override + boolean isDefOptimized() { + return true; + } + + @Override + void updateActual(Type actual) { + this.actual = actual; + } + + @Override + void setup(MethodWriter writer, Globals globals) { // Do nothing. } @@ -63,7 +86,8 @@ final class LDefField extends ALink implements IDefLink { void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - Type methodType = Type.getMethodType(after.type, Definition.DEF_TYPE.type); + org.objectweb.asm.Type methodType = + org.objectweb.asm.Type.getMethodType(actual.type, Definition.DEF_TYPE.type); writer.invokeDefCall(value, methodType, DefBootstrap.LOAD); } @@ -71,7 +95,8 @@ final class LDefField extends ALink implements IDefLink { void store(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - Type methodType = Type.getMethodType(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, after.type); + org.objectweb.asm.Type methodType = + org.objectweb.asm.Type.getMethodType(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, actual.type); writer.invokeDefCall(value, methodType, DefBootstrap.STORE); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java new file mode 100644 index 00000000000..8b85a260d51 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.lang.reflect.Modifier; +import java.util.Objects; +import java.util.Set; + +/** + * Represents a field load/store. + */ +final class PSubField extends AStoreable { + + private final Field field; + + public PSubField(Location location, Field field) { + super(location); + + this.field = Objects.requireNonNull(field); + } + + @Override + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void analyze(Locals locals) { + if (write && Modifier.isFinal(field.modifiers)) { + throw createError(new IllegalArgumentException( + "Cannot write to read-only field [" + field.name + "] for type [" + field.type.name + "].")); + } + + actual = field.type; + } + + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { + writer.getStatic(field.owner.type, field.javaName, field.type.type); + } else { + writer.getField(field.owner.type, field.javaName, field.type.type); + } + } + + @Override + int accessElementCount() { + return 1; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw new IllegalArgumentException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + // Do nothing. 
+ } + + @Override + void load(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { + writer.getStatic(field.owner.type, field.javaName, field.type.type); + } else { + writer.getField(field.owner.type, field.javaName, field.type.type); + } + } + + @Override + void store(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { + writer.putStatic(field.owner.type, field.javaName, field.type.type); + } else { + writer.putField(field.owner.type, field.javaName, field.type.type); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java similarity index 64% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 6ef8aedb0bf..c13f8235821 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -20,49 +20,53 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a list load/store shortcut. (Internal only.) 
*/ -final class LListShortcut extends ALink { +final class PSubListShortcut extends AStoreable { - AExpression index; - Method getter; - Method setter; + private final Struct struct; + private AExpression index; - LListShortcut(Location location, AExpression index) { - super(location, 2); + private Method getter; + private Method setter; + PSubListShortcut(Location location, Struct struct, AExpression index) { + super(location); + + this.struct = Objects.requireNonNull(struct); this.index = Objects.requireNonNull(index); } - + @Override void extractVariables(Set variables) { - index.extractVariables(variables); + throw createError(new IllegalStateException("Illegal tree structure.")); } @Override - ALink analyze(Locals locals) { - getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); - setter = before.struct.methods.get(new Definition.MethodKey("set", 2)); + void analyze(Locals locals) { + getter = struct.methods.get(new Definition.MethodKey("get", 1)); + setter = struct.methods.get(new Definition.MethodKey("set", 2)); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.INT)) { - throw createError(new IllegalArgumentException("Illegal list get shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal list get shortcut for type [" + struct.name + "].")); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { - throw createError(new IllegalArgumentException("Illegal list set shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal list set shortcut for type [" + struct.name + "].")); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) @@ -70,22 +74,48 @@ final class LListShortcut extends ALink { throw createError(new IllegalArgumentException("Shortcut argument types must match.")); } - if ((load || store) && (!load || getter != null) && (!store || setter != null)) { + if ((read || write) && (!read || getter != null) && (!write || setter != null)) { index.expected = Definition.INT_TYPE; index.analyze(locals); index = index.cast(locals); - after = setter != null ? setter.arguments.get(1) : getter.rtn; + actual = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { - throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + struct.name + "].")); } - - return this; } @Override void write(MethodWriter writer, Globals globals) { index.write(writer, globals); + + writer.writeDebugInfo(location); + + getter.write(writer); + + if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { + writer.checkCast(getter.rtn.type); + } + } + + @Override + int accessElementCount() { + return 2; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw new IllegalArgumentException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + index.write(writer, globals); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java similarity index 64% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 52d66b0fe75..183ee823752 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -20,6 +20,8 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Definition.Method; @@ -34,34 +36,37 @@ import org.elasticsearch.painless.MethodWriter; /** * Represents a map load/store shortcut. (Internal only.) 
*/ -final class LMapShortcut extends ALink { +final class PSubMapShortcut extends AStoreable { - AExpression index; - Method getter; - Method setter; + private final Struct struct; + private AExpression index; - LMapShortcut(Location location, AExpression index) { - super(location, 2); + private Method getter; + private Method setter; + PSubMapShortcut(Location location, Struct struct, AExpression index) { + super(location); + + this.struct = Objects.requireNonNull(struct); this.index = Objects.requireNonNull(index); } - + @Override void extractVariables(Set variables) { - index.extractVariables(variables); + throw createError(new IllegalStateException("Illegal tree structure.")); } @Override - ALink analyze(Locals locals) { - getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); - setter = before.struct.methods.get(new Definition.MethodKey("put", 2)); + void analyze(Locals locals) { + getter = struct.methods.get(new Definition.MethodKey("get", 1)); + setter = struct.methods.get(new Definition.MethodKey("put", 2)); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { - throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); } if (setter != null && setter.arguments.size() != 2) { - throw createError(new IllegalArgumentException("Illegal map set shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal map set shortcut for type [" + struct.name + "].")); } if (getter != null && setter != null && @@ -69,22 +74,48 @@ final class LMapShortcut extends ALink { throw createError(new IllegalArgumentException("Shortcut argument types must match.")); } - if ((load || store) && (!load || getter != null) && (!store || setter != null)) { + if ((read || write) && (!read || getter != null) && (!write || setter != null)) { index.expected = setter != null ? setter.arguments.get(0) : getter.arguments.get(0); index.analyze(locals); index = index.cast(locals); - after = setter != null ? setter.arguments.get(1) : getter.rtn; + actual = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { - throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + before.name + "].")); + throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + struct.name + "].")); } - - return this; } @Override void write(MethodWriter writer, Globals globals) { index.write(writer, globals); + + writer.writeDebugInfo(location); + + getter.write(writer); + + if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { + writer.checkCast(getter.rtn.type); + } + } + + @Override + int accessElementCount() { + return 2; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw new IllegalArgumentException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { + index.write(writer, globals); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java similarity index 63% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java index 6d669adc658..34720a275db 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java @@ -19,75 +19,91 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Struct; - -import java.util.Objects; -import java.util.Set; - +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import java.util.Set; + /** * Represents a field load/store shortcut. (Internal only.) 
*/ -final class LShortcut extends ALink { +final class PSubShortcut extends AStoreable { - final String value; + private final String value; + private final String type; + private final Method getter; + private final Method setter; - Method getter = null; - Method setter = null; + PSubShortcut(Location location, String value, String type, Method getter, Method setter) { + super(location); - LShortcut(Location location, String value) { - super(location, 1); - - this.value = Objects.requireNonNull(value); + this.value = value; + this.type = type; + this.getter = getter; + this.setter = setter; } - - @Override - void extractVariables(Set variables) {} @Override - ALink analyze(Locals locals) { - Struct struct = before.struct; - - getter = struct.methods.get(new Definition.MethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); - - if (getter == null) { - getter = struct.methods.get(new Definition.MethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); - } - - setter = struct.methods.get(new Definition.MethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + @Override + void analyze(Locals locals) { if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { throw createError(new IllegalArgumentException( - "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "].")); + "Illegal get shortcut on field [" + value + "] for type [" + type + "].")); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { throw createError(new IllegalArgumentException( - "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "].")); + "Illegal set shortcut on field [" + value + "] for type [" + type + "].")); } if (getter != null && setter != null && setter.arguments.get(0) != getter.rtn) { throw createError(new IllegalArgumentException("Shortcut argument types must match.")); } - if ((getter != null || setter != null) && (!load || getter != null) && (!store || setter != null)) { - after = setter != null ? setter.arguments.get(0) : getter.rtn; + if ((getter != null || setter != null) && (!read || getter != null) && (!write || setter != null)) { + actual = setter != null ? setter.arguments.get(0) : getter.rtn; } else { - throw createError(new IllegalArgumentException("Illegal shortcut on field [" + value + "] for type [" + struct.name + "].")); + throw createError(new IllegalArgumentException("Illegal shortcut on field [" + value + "] for type [" + type + "].")); } - - return this; } @Override void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + getter.write(writer); + + if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { + writer.checkCast(getter.rtn.type); + } + } + + @Override + int accessElementCount() { + return 1; + } + + @Override + boolean isDefOptimized() { + return false; + } + + @Override + void updateActual(Type actual) { + throw new IllegalArgumentException("Illegal tree structure."); + } + + @Override + void setup(MethodWriter writer, Globals globals) { // Do nothing. 
} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index e688f0acec0..d81437242a8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -33,14 +33,14 @@ import java.util.Set; */ public final class SBlock extends AStatement { - final List statements; + private final List statements; public SBlock(Location location, List statements) { super(location); this.statements = Collections.unmodifiableList(statements); } - + @Override void extractVariables(Set variables) { for (AStatement statement : statements) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java index 62560d1bd4e..71d737fcd66 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java @@ -34,9 +34,11 @@ public final class SBreak extends AStatement { public SBreak(Location location) { super(location); } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + // Do nothing. + } @Override void analyze(Locals locals) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 783af3d0022..d73aead49c8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -20,33 +20,32 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a catch block as part of a try-catch block. 
*/ public final class SCatch extends AStatement { - final String type; - final String name; - final SBlock block; + private final String type; + private final String name; + private final SBlock block; - Variable variable; + private Variable variable = null; - Label begin; - Label end; - Label exception; + Label begin = null; + Label end = null; + Label exception = null; public SCatch(Location location, String type, String name, SBlock block) { super(location); @@ -55,10 +54,11 @@ public final class SCatch extends AStatement { this.name = Objects.requireNonNull(name); this.block = block; } - + @Override void extractVariables(Set variables) { variables.add(name); + if (block != null) { block.extractVariables(variables); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java index 98ac804cc3a..5504ae8b268 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java @@ -34,9 +34,11 @@ public final class SContinue extends AStatement { public SContinue(Location location) { super(location); } - + @Override - void extractVariables(Set variables) {} + void extractVariables(Set variables) { + // Do nothing. + } @Override void analyze(Locals locals) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java index d0dae5a68ac..4bade4625ee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java @@ -33,14 +33,14 @@ import java.util.Set; */ public final class SDeclBlock extends AStatement { - final List declarations; + private final List declarations; public SDeclBlock(Location location, List declarations) { super(location); this.declarations = Collections.unmodifiableList(declarations); } - + @Override void extractVariables(Set variables) { for (SDeclaration declaration : declarations) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index de856722a0a..c0911232d21 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -20,28 +20,27 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a single variable declaration. 
*/ public final class SDeclaration extends AStatement { - final String type; - final String name; - AExpression expression; + private final String type; + private final String name; + private AExpression expression; - Variable variable; + private Variable variable = null; public SDeclaration(Location location, String type, String name, AExpression expression) { super(location); @@ -50,10 +49,11 @@ public final class SDeclaration extends AStatement { this.name = Objects.requireNonNull(name); this.expression = expression; } - + @Override void extractVariables(Set variables) { variables.add(name); + if (expression != null) { expression.extractVariables(variables); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java index 62572e54610..d63910683be 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java @@ -21,22 +21,24 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a do-while loop. */ public final class SDo extends AStatement { - final SBlock block; - AExpression condition; + private final SBlock block; + private AExpression condition; + + private boolean continuous = false; public SDo(Location location, SBlock block, AExpression condition) { super(location); @@ -44,10 +46,11 @@ public final class SDo extends AStatement { this.condition = Objects.requireNonNull(condition); this.block = block; } - + @Override void extractVariables(Set variables) { condition.extractVariables(variables); + if (block != null) { block.extractVariables(variables); } @@ -75,7 +78,7 @@ public final class SDo extends AStatement { condition = condition.cast(locals); if (condition.constant != null) { - final boolean continuous = (boolean)condition.constant; + continuous = (boolean)condition.constant; if (!continuous) { throw createError(new IllegalArgumentException("Extraneous do while loop.")); @@ -110,8 +113,10 @@ public final class SDo extends AStatement { writer.mark(begin); - condition.fals = end; - condition.write(writer, globals); + if (!continuous) { + condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, end); + } if (loopCounter != null) { writer.writeLoopCounter(loopCounter.getSlot(), Math.max(1, block.statementCount), location); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index 772c4af4c48..6d0d0ee1bcc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,51 +19,29 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Definition.Cast; -import 
org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; -import org.objectweb.asm.Label; -import org.objectweb.asm.Opcodes; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.ITERATOR_HASNEXT; -import static org.elasticsearch.painless.WriterConstants.ITERATOR_NEXT; -import static org.elasticsearch.painless.WriterConstants.ITERATOR_TYPE; - /** - * Represents a for-each loop shortcut for iterables. Defers to other S-nodes for non-iterable types. + * Represents a for-each loop and defers to subnodes depending on type. */ public class SEach extends AStatement { - final String type; - final String name; - AExpression expression; - final SBlock block; + private final String type; + private final String name; + private AExpression expression; + private final SBlock block; - // Members for all cases. - Variable variable = null; - Cast cast = null; - - // Members for the array case. - Variable array = null; - Variable index = null; - Type indexed = null; - - // Members for the iterable case. - Variable iterator = null; - Method method = null; + private AStatement sub = null; public SEach(Location location, String type, String name, AExpression expression, SBlock block) { super(location); @@ -73,11 +51,13 @@ public class SEach extends AStatement { this.expression = Objects.requireNonNull(expression); this.block = block; } - + @Override void extractVariables(Set variables) { variables.add(name); + expression.extractVariables(variables); + if (block != null) { block.extractVariables(variables); } @@ -98,17 +78,18 @@ public class SEach extends AStatement { } locals = Locals.newLocalScope(locals); - - variable = locals.addVariable(location, type, name, true); + Variable variable = locals.addVariable(location, type, name, true); if (expression.actual.sort == Sort.ARRAY) { - analyzeArray(locals, type); + sub = new SSubEachArray(location, variable, expression, block); } else if (expression.actual.sort == Sort.DEF || Iterable.class.isAssignableFrom(expression.actual.clazz)) { - analyzeIterable(locals, type); + sub = new SSubEachIterable(location, variable, expression, block); } else { throw createError(new IllegalArgumentException("Illegal for each type [" + expression.actual.name + "].")); } + sub.analyze(locals); + if (block == null) { throw createError(new IllegalArgumentException("Extraneous for each loop.")); } @@ -125,110 +106,12 @@ public class SEach extends AStatement { statementCount = 1; if (locals.hasVariable(Locals.LOOP)) { - loopCounter = locals.getVariable(location, Locals.LOOP); + sub.loopCounter = locals.getVariable(location, Locals.LOOP); } } - void analyzeArray(Locals variables, Type type) { - // We must store the array and index as variables for securing slots on the stack, and - // also add the location offset to make the names unique in case of nested for each loops. 
- array = variables.addVariable(location, expression.actual, "#array" + location.getOffset(), true); - index = variables.addVariable(location, Definition.INT_TYPE, "#index" + location.getOffset(), true); - indexed = Definition.getType(expression.actual.struct, expression.actual.dimensions - 1); - cast = AnalyzerCaster.getLegalCast(location, indexed, type, true, true); - } - - void analyzeIterable(Locals variables, Type type) { - // We must store the iterator as a variable for securing a slot on the stack, and - // also add the location offset to make the name unique in case of nested for each loops. - iterator = variables.addVariable(location, Definition.getType("Iterator"), "#itr" + location.getOffset(), true); - - if (expression.actual.sort == Sort.DEF) { - method = null; - } else { - method = expression.actual.struct.methods.get(new MethodKey("iterator", 0)); - - if (method == null) { - throw createError(new IllegalArgumentException( - "Unable to create iterator for the type [" + expression.actual.name + "].")); - } - } - - cast = AnalyzerCaster.getLegalCast(location, Definition.DEF_TYPE, type, true, true); - } - @Override void write(MethodWriter writer, Globals globals) { - writer.writeStatementOffset(location); - - if (array != null) { - writeArray(writer, globals); - } else if (iterator != null) { - writeIterable(writer, globals); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - - void writeArray(MethodWriter writer, Globals globals) { - expression.write(writer, globals); - writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ISTORE), array.getSlot()); - writer.push(-1); - writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ISTORE), index.getSlot()); - - Label begin = new Label(); - Label end = new Label(); - - writer.mark(begin); - - writer.visitIincInsn(index.getSlot(), 1); - writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ILOAD), index.getSlot()); - writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ILOAD), array.getSlot()); - writer.arrayLength(); - writer.ifICmp(MethodWriter.GE, end); - - writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ILOAD), array.getSlot()); - writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ILOAD), index.getSlot()); - writer.arrayLoad(indexed.type); - writer.writeCast(cast); - writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); - - block.write(writer, globals); - - writer.goTo(begin); - writer.mark(end); - } - - void writeIterable(MethodWriter writer, Globals globals) { - expression.write(writer, globals); - - if (method == null) { - Type itr = Definition.getType("Iterator"); - org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(itr.type, Definition.DEF_TYPE.type); - writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR); - } else { - method.write(writer); - } - - writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ISTORE), iterator.getSlot()); - - Label begin = new Label(); - Label end = new Label(); - - writer.mark(begin); - - writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ILOAD), iterator.getSlot()); - writer.invokeInterface(ITERATOR_TYPE, ITERATOR_HASNEXT); - writer.ifZCmp(MethodWriter.EQ, end); - - writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ILOAD), iterator.getSlot()); - writer.invokeInterface(ITERATOR_TYPE, ITERATOR_NEXT); - writer.writeCast(cast); - writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); - - block.write(writer, globals); - - 
writer.goTo(begin); - writer.mark(end); + sub.write(writer, globals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java index 94eb180ea03..4493de09533 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java @@ -19,30 +19,29 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.MethodWriter; - /** * Represents the top-level node for an expression as a statement. */ public final class SExpression extends AStatement { - AExpression expression; + private AExpression expression; public SExpression(Location location, AExpression expression) { super(location); this.expression = Objects.requireNonNull(expression); } - + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java index c324682040b..9d462b10330 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java @@ -21,23 +21,25 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a for loop. 
*/ public final class SFor extends AStatement { - ANode initializer; - AExpression condition; - AExpression afterthought; - final SBlock block; + private ANode initializer; + private AExpression condition; + private AExpression afterthought; + private final SBlock block; + + private boolean continuous = false; public SFor(Location location, ANode initializer, AExpression condition, AExpression afterthought, SBlock block) { super(location); @@ -47,18 +49,21 @@ public final class SFor extends AStatement { this.afterthought = afterthought; this.block = block; } - + @Override void extractVariables(Set variables) { if (initializer != null) { initializer.extractVariables(variables); } + if (condition != null) { condition.extractVariables(variables); } + if (afterthought != null) { afterthought.extractVariables(variables); } + if (block != null) { block.extractVariables(variables); } @@ -68,11 +73,9 @@ public final class SFor extends AStatement { void analyze(Locals locals) { locals = Locals.newLocalScope(locals); - boolean continuous = false; - if (initializer != null) { if (initializer instanceof AStatement) { - ((AStatement)initializer).analyze(locals); + initializer.analyze(locals); } else if (initializer instanceof AExpression) { AExpression initializer = (AExpression)this.initializer; @@ -150,7 +153,7 @@ public final class SFor extends AStatement { Label end = new Label(); if (initializer instanceof SDeclBlock) { - ((SDeclBlock)initializer).write(writer, globals); + initializer.write(writer, globals); } else if (initializer instanceof AExpression) { AExpression initializer = (AExpression)this.initializer; @@ -160,9 +163,9 @@ public final class SFor extends AStatement { writer.mark(start); - if (condition != null) { - condition.fals = end; + if (condition != null && !continuous) { condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, end); } boolean allEscape = false; @@ -179,6 +182,7 @@ public final class SFor extends AStatement { if (loopCounter != null) { writer.writeLoopCounter(loopCounter.getSlot(), statementCount, location); } + block.write(writer, globals); } else { if (loopCounter != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 3eee2a7b2d8..44afe828ef2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -23,16 +23,17 @@ import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Def; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.node.SSource.Reserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Handle; import org.objectweb.asm.Opcodes; @@ -50,24 +51,49 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; /** * Represents a 
user-defined function. */ -public class SFunction extends AStatement { +public final class SFunction extends AStatement { + public static final class FunctionReserved implements Reserved { + public static final String THIS = "#this"; + public static final String LOOP = "#loop"; + + private int maxLoopCounter = 0; + + public void markReserved(String name) { + // Do nothing. + } + + public boolean isReserved(String name) { + return name.equals(THIS) || name.equals(LOOP); + } + + @Override + public void setMaxLoopCounter(int max) { + maxLoopCounter = max; + } + + @Override + public int getMaxLoopCounter() { + return maxLoopCounter; + } + } + final FunctionReserved reserved; - final String rtnTypeStr; + private final String rtnTypeStr; public final String name; - final List paramTypeStrs; - final List paramNameStrs; - final List statements; + private final List paramTypeStrs; + private final List paramNameStrs; + private final List statements; public final boolean synthetic; Type rtnType = null; List parameters = new ArrayList<>(); Method method = null; - Variable loop = null; + private Variable loop = null; - public SFunction(FunctionReserved reserved, Location location, - String rtnType, String name, List paramTypes, - List paramNames, List statements, boolean synthetic) { + public SFunction(FunctionReserved reserved, Location location, String rtnType, String name, + List paramTypes, List paramNames, List statements, + boolean synthetic) { super(location); this.reserved = Objects.requireNonNull(reserved); @@ -78,14 +104,14 @@ public class SFunction extends AStatement { this.statements = Collections.unmodifiableList(statements); this.synthetic = synthetic; } - + @Override void extractVariables(Set variables) { // we should never be extracting from a function, as functions are top-level! throw new IllegalStateException("Illegal tree structure"); } - void generate() { + void generateSignature() { try { rtnType = Definition.getType(rtnTypeStr); } catch (IllegalArgumentException exception) { @@ -150,7 +176,7 @@ public class SFunction extends AStatement { loop = locals.getVariable(null, FunctionReserved.LOOP); } } - + /** Writes the function to given ClassVisitor. */ void write (ClassVisitor writer, CompilerSettings settings, Globals globals) { int access = Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC; @@ -185,7 +211,7 @@ public class SFunction extends AStatement { } String staticHandleFieldName = Def.getUserFunctionHandleFieldName(name, parameters.size()); - globals.addConstantInitializer(new Constant(location, WriterConstants.METHOD_HANDLE_TYPE, + globals.addConstantInitializer(new Constant(location, WriterConstants.METHOD_HANDLE_TYPE, staticHandleFieldName, this::initializeConstant)); } @@ -197,43 +223,4 @@ public class SFunction extends AStatement { false); writer.push(handle); } - - /** - * Tracks reserved variables. Must be given to any source of input - * prior to beginning the analysis phase so that reserved variables - * are known ahead of time to assign appropriate slots without - * being wasteful. - */ - public interface Reserved { - void markReserved(String name); - boolean isReserved(String name); - - void setMaxLoopCounter(int max); - int getMaxLoopCounter(); - } - - public static final class FunctionReserved implements Reserved { - public static final String THIS = "#this"; - public static final String LOOP = "#loop"; - - private int maxLoopCounter = 0; - - public void markReserved(String name) { - // Do nothing. 
- } - - public boolean isReserved(String name) { - return name.equals(THIS) || name.equals(LOOP); - } - - @Override - public void setMaxLoopCounter(int max) { - maxLoopCounter = max; - } - - @Override - public int getMaxLoopCounter() { - return maxLoopCounter; - } - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java index eb862b1177a..a266df464ea 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java @@ -21,15 +21,15 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents an if block. */ @@ -44,10 +44,11 @@ public final class SIf extends AStatement { this.condition = Objects.requireNonNull(condition); this.ifblock = ifblock; } - + @Override void extractVariables(Set variables) { condition.extractVariables(variables); + if (ifblock != null) { ifblock.extractVariables(variables); } @@ -84,8 +85,8 @@ public final class SIf extends AStatement { Label fals = new Label(); - condition.fals = fals; condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); ifblock.continu = continu; ifblock.brake = brake; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index 8296ddcada0..d36fb6cd370 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -21,23 +21,23 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents an if/else block. 
*/ public final class SIfElse extends AStatement { - AExpression condition; - final SBlock ifblock; - final SBlock elseblock; + private AExpression condition; + private final SBlock ifblock; + private final SBlock elseblock; public SIfElse(Location location, AExpression condition, SBlock ifblock, SBlock elseblock) { super(location); @@ -46,13 +46,15 @@ public final class SIfElse extends AStatement { this.ifblock = ifblock; this.elseblock = elseblock; } - + @Override void extractVariables(Set variables) { condition.extractVariables(variables); + if (ifblock != null) { ifblock.extractVariables(variables); } + if (elseblock != null) { elseblock.extractVariables(variables); } @@ -104,11 +106,11 @@ public final class SIfElse extends AStatement { void write(MethodWriter writer, Globals globals) { writer.writeStatementOffset(location); + Label fals = new Label(); Label end = new Label(); - Label fals = elseblock != null ? new Label() : end; - condition.fals = fals; condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, fals); ifblock.continu = continu; ifblock.brake = brake; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java index 36e629ca6d8..e6986f5bdf6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java @@ -19,9 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import java.util.Objects; @@ -32,14 +32,14 @@ import java.util.Set; */ public final class SReturn extends AStatement { - AExpression expression; + private AExpression expression; public SReturn(Location location, AExpression expression) { super(location); this.expression = Objects.requireNonNull(expression); } - + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index e55ad91d492..a4cf1cc8eee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -27,11 +27,10 @@ import org.elasticsearch.painless.Executable; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; -import org.elasticsearch.painless.node.SFunction.Reserved; -import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.SimpleChecksAdapter; +import org.elasticsearch.painless.WriterConstants; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Opcodes; @@ -60,20 +59,72 @@ import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; */ public final class SSource extends AStatement { - final String name; - final String source; - final Printer debugStream; - final CompilerSettings settings; - final MainMethodReserved reserved; - final List functions; - final Globals globals; - final List statements; + /** + * 
Tracks reserved variables. Must be given to any source of input + * prior to beginning the analysis phase so that reserved variables + * are known ahead of time to assign appropriate slots without + * being wasteful. + */ + public interface Reserved { + void markReserved(String name); + boolean isReserved(String name); + + void setMaxLoopCounter(int max); + int getMaxLoopCounter(); + } + + public static final class MainMethodReserved implements Reserved { + private boolean score = false; + private boolean ctx = false; + private int maxLoopCounter = 0; + + @Override + public void markReserved(String name) { + if (Locals.SCORE.equals(name)) { + score = true; + } else if (Locals.CTX.equals(name)) { + ctx = true; + } + } + + @Override + public boolean isReserved(String name) { + return Locals.KEYWORDS.contains(name); + } + + public boolean usesScore() { + return score; + } + + public boolean usesCtx() { + return ctx; + } + + @Override + public void setMaxLoopCounter(int max) { + maxLoopCounter = max; + } + + @Override + public int getMaxLoopCounter() { + return maxLoopCounter; + } + } + + private final CompilerSettings settings; + private final String name; + private final String source; + private final Printer debugStream; + private final MainMethodReserved reserved; + private final List functions; + private final Globals globals; + private final List statements; private Locals mainMethod; private byte[] bytes; - public SSource(CompilerSettings settings, String name, String source, Printer debugStream, - MainMethodReserved reserved, Location location, + public SSource(CompilerSettings settings, String name, String source, Printer debugStream, + MainMethodReserved reserved, Location location, List functions, Globals globals, List statements) { super(location); this.settings = Objects.requireNonNull(settings); @@ -88,18 +139,18 @@ public final class SSource extends AStatement { this.statements = Collections.unmodifiableList(statements); this.globals = globals; } - + @Override void extractVariables(Set variables) { // we should never be extracting from a function, as functions are top-level! - throw new IllegalStateException("Illegal tree structure"); + throw new IllegalStateException("Illegal tree structure."); } public void analyze() { Map methods = new HashMap<>(); for (SFunction function : functions) { - function.generate(); + function.generateSignature(); MethodKey key = new MethodKey(function.name, function.parameters.size()); @@ -114,7 +165,7 @@ public final class SSource extends AStatement { @Override void analyze(Locals program) { for (SFunction function : functions) { - Locals functionLocals = Locals.newFunctionScope(program, function.rtnType, function.parameters, + Locals functionLocals = Locals.newFunctionScope(program, function.rtnType, function.parameters, function.reserved.getMaxLoopCounter()); function.analyze(functionLocals); } @@ -154,7 +205,7 @@ public final class SSource extends AStatement { ClassWriter writer = new ClassWriter(classFrames); ClassVisitor visitor = writer; - + // if picky is enabled, turn on some checks. instead of VerifyError at the end, you get a helpful stacktrace. if (settings.isPicky()) { visitor = new SimpleChecksAdapter(visitor); @@ -180,12 +231,12 @@ public final class SSource extends AStatement { execute.visitCode(); write(execute, globals); execute.endMethod(); - + // Write all functions: for (SFunction function : functions) { function.write(visitor, settings, globals); } - + // Write all synthetic functions. 
Note that this process may add more :) while (!globals.getSyntheticMethods().isEmpty()) { List current = new ArrayList<>(globals.getSyntheticMethods().values()); @@ -210,7 +261,7 @@ public final class SSource extends AStatement { } // Initialize the constants in a static initializer - final MethodWriter clinit = new MethodWriter(Opcodes.ACC_STATIC, + final MethodWriter clinit = new MethodWriter(Opcodes.ACC_STATIC, WriterConstants.CLINIT, visitor, globals.getStatements(), settings); clinit.visitCode(); for (Constant constant : inits) { @@ -220,13 +271,13 @@ public final class SSource extends AStatement { clinit.returnValue(); clinit.endMethod(); } - + // End writing the class and store the generated bytes. visitor.visitEnd(); bytes = writer.toByteArray(); } - + @Override void write(MethodWriter writer, Globals globals) { if (reserved.usesScore()) { @@ -281,43 +332,4 @@ public final class SSource extends AStatement { public byte[] getBytes() { return bytes; } - - - public static final class MainMethodReserved implements Reserved { - private boolean score = false; - private boolean ctx = false; - private int maxLoopCounter = 0; - - @Override - public void markReserved(String name) { - if (Locals.SCORE.equals(name)) { - score = true; - } else if (Locals.CTX.equals(name)) { - ctx = true; - } - } - - @Override - public boolean isReserved(String name) { - return Locals.KEYWORDS.contains(name); - } - - public boolean usesScore() { - return score; - } - - public boolean usesCtx() { - return ctx; - } - - @Override - public void setMaxLoopCounter(int max) { - maxLoopCounter = max; - } - - @Override - public int getMaxLoopCounter() { - return maxLoopCounter; - } - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java new file mode 100644 index 00000000000..d425d59f21e --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; + +import java.util.Objects; +import java.util.Set; + +/** + * Represents a for-each loop for arrays. 
+ */ +final class SSubEachArray extends AStatement { + private final Variable variable; + private AExpression expression; + private final SBlock block; + + private Cast cast = null; + private Variable array = null; + private Variable index = null; + private Type indexed = null; + + public SSubEachArray(Location location, Variable variable, AExpression expression, SBlock block) { + super(location); + + this.variable = Objects.requireNonNull(variable); + this.expression = Objects.requireNonNull(expression); + this.block = block; + } + + @Override + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void analyze(Locals locals) { + // We must store the array and index as variables for securing slots on the stack, and + // also add the location offset to make the names unique in case of nested for each loops. + array = locals.addVariable(location, expression.actual, "#array" + location.getOffset(), true); + index = locals.addVariable(location, Definition.INT_TYPE, "#index" + location.getOffset(), true); + indexed = Definition.getType(expression.actual.struct, expression.actual.dimensions - 1); + cast = AnalyzerCaster.getLegalCast(location, indexed, variable.type, true, true); + } + + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeStatementOffset(location); + + expression.write(writer, globals); + writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ISTORE), array.getSlot()); + writer.push(-1); + writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ISTORE), index.getSlot()); + + Label begin = new Label(); + Label end = new Label(); + + writer.mark(begin); + + writer.visitIincInsn(index.getSlot(), 1); + writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ILOAD), index.getSlot()); + writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ILOAD), array.getSlot()); + writer.arrayLength(); + writer.ifICmp(MethodWriter.GE, end); + + writer.visitVarInsn(array.type.type.getOpcode(Opcodes.ILOAD), array.getSlot()); + writer.visitVarInsn(index.type.type.getOpcode(Opcodes.ILOAD), index.getSlot()); + writer.arrayLoad(indexed.type); + writer.writeCast(cast); + writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); + + if (loopCounter != null) { + writer.writeLoopCounter(loopCounter.getSlot(), statementCount, location); + } + + block.write(writer, globals); + + writer.goTo(begin); + writer.mark(end); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java new file mode 100644 index 00000000000..ce4507ed983 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.DefBootstrap; +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Locals.Variable; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; + +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.painless.WriterConstants.ITERATOR_HASNEXT; +import static org.elasticsearch.painless.WriterConstants.ITERATOR_NEXT; +import static org.elasticsearch.painless.WriterConstants.ITERATOR_TYPE; + +/** + * Represents a for-each loop for iterables. + */ +final class SSubEachIterable extends AStatement { + + private AExpression expression; + private final SBlock block; + private final Variable variable; + + private Cast cast = null; + private Variable iterator = null; + private Method method = null; + + public SSubEachIterable(Location location, Variable variable, AExpression expression, SBlock block) { + super(location); + + this.variable = Objects.requireNonNull(variable); + this.expression = Objects.requireNonNull(expression); + this.block = block; + } + + @Override + void extractVariables(Set variables) { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + + @Override + void analyze(Locals locals) { + // We must store the iterator as a variable for securing a slot on the stack, and + // also add the location offset to make the name unique in case of nested for each loops. 
+ iterator = locals.addVariable(location, Definition.getType("Iterator"), "#itr" + location.getOffset(), true); + + if (expression.actual.sort == Sort.DEF) { + method = null; + } else { + method = expression.actual.struct.methods.get(new MethodKey("iterator", 0)); + + if (method == null) { + throw createError(new IllegalArgumentException( + "Unable to create iterator for the type [" + expression.actual.name + "].")); + } + } + + cast = AnalyzerCaster.getLegalCast(location, Definition.DEF_TYPE, variable.type, true, true); + } + + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeStatementOffset(location); + + expression.write(writer, globals); + + if (method == null) { + Type itr = Definition.getType("Iterator"); + org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(itr.type, Definition.DEF_TYPE.type); + writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR); + } else { + method.write(writer); + } + + writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ISTORE), iterator.getSlot()); + + Label begin = new Label(); + Label end = new Label(); + + writer.mark(begin); + + writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ILOAD), iterator.getSlot()); + writer.invokeInterface(ITERATOR_TYPE, ITERATOR_HASNEXT); + writer.ifZCmp(MethodWriter.EQ, end); + + writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ILOAD), iterator.getSlot()); + writer.invokeInterface(ITERATOR_TYPE, ITERATOR_NEXT); + writer.writeCast(cast); + writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.getSlot()); + + if (loopCounter != null) { + writer.writeLoopCounter(loopCounter.getSlot(), statementCount, location); + } + + block.write(writer, globals); + + writer.goTo(begin); + writer.mark(end); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java index c78e0503f30..b3442b2f2ee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java @@ -21,8 +21,8 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import java.util.Objects; @@ -33,14 +33,14 @@ import java.util.Set; */ public final class SThrow extends AStatement { - AExpression expression; + private AExpression expression; public SThrow(Location location, AExpression expression) { super(location); this.expression = Objects.requireNonNull(expression); } - + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java index 1986b5eeb66..47dc955a914 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java @@ -21,9 +21,9 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.objectweb.asm.Label; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; 
+import org.objectweb.asm.Label; import java.util.Collections; import java.util.List; @@ -34,8 +34,8 @@ import java.util.Set; */ public final class STry extends AStatement { - final SBlock block; - final List catches; + private final SBlock block; + private final List catches; public STry(Location location, SBlock block, List catches) { super(location); @@ -43,7 +43,7 @@ public final class STry extends AStatement { this.block = block; this.catches = Collections.unmodifiableList(catches); } - + @Override void extractVariables(Set variables) { if (block != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index bd6740dd9b8..514ba801bbf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -21,22 +21,24 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.MethodWriter; - /** * Represents a while loop. */ public final class SWhile extends AStatement { - AExpression condition; - final SBlock block; + private AExpression condition; + private final SBlock block; + + private boolean continuous = false; public SWhile(Location location, AExpression condition, SBlock block) { super(location); @@ -44,7 +46,7 @@ public final class SWhile extends AStatement { this.condition = Objects.requireNonNull(condition); this.block = block; } - + @Override void extractVariables(Set variables) { condition.extractVariables(variables); @@ -61,8 +63,6 @@ public final class SWhile extends AStatement { condition.analyze(locals); condition = condition.cast(locals); - boolean continuous = false; - if (condition.constant != null) { continuous = (boolean)condition.constant; @@ -109,8 +109,10 @@ public final class SWhile extends AStatement { writer.mark(begin); - condition.fals = end; - condition.write(writer, globals); + if (!continuous) { + condition.write(writer, globals); + writer.ifZCmp(Opcodes.IFEQ, end); + } if (block != null) { if (loopCounter != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index 2e3d07ebf92..b8a1af073bf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -22,52 +22,58 @@ *

* The following are the types of nodes: * A* (abstract) - These are the abstract nodes that are the superclasses for the other types. - * I* (interface) -- These are marker interfaces to denote a property of the node. - * S* (statement) - These are nodes that represent a statement in Painless. These are the highest level nodes. - * E* (expression) - These are nodes that represent an expression in Painless. These are the middle level nodes. - * L* (link) - These are nodes that represent a piece of a variable/method chain. The are the lowest level nodes. + * I* (interface) - These are marker interfaces to denote a property of the node. + * S* (statement) - These are nodes that represent a statement in Painless. + * E* (expression) - These are nodes that represent an expression in Painless. + * P* (postfix) - These are nodes that represent a postfix of a variable chain. + * E/P* (storeable) - These are nodes that are allowed to store a value to memory. + * *Sub* (sub) - These are partial nodes with a parent (S/E/P)* node used to split up logic into smaller pieces. *

* The following is a brief description of each node: - * {@link org.elasticsearch.painless.node.AExpression} - The superclass for all E* (expression) nodes. - * {@link org.elasticsearch.painless.node.ALink} - The superclass for all L* (link) nodes. - * {@link org.elasticsearch.painless.node.ANode} - The superclass for all other nodes. + * {@link org.elasticsearch.painless.node.AExpression} - The superclass for all E* (expression) and P* (postfix) nodes. + * {@link org.elasticsearch.painless.node.ANode} - The superclass for all nodes. * {@link org.elasticsearch.painless.node.AStatement} - The superclass for all S* (statement) nodes. + * {@link org.elasticsearch.painless.node.AStoreable} - The superclass for an expression that can store a value in local memory. + * {@link org.elasticsearch.painless.node.EAssignment} - Represents an assignment with the lhs and rhs as child nodes. * {@link org.elasticsearch.painless.node.EBinary} - Represents a binary math expression. * {@link org.elasticsearch.painless.node.EBool} - Represents a boolean expression. * {@link org.elasticsearch.painless.node.EBoolean} - Represents a boolean constant. + * {@link org.elasticsearch.painless.node.ECallLocal} - Represents a user-defined call. * {@link org.elasticsearch.painless.node.ECapturingFunctionRef} - Represents a function reference (capturing). - * {@link org.elasticsearch.painless.node.ECast} - Represents an implicit cast in most cases. (Internal only.) - * {@link org.elasticsearch.painless.node.EChain} - Represents the entirety of a variable/method chain for read/write operations. + * {@link org.elasticsearch.painless.node.ECast} - Represents a cast inserted into the tree replacing others. (Internal only.) * {@link org.elasticsearch.painless.node.EComp} - Represents a comparison expression. * {@link org.elasticsearch.painless.node.EConditional} - Represents a conditional expression. - * {@link org.elasticsearch.painless.node.EConstant} - Represents a constant. (Internal only.) + * {@link org.elasticsearch.painless.node.EConstant} - Represents a constant inserted into the tree replacing others. (Internal only.) * {@link org.elasticsearch.painless.node.EDecimal} - Represents a decimal constant. * {@link org.elasticsearch.painless.node.EExplicit} - Represents an explicit cast. * {@link org.elasticsearch.painless.node.EFunctionRef} - Represents a function reference (non-capturing). * {@link org.elasticsearch.painless.node.EInstanceof} - Represents an instanceof check. + * {@link org.elasticsearch.painless.node.ELambda} - Represents a lambda function. * {@link org.elasticsearch.painless.node.EListInit} - Represents a list initialization shortcut. * {@link org.elasticsearch.painless.node.EMapInit} - Represents a map initialization shortcut. + * {@link org.elasticsearch.painless.node.ENewArray} - Represents an array instantiation. + * {@link org.elasticsearch.painless.node.ENewObj} - Represents an object instantiation. * {@link org.elasticsearch.painless.node.ENull} - Represents a null constant. * {@link org.elasticsearch.painless.node.ENumeric} - Represents a non-decimal numeric constant. + * {@link org.elasticsearch.painless.node.ERegex} - Represents a regular expression constant. + * {@link org.elasticsearch.painless.node.EStatic} - Represents a static type target. + * {@link org.elasticsearch.painless.node.EString} - Represents a string constant. * {@link org.elasticsearch.painless.node.EUnary} - Represents a unary math expression.
- * {@link org.elasticsearch.painless.node.IDefLink} - A marker interface for all LDef* (link) nodes. - * {@link org.elasticsearch.painless.node.LArrayLength} - Represents an array length field load. - * {@link org.elasticsearch.painless.node.LBrace} - Represents an array load/store or defers to possible shortcuts. - * {@link org.elasticsearch.painless.node.LCallInvoke} - Represents a method call or defers to a def call. - * {@link org.elasticsearch.painless.node.LCallLocal} - Represents a user-defined call. - * {@link org.elasticsearch.painless.node.LCast} - Represents a cast made in a variable/method chain. - * {@link org.elasticsearch.painless.node.LDefArray} - Represents an array load/store or shortcut on a def type. (Internal only.) - * {@link org.elasticsearch.painless.node.LDefCall} - Represents a method call made on a def type. (Internal only.) - * {@link org.elasticsearch.painless.node.LDefField} - Represents a field load/store or shortcut on a def type. (Internal only.) - * {@link org.elasticsearch.painless.node.LField} - Represents a field load/store or defers to a possible shortcuts. - * {@link org.elasticsearch.painless.node.LListShortcut} - Represents a list load/store shortcut. (Internal only.) - * {@link org.elasticsearch.painless.node.LMapShortcut} - Represents a map load/store shortcut. (Internal only.) - * {@link org.elasticsearch.painless.node.LNewArray} - Represents an array instantiation. - * {@link org.elasticsearch.painless.node.LNewObj} - Represents and object instantiation. - * {@link org.elasticsearch.painless.node.LShortcut} - Represents a field load/store shortcut. (Internal only.) - * {@link org.elasticsearch.painless.node.LStatic} - Represents a static type target. - * {@link org.elasticsearch.painless.node.LString} - Represents a string constant. - * {@link org.elasticsearch.painless.node.LVariable} - Represents a variable load/store. + * {@link org.elasticsearch.painless.node.EVariable} - Represents a variable load/store. + * {@link org.elasticsearch.painless.node.ILambda} - Represents a marker to signify this node is a lambda function. + * {@link org.elasticsearch.painless.node.PBrace} - Represents an array load/store and defers to a child subnode. + * {@link org.elasticsearch.painless.node.PCallInvoke} - Represents a method call and defers to a child subnode. + * {@link org.elasticsearch.painless.node.PField} - Represents a field load/store and defers to a child subnode. + * {@link org.elasticsearch.painless.node.PSubArrayLength} - Represents an array length field load. + * {@link org.elasticsearch.painless.node.PSubBrace} - Represents an array load/store. + * {@link org.elasticsearch.painless.node.PSubCallInvoke} - Represents a method call. + * {@link org.elasticsearch.painless.node.PSubDefArray} - Represents an array load/store or shortcut on a def type. (Internal only.) + * {@link org.elasticsearch.painless.node.PSubDefCall} - Represents a method call made on a def type. (Internal only.) + * {@link org.elasticsearch.painless.node.PSubDefField} - Represents a field load/store or shortcut on a def type. (Internal only.) + * {@link org.elasticsearch.painless.node.PSubField} - Represents a field load/store. + * {@link org.elasticsearch.painless.node.PSubListShortcut} - Represents a list load/store shortcut. (Internal only.) + * {@link org.elasticsearch.painless.node.PSubMapShortcut} - Represents a map load/store shortcut. (Internal only.) + * {@link org.elasticsearch.painless.node.PSubShortcut} - Represents a field load/store shortcut. (Internal only.) 
* {@link org.elasticsearch.painless.node.SBlock} - Represents a set of statements as a branch of control-flow. * {@link org.elasticsearch.painless.node.SBreak} - Represents a break statement. * {@link org.elasticsearch.painless.node.SCatch} - Represents a catch block as part of a try-catch block. @@ -75,7 +81,7 @@ * {@link org.elasticsearch.painless.node.SDeclaration} - Represents a single variable declaration. * {@link org.elasticsearch.painless.node.SDeclBlock} - Represents a series of declarations. * {@link org.elasticsearch.painless.node.SDo} - Represents a do-while loop. - * {@link org.elasticsearch.painless.node.SEach} - Represents a for each loop shortcut for iterables. + * {@link org.elasticsearch.painless.node.SEach} - Represents a for-each loop and defers to subnodes depending on type. * {@link org.elasticsearch.painless.node.SExpression} - Represents the top-level node for an expression as a statement. * {@link org.elasticsearch.painless.node.SFor} - Represents a for loop. * {@link org.elasticsearch.painless.node.SFunction} - Represents a user-defined function. @@ -83,6 +89,8 @@ * {@link org.elasticsearch.painless.node.SIfElse} - Represents an if/else block. * {@link org.elasticsearch.painless.node.SReturn} - Represents a return statement. * {@link org.elasticsearch.painless.node.SSource} - The root of all Painless trees. Contains a series of statements. + * {@link org.elasticsearch.painless.node.SSubEachArray} - Represents a for-each loop for arrays. + * {@link org.elasticsearch.painless.node.SSubEachIterable} - Represents a for-each loop for iterables. * {@link org.elasticsearch.painless.node.SThrow} - Represents a throw statement. * {@link org.elasticsearch.painless.node.STry} - Represents the try block as part of a try-catch block. * {@link org.elasticsearch.painless.node.SWhile} - Represents a while loop. @@ -92,14 +100,14 @@ *
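The SSubEachArray and SSubEachIterable entries above are new in this change. As a rough illustration only (this Java is not part of the change; the class and method names are hypothetical, and the locals stand in for the synthetic #array, #index, and #itr variables the nodes allocate), the bytecode their write methods emit has the shape of:

    import java.util.Iterator;

    class EachLoweringSketch {
        // Mirrors SSubEachArray.write: an index-based loop over the array value.
        static long sumArray(long[] array) {
            long sum = 0;
            int index = -1;                      // the synthetic #index local starts at -1
            while (true) {
                index++;                         // iinc before the bounds check
                if (index >= array.length) {     // ifICmp(GE, end)
                    break;
                }
                long x = array[index];           // arrayLoad, then writeCast to the loop variable's type
                sum += x;                        // stands in for the loop-counter check and the block body
            }
            return sum;
        }

        // Mirrors SSubEachIterable.write: hasNext()/next() on a stored Iterator.
        static int countIterable(Iterable<?> iterable) {
            Iterator<?> itr = iterable.iterator();  // or an "iterator" def call via DefBootstrap.ITERATOR
            int count = 0;
            while (itr.hasNext()) {                 // invokeInterface ITERATOR_HASNEXT, ifZCmp(EQ, end)
                Object x = itr.next();              // invokeInterface ITERATOR_NEXT, then writeCast
                count++;                            // stands in for the loop-counter check and the block body
            }
            return count;
        }
    }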

* All Painless trees must start with an SSource node at the root. Each node has a constructor that requires * all of its values and children be passed in at the time of instantiation. This means that Painless trees - * are build bottom-up; however, this helps enforce tree structure to be correct and fits naturally with a + * are built bottom-up; however, this helps enforce tree structure correctness and fits naturally with a * standard recursive-descent parser. *
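As a minimal sketch of that bottom-up construction (illustrative only; it uses just constructors visible in this change, and assumes the Location, condition, and blocks come from the ANTLR Walker):

    // The children are fully built first and handed to the parent at construction time;
    // the finished statement would in turn go into the statement list passed to SSource.
    static AStatement buildIfElse(Location location, AExpression condition, SBlock ifBlock, SBlock elseBlock) {
        return new SIfElse(location, condition, ifBlock, elseBlock);
    }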

* Generally, statement nodes have member data that evaluate legal control-flow during the analysis phase. * The typical order for statement nodes is for each node to call analyze on it's children during the analysis phase * and write on it's children during the writing phase. *
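The rewritten SIf.write in this change is a concrete instance of that pattern: the child condition is written first and leaves a boolean on the operand stack, the statement itself emits the branch, and then the child block is written. Condensed as a sketch (the continu/brake bookkeeping and the rest of the method are omitted here):

    Label fals = new Label();

    condition.write(writer, globals);    // child expression leaves a boolean on the stack
    writer.ifZCmp(Opcodes.IFEQ, fals);   // the statement, not the condition, now emits the jump

    ifblock.write(writer, globals);      // child block
    writer.mark(fals);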

- * Generally, expression nodes have member data that evaluate static types. The typical order for an expression node + * Generally, expression nodes have member data that evaluate static and def types. The typical order for an expression node * during the analysis phase looks like the following: * {@code * For known expected types: @@ -127,16 +135,24 @@ * } * Expression nodes just call each child during the writing phase. *
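The SWhile.analyze hunk later in this change shows the tail of that sequence for a boolean condition; condensed as a sketch (the expected-type assignment is an assumption about the surrounding code and is not part of this diff):

    condition.expected = Definition.BOOLEAN_TYPE;  // assumed: the caller records the type it needs
    condition.analyze(locals);                     // the child resolves its actual type
    condition = condition.cast(locals);            // an ECast is swapped in when a cast is required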

- * Generally, link nodes have member data that help keep track of items necessary to do a - * load/store on a variable/field/method. Analysis of link nodes happens in a chain node - * where each link node will be analysed with the chain node acting as a bridge to pass the - * previous link's after type to the next link's before type. Upon analysis completion, a link - * will return either itself or another link node depending on if a shortcut or def type was found. - * Cast nodes as links will return null and be removed from the chain node if the cast is - * unnecessary. Link nodes have three methods for writing -- write, load, and store. The write - * method is always once called before a load/store to give links a chance to write any values - * such as array indices before the load/store happens. Load is called to read a link node, and - * store is called to write a link node. Note that store will only ever be called on the final - * link node in a chain, all previous links will be considered loads. + * Postfix nodes represent postfixes in a variable/method chain including braces, calls, or fields. + * Postfix nodes will always have a prefix node that is the prior piece of the variable/method chain. + * Analysis of a postfix node will cause a chain of analysis calls to happen where the prefix will + * be analyzed first and continue until the prefix is not a postfix node. Writing out a series of + * loads from a postfix node works in the same fashion. Stores work somewhat differently as + * described by later documentation. + *
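As an illustration of that chaining (based only on the node descriptions above, not on code in this change), a chain such as x.list[0].toString(), where x is a def variable, would decompose as:

    // EVariable    x              <- the innermost prefix, not itself a postfix
    // PField       .list          -> PSubDefField (def receiver, so the def subnode is chosen)
    // PBrace       [0]            -> PSubDefArray
    // PCallInvoke  .toString()    -> PSubDefCall
    //
    // analyze() on the outer PCallInvoke recurses into its prefix chain first, and writing
    // a series of loads walks the chain back out in the same order.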

+ * Storeable nodes have three methods for writing -- setup, load, and store. These methods + * are used in conjunction with a parent node aware of the storeable node (lhs) that has a node + * representing a value to store (rhs). The setup method is always called once before a store + * to give storeable nodes a chance to write any prefixes they may have and any values such as + * array indices before the store happens. Load is called on a storeable node that must also + * be read from, and store is called to write a value to memory. + *
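Condensed into a sketch (the exact parameters of setup, load, and store are not shown in this diff and are assumed here), the parent assignment node drives a store roughly as:

    lhs.setup(writer, globals);   // prefixes and values such as array indices are written first
    lhs.load(writer, globals);    // only when the assignment must also read the old value (compound assignment)
    rhs.write(writer, globals);   // the value to store ends up on top of the stack
    lhs.store(writer, globals);   // finally written back to memory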

+ * Sub nodes are partial nodes that require a parent to work correctly. These nodes can really + * represent anything the parent node would like to split up into logical pieces and don't really + * have any distinct set of rules. The currently existing subnodes all have ANode as a superclass + * somewhere in their class hierarchy so the parent node can defer some analysis and writing to + * the sub node. */ package org.elasticsearch.painless.node; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index ba6885eabf3..90f524f76de 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -67,7 +67,7 @@ public class BasicAPITests extends ScriptTestCase { ctx.put("_source", _source); params.put("ctx", ctx); - assertEquals("testvalue", exec("ctx._source['load'].5 = ctx._source['load'].remove('load5')", params)); + assertEquals("testvalue", exec("ctx._source['load'].5 = ctx._source['load'].remove('load5')", params, true)); } /** Test loads and stores with a list */ @@ -118,7 +118,7 @@ public class BasicAPITests extends ScriptTestCase { assertEquals("{}", exec("Map map = new HashMap(); return map.toString();")); assertEquals("{}", exec("def map = new HashMap(); return map.toString();")); } - + public void testPrimitivesHaveMethods() { assertEquals(5, exec("int x = 5; return x.intValue();")); assertEquals("5", exec("int x = 5; return x.toString();")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index f59a9209f4c..cbfdd31b143 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -68,14 +68,14 @@ public class BasicExpressionTests extends ScriptTestCase { "((Map)y).put(2, 3);\n" + "return x.get(2);\n")); } - + public void testIllegalDefCast() { - Exception exception = expectScriptThrows(ClassCastException.class, () -> { + Exception exception = expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1.0; int y = x; return y;"); }); assertTrue(exception.getMessage().contains("cannot be cast")); - exception = expectScriptThrows(ClassCastException.class, () -> { + exception = expectScriptThrows(ClassCastException.class, () -> { exec("def x = (short)1; byte y = x; return y;"); }); assertTrue(exception.getMessage().contains("cannot be cast")); @@ -112,11 +112,11 @@ public class BasicExpressionTests extends ScriptTestCase { */ public void testBoxing() { // return - assertEquals(4, exec("return params.get(\"x\");", Collections.singletonMap("x", 4))); + assertEquals(4, exec("return params.get(\"x\");", Collections.singletonMap("x", 4), true)); // assignment - assertEquals(4, exec("int y = params.get(\"x\"); return y;", Collections.singletonMap("x", 4))); + assertEquals(4, exec("int y = params.get(\"x\"); return y;", Collections.singletonMap("x", 4), true)); // comparison - assertEquals(true, exec("return 5 > params.get(\"x\");", Collections.singletonMap("x", 4))); + assertEquals(true, exec("return 5 > params.get(\"x\");", Collections.singletonMap("x", 4), true)); } public void testBool() { diff --git
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index e023ac364b3..a4f85f393bf 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -251,25 +251,25 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(10, exec("def i = 1; if (i == 1) {i = 2; return 10}")); assertEquals(10, exec("def i = 1; if (i == 1) {i = 2; return 10} else {return 12}")); } - + public void testArrayLoopWithoutCounter() { - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "for (int i = 0; i < array.length; i++) { sum += array[i] } return sum", + assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "for (int i = 0; i < array.length; i++) { sum += array[i] } return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null + null, true )); - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "int i = 0; while (i < array.length) { sum += array[i++] } return sum", + assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "int i = 0; while (i < array.length) { sum += array[i++] } return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null + null, true )); - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "int i = 0; do { sum += array[i++] } while (i < array.length); return sum", + assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "int i = 0; do { sum += array[i++] } while (i < array.length); return sum", Collections.emptyMap(), Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - null + null, true )); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java index 7e4448495a9..1df7eb349fb 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java @@ -23,6 +23,7 @@ package org.elasticsearch.painless; public class EqualsTests extends ScriptTestCase { public void testTypesEquals() { assertEquals(true, exec("return false === false;")); + assertEquals(false, exec("boolean x = false; boolean y = true; return x === y;")); assertEquals(true, exec("boolean x = false; boolean y = false; return x === y;")); assertEquals(false, exec("return (byte)3 === (byte)4;")); assertEquals(true, exec("byte x = 3; byte y = 3; return x === y;")); @@ -40,6 +41,7 @@ public class EqualsTests extends ScriptTestCase { assertEquals(true, exec("double x = 3; double y = 3; return x === y;")); assertEquals(true, exec("return false == false;")); + assertEquals(false, exec("boolean x = false; boolean y = true; return x == y;")); assertEquals(true, exec("boolean x = false; boolean y = false; return x == y;")); assertEquals(false, exec("return (byte)3 == (byte)4;")); assertEquals(true, exec("byte x = 3; byte y = 3; return x == y;")); @@ -59,6 +61,7 @@ public class EqualsTests extends ScriptTestCase { public void testTypesNotEquals() { assertEquals(false, exec("return true !== true;")); + assertEquals(true, exec("boolean x = true; boolean y = false; 
return x !== y;")); assertEquals(false, exec("boolean x = false; boolean y = false; return x !== y;")); assertEquals(true, exec("return (byte)3 !== (byte)4;")); assertEquals(false, exec("byte x = 3; byte y = 3; return x !== y;")); @@ -76,6 +79,7 @@ public class EqualsTests extends ScriptTestCase { assertEquals(false, exec("double x = 3; double y = 3; return x !== y;")); assertEquals(false, exec("return true != true;")); + assertEquals(true, exec("boolean x = true; boolean y = false; return x != y;")); assertEquals(false, exec("boolean x = false; boolean y = false; return x != y;")); assertEquals(true, exec("return (byte)3 != (byte)4;")); assertEquals(false, exec("byte x = 3; byte y = 3; return x != y;")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java index c57a756f109..8fbc82dce35 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java @@ -64,7 +64,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(10, exec("10")); assertEquals(10, exec("5 + 5")); assertEquals(10, exec("5 + 5")); - assertEquals(10, exec("params.param == 'yes' ? 10 : 5", Collections.singletonMap("param", "yes"))); + assertEquals(10, exec("params.param == 'yes' ? 10 : 5", Collections.singletonMap("param", "yes"), true)); } @SuppressWarnings("rawtypes") diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PostfixTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PostfixTests.java new file mode 100644 index 00000000000..806b885a826 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PostfixTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +public class PostfixTests extends ScriptTestCase { + public void testConstantPostfixes() { + assertEquals("2", exec("2.toString()")); + assertEquals(4, exec("[1, 2, 3, 4, 5][3]")); + assertEquals("4", exec("[1, 2, 3, 4, 5][3].toString()")); + assertEquals(3, exec("new int[] {1, 2, 3, 4, 5}[2]")); + assertEquals("4", exec("(2 + 2).toString()")); + } + + public void testConditionalPostfixes() { + assertEquals("5", exec("boolean b = false; (b ? 4 : 5).toString()")); + assertEquals(3, exec( + "Map x = new HashMap(); x['test'] = 3;" + + "Map y = new HashMap(); y['test'] = 4;" + + "boolean b = true;" + + "return (int)(b ? 
x : y).get('test')") + ); + } + + public void testAssignmentPostfixes() { + assertEquals(true, exec("int x; '3' == (x = 3).toString()")); + assertEquals(-1, exec("int x; (x = 3).compareTo(4)")); + assertEquals(3L, exec("long[] x; (x = new long[1])[0] = 3; return x[0]")); + assertEquals(2, exec("int x; ((x)) = 2; return x;")); + } + + public void testDefConditionalPostfixes() { + assertEquals("5", exec("def b = false; (b ? 4 : 5).toString()")); + assertEquals(3, exec( + "def x = new HashMap(); x['test'] = 3;" + + "def y = new HashMap(); y['test'] = 4;" + + "boolean b = true;" + + "return (b ? x : y).get('test')") + ); + } + + public void testDefAssignmentPostfixes() { + assertEquals(true, exec("def x; '3' == (x = 3).toString()")); + assertEquals(-1, exec("def x; (x = 3).compareTo(4)")); + assertEquals(3L, exec("def x; (x = new long[1])[0] = 3; return x[0]")); + assertEquals(2, exec("def x; ((x)) = 2; return x;")); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java index 615dec67dc4..dbbb9958d71 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java @@ -114,8 +114,8 @@ public class RegexTests extends ScriptTestCase { } public void testFindOnInput() { - assertEquals(true, exec("return params.s =~ /foo/", singletonMap("s", "fooasdfdf"))); - assertEquals(false, exec("return params.s =~ /foo/", singletonMap("s", "11f2ooasdfdf"))); + assertEquals(true, exec("return params.s =~ /foo/", singletonMap("s", "fooasdfdf"), true)); + assertEquals(false, exec("return params.s =~ /foo/", singletonMap("s", "11f2ooasdfdf"), true)); } public void testGroup() { @@ -183,7 +183,7 @@ public class RegexTests extends ScriptTestCase { public void testReplaceAllMatchesCharSequence() { CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); assertEquals("thE qUIck brOwn fOx", - exec("params.a.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + exec("params.a.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); } public void testReplaceAllNoMatchString() { @@ -193,7 +193,7 @@ public class RegexTests extends ScriptTestCase { public void testReplaceAllNoMatchCharSequence() { CharSequence charSequence = CharBuffer.wrap("i am cat"); assertEquals("i am cat", - exec("params.a.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + exec("params.a.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); } public void testReplaceAllQuoteReplacement() { @@ -211,7 +211,7 @@ public class RegexTests extends ScriptTestCase { public void testReplaceFirstMatchesCharSequence() { CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); assertEquals("thE quick brown fox", - exec("params.a.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + exec("params.a.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); } public void testReplaceFirstNoMatchString() { @@ -221,7 +221,7 @@ public class RegexTests extends ScriptTestCase { public void testReplaceFirstNoMatchCharSequence() { CharSequence charSequence = CharBuffer.wrap("i am cat"); assertEquals("i am cat", - 
exec("params.a.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + exec("params.a.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); } public void testReplaceFirstQuoteReplacement() { @@ -255,7 +255,7 @@ public class RegexTests extends ScriptTestCase { public void testBogusRegexFlag() { IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("/asdf/b", emptyMap(), emptyMap(), null); // Not picky so we get a non-assertion error + exec("/asdf/b", false); // Not picky so we get a non-assertion error }); assertEquals("unexpected token ['b'] was expecting one of [{, ';'}].", e.getMessage()); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java index fdc4fba4313..08b78b1c708 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ReservedWordTests.java @@ -75,7 +75,7 @@ public class ReservedWordTests extends ScriptTestCase { /** check that we can modify its contents though */ public void testCtxStoreMap() { - assertEquals(5, exec("ctx.foo = 5; return ctx.foo;", Collections.singletonMap("ctx", new HashMap()))); + assertEquals(5, exec("ctx.foo = 5; return ctx.foo;", Collections.singletonMap("ctx", new HashMap()), true)); } /** check that we can't declare a variable of _value, its really reserved! */ diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java index 63947ced79a..a0df66dac73 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java @@ -53,7 +53,8 @@ public class ScoreTests extends ScriptTestCase { public float score() throws IOException { return 2.5f; } - })); + }, + true)); } public void testScoreNotUsed() { @@ -63,7 +64,8 @@ public class ScoreTests extends ScriptTestCase { public float score() throws IOException { throw new AssertionError("score() should not be called"); } - })); + }, + true)); } public void testScoreCached() { @@ -77,6 +79,7 @@ public class ScoreTests extends ScriptTestCase { } throw new AssertionError("score() should not be called twice"); } - })); + }, + true)); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java index 828b0b8e73b..2cd21c0596f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java @@ -46,12 +46,12 @@ public class ScriptEngineTests extends ScriptTestCase { obj1.put("l", Arrays.asList("2", "1")); vars.put("obj1", obj1); - Object value = exec("return params['obj1'];", vars); + Object value = exec("return params['obj1'];", vars, true); obj1 = (Map)value; assertEquals("value1", obj1.get("prop1")); assertEquals("value2", ((Map) obj1.get("obj2")).get("prop2")); - value = exec("return params.obj1.l.0;", vars); + value = exec("return params.obj1.l.0;", vars, true); assertEquals("2", value); } @@ -65,15 +65,15 @@ public class ScriptEngineTests extends ScriptTestCase { 
obj1.put("obj2", obj2); vars.put("l", Arrays.asList("1", "2", "3", obj1)); - assertEquals(4, exec("return params.l.size();", vars)); - assertEquals("1", exec("return params.l.0;", vars)); + assertEquals(4, exec("return params.l.size();", vars, true)); + assertEquals("1", exec("return params.l.0;", vars, true)); - Object value = exec("return params.l.3;", vars); + Object value = exec("return params.l.3;", vars, true); obj1 = (Map)value; assertEquals("value1", obj1.get("prop1")); assertEquals("value2", ((Map)obj1.get("obj2")).get("prop2")); - assertEquals("value1", exec("return params.l.3.prop1;", vars)); + assertEquals("value1", exec("return params.l.3.prop1;", vars, true)); } public void testChangingVarsCrossExecution1() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 0bec55b5428..63c929a69a7 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptException; @@ -49,19 +50,30 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} */ public Object exec(String script) { - return exec(script, null); + return exec(script, null, true); + } + + /** Compiles and returns the result of {@code script} with access to {@code picky} */ + public Object exec(String script, boolean picky) { + return exec(script, null, picky); } /** Compiles and returns the result of {@code script} with access to {@code vars} */ - public Object exec(String script, Map vars) { + public Object exec(String script, Map vars, boolean picky) { Map compilerSettings = new HashMap<>(); - compilerSettings.put(CompilerSettings.PICKY, "true"); compilerSettings.put(CompilerSettings.INITIAL_CALL_SITE_DEPTH, random().nextBoolean() ? 
"0" : "10"); - return exec(script, vars, compilerSettings, null); + return exec(script, vars, compilerSettings, null, picky); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ - public Object exec(String script, Map vars, Map compileParams, Scorer scorer) { + public Object exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) { + // test for ambiguity errors before running the actual script if picky is true + if (picky) { + CompilerSettings pickySettings = new CompilerSettings(); + pickySettings.setPicky(true); + Walker.buildPainlessTree(getTestName(), script, pickySettings, null); + } + // test actual script execution Object object = scriptEngine.compile(null, script, compileParams); CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "painless", object); ExecutableScript executableScript = scriptEngine.executable(compiled, vars); @@ -79,7 +91,7 @@ public abstract class ScriptTestCase extends ESTestCase { final String asm = Debugger.toString(script); assertTrue("bytecode not found, got: \n" + asm , asm.contains(bytecode)); } - + /** * Uses the {@link Debugger} to get the bytecode output for a script and compare * it against an expected bytecode pattern as a regular expression (please try to avoid!) @@ -88,7 +100,7 @@ public abstract class ScriptTestCase extends ESTestCase { final String asm = Debugger.toString(script); assertTrue("bytecode not found, got: \n" + asm , asm.matches(pattern)); } - + /** Checks a specific exception class is thrown (boxed inside ScriptException) and returns it. */ public static T expectScriptThrows(Class expectedType, ThrowingRunnable runnable) { try { @@ -104,7 +116,7 @@ public abstract class ScriptTestCase extends ESTestCase { assertion.initCause(e); throw assertion; } - AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName()); assertion.initCause(e); throw assertion; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java index bf78ee0afa5..2e6aa80b3e4 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java @@ -31,27 +31,27 @@ public class TryCatchTests extends ScriptTestCase { }); assertEquals("test", exception.getMessage()); } - + /** catches the exact exception */ public void testCatch() { - assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (RuntimeException e) { return 1; } return 2;", - Collections.singletonMap("param", "true"))); + assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (RuntimeException e) { return 1; } return 2;", + Collections.singletonMap("param", "true"), true)); } - + /** catches superclass of the exception */ public void testCatchSuperclass() { - assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (Exception e) { return 1; } return 2;", - Collections.singletonMap("param", "true"))); + assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (Exception e) { return 1; } return 
2;", + Collections.singletonMap("param", "true"), true)); } - + /** tries to catch a different type of exception */ public void testNoCatch() { RuntimeException exception = expectScriptThrows(RuntimeException.class, () -> { - exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (ArithmeticException e) { return 1; } return 2;", - Collections.singletonMap("param", "true")); + exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (ArithmeticException e) { return 1; } return 2;", + Collections.singletonMap("param", "true"), true); }); assertEquals("test", exception.getMessage()); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index fa1538e56aa..b09705803fb 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -85,7 +85,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testBogusParameter() { IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), null); + exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), null, true); }); assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); } @@ -138,7 +138,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { "The maximum number of statements that can be executed in a loop has been reached.")); RuntimeException parseException = expectScriptThrows(RuntimeException.class, () -> { - exec("try { int x; } catch (PainlessError error) {}"); + exec("try { int x; } catch (PainlessError error) {}", false); fail("should have hit ParseException"); }); assertTrue(parseException.getMessage().contains("unexpected token ['PainlessError']")); @@ -208,9 +208,9 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testRCurlyNotDelim() { IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { // We don't want PICKY here so we get the normal error message - exec("def i = 1} return 1", emptyMap(), emptyMap(), null); + exec("def i = 1} return 1", emptyMap(), emptyMap(), null, false); }); - assertEquals("invalid sequence of tokens near ['}'].", e.getMessage()); + assertEquals("unexpected token ['}'] was expecting one of [].", e.getMessage()); } public void testBadBoxingCast() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java deleted file mode 100644 index e2bd880b646..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.antlr; - -import org.antlr.v4.runtime.ANTLRInputStream; -import org.antlr.v4.runtime.BaseErrorListener; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.DiagnosticErrorListener; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.Recognizer; -import org.antlr.v4.runtime.atn.PredictionMode; -import org.elasticsearch.painless.antlr.PainlessParser.SourceContext; -import org.elasticsearch.painless.ScriptTestCase; - -import java.text.ParseException; - -public class ParserTests extends ScriptTestCase { - private static class TestException extends RuntimeException { - TestException(String msg) { - super(msg); - } - } - - private SourceContext buildAntlrTree(String source) { - ANTLRInputStream stream = new ANTLRInputStream(source); - PainlessLexer lexer = new EnhancedPainlessLexer(stream, "testing"); - PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); - ParserErrorStrategy strategy = new ParserErrorStrategy("testing"); - - lexer.removeErrorListeners(); - parser.removeErrorListeners(); - - // Diagnostic listener invokes syntaxError on other listeners for ambiguity issues, - parser.addErrorListener(new DiagnosticErrorListener(true)); - // a second listener to fail the test when the above happens. - parser.addErrorListener(new BaseErrorListener() { - @Override - public void syntaxError(final Recognizer recognizer, final Object offendingSymbol, final int line, - final int charPositionInLine, final String msg, final RecognitionException e) { - throw new TestException("line: " + line + ", offset: " + charPositionInLine + - ", symbol:" + offendingSymbol + " " + msg); - } - }); - - // Enable exact ambiguity detection (costly). we enable exact since its the default for - // DiagnosticErrorListener, life is too short to think about what 'inexact ambiguity' might mean. - parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); - parser.setErrorHandler(strategy); - - return parser.source(); - } - - public void testIllegalSecondary() { - //TODO: Need way more corner case tests. 
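The hand-rolled ANTLR ambiguity checks in this deleted ParserTests class are superseded by the picky flag threaded through ScriptTestCase earlier in this diff: when picky is true, Walker.buildPainlessTree runs with CompilerSettings.setPicky(true) before the script executes. A minimal sketch of a test written against the new exec overloads; the class name, the scripts, and the asserted message fragment are illustrative only, not part of this change:

--------------------------------------
package org.elasticsearch.painless;

public class PickyModeSketchTests extends ScriptTestCase {
    public void testPickyAndLenientCompilation() {
        // exec(String) defaults to picky=true, so grammar ambiguities fail the test
        // before the script is ever executed.
        assertEquals(2, exec("int x = 1; return x + 1;"));

        // A test that asserts on the lenient parser's error message opts out of picky mode.
        IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () ->
                exec("def i = 1} return 1", false));
        assertTrue(e.getMessage().contains("unexpected token"));
    }
}
--------------------------------------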
- Exception exception = expectThrows(TestException.class, () -> buildAntlrTree("(x = 5).y")); - assertTrue(exception.getMessage().contains("no viable alternative")); - exception = expectThrows(TestException.class, () -> buildAntlrTree("((x = 5).y = 2).z;")); - assertTrue(exception.getMessage().contains("no viable alternative")); - exception = expectThrows(TestException.class, () -> buildAntlrTree("(2 + 2).z")); - assertTrue(exception.getMessage().contains("no viable alternative")); - exception = expectThrows(RuntimeException.class, () -> buildAntlrTree("((Map)x.-x)")); - assertTrue(exception.getMessage().contains("unexpected character")); - } - - public void testLambdaSyntax() { - buildAntlrTree("call(p -> {p.doSomething();});"); - buildAntlrTree("call(int p -> {p.doSomething();});"); - buildAntlrTree("call((p, u, v) -> {p.doSomething(); blah = 1;});"); - buildAntlrTree("call(1, (p, u, v) -> {p.doSomething(); blah = 1;}, 3);"); - buildAntlrTree("call((p, u, v) -> {p.doSomething(); blah = 1;});"); - buildAntlrTree("call(x, y, z, (int p, int u, int v) -> {p.doSomething(); blah = 1;});"); - buildAntlrTree("call(x, y, z, (long p, List u, String v) -> {p.doSomething(); blah = 1;});"); - buildAntlrTree("call(x, y, z, (int p, u, int v) -> {p.doSomething(); blah = 1;});"); - buildAntlrTree("call(x, (int p, u, int v) -> {p.doSomething(); blah = 1;}, z," + - " (int p, u, int v) -> {p.doSomething(); blah = 1;}, 'test');"); - } -} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index d40c1ea6622..03c5054afd2 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -56,6 +56,7 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static java.util.Collections.emptyMap; import static java.util.Objects.requireNonNull; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -151,11 +152,12 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler headers = extractStringStringMap(remote, "headers"); if (false == remote.isEmpty()) { throw new IllegalArgumentException( "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]"); } - return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password); + return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password, headers); } /** @@ -189,6 +191,25 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler extractStringStringMap(Map source, String name) { + Object value = source.remove(name); + if (value == null) { + return emptyMap(); + } + if (false == value instanceof Map) { + throw new IllegalArgumentException("Expected [" + name + "] to be an object containing strings but was [" + value + "]"); + } + Map map = (Map) value; + for (Map.Entry entry : map.entrySet()) { + if (false == entry.getKey() instanceof String || false == entry.getValue() instanceof String) { + throw new IllegalArgumentException("Expected [" + name + "] to be an object containing strings but has [" + entry + "]"); + } + } + @SuppressWarnings("unchecked") // We just checked.... 
+ Map safe = (Map) map; + return safe; + } + private static BytesReference queryForRemote(Map source) throws IOException { XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); Object query = source.remove("query"); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 2238bec433b..291cd4f7b4d 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -19,7 +19,13 @@ package org.elasticsearch.index.reindex; +import org.apache.http.Header; import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.message.BasicHeader; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BackoffPolicy; @@ -31,6 +37,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -178,16 +185,27 @@ public class TransportReindexAction extends HandledTransportAction header : remoteInfo.getHeaders().entrySet()) { + clientHeaders[i] = new BasicHeader(header.getKey(), header.getValue()); } - RestClient restClient = RestClient.builder( - new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())).build(); - return new RemoteScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, - this::finishHim, restClient, remoteInfo.getQuery(), mainRequest.getSearchRequest()); + RestClientBuilder restClient = RestClient + .builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) + .setDefaultHeaders(clientHeaders); + if (remoteInfo.getUsername() != null) { + restClient.setHttpClientConfigCallback(c -> { + UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), + remoteInfo.getPassword()); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, creds); + c.setDefaultCredentialsProvider(credentialsProvider); + return c; + }); + } + return new RemoteScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, + restClient.build(), remoteInfo.getQuery(), mainRequest.getSearchRequest()); } return super.buildScrollableResultSource(backoffPolicy); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/package-info.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/package-info.java new file mode 100644 index 00000000000..bb16842867f --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Actions that modify documents based on the results of a scrolling query like {@link ReindexAction}, {@link UpdateByQueryAction}, and + * {@link DeleteByQueryAction}. + */ +package org.elasticsearch.index.reindex; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java index 89d6cb18401..1405d656d99 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java @@ -26,7 +26,10 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import static java.util.Collections.unmodifiableMap; import static java.util.Objects.requireNonNull; public class RemoteInfo implements Writeable { @@ -36,14 +39,17 @@ public class RemoteInfo implements Writeable { private final BytesReference query; private final String username; private final String password; + private final Map headers; - public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password) { + public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password, + Map headers) { this.scheme = requireNonNull(scheme, "[scheme] must be specified to reindex from a remote cluster"); this.host = requireNonNull(host, "[host] must be specified to reindex from a remote cluster"); this.port = port; this.query = requireNonNull(query, "[query] must be specified to reindex from a remote cluster"); this.username = username; this.password = password; + this.headers = unmodifiableMap(requireNonNull(headers, "[headers] is required")); } /** @@ -56,6 +62,12 @@ public class RemoteInfo implements Writeable { query = in.readBytesReference(); username = in.readOptionalString(); password = in.readOptionalString(); + int headersLength = in.readVInt(); + Map headers = new HashMap<>(headersLength); + for (int i = 0; i < headersLength; i++) { + headers.put(in.readString(), in.readString()); + } + this.headers = unmodifiableMap(headers); } @Override @@ -66,6 +78,11 @@ public class RemoteInfo implements Writeable { out.writeBytesReference(query); out.writeOptionalString(username); out.writeOptionalString(password); + out.writeVInt(headers.size()); + for (Map.Entry header : headers.entrySet()) { + out.writeString(header.getKey()); + out.writeString(header.getValue()); + } } public String getScheme() { @@ -94,6 +111,10 @@ public class RemoteInfo implements Writeable { return password; } + public Map getHeaders() { + return headers; + } + @Override public String toString() { StringBuilder b = new 
StringBuilder(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index eee3e2c59ae..41f6dd5f946 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -20,13 +20,16 @@ package org.elasticsearch.index.reindex.remote; import org.apache.http.HttpEntity; +import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; import org.elasticsearch.common.Strings; @@ -34,6 +37,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -130,6 +134,8 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { private void execute(String method, String uri, Map params, HttpEntity entity, BiFunction parser, Consumer listener) { + // Preserve the thread context so headers survive after the call + ThreadContext.StoredContext ctx = threadPool.getThreadContext().newStoredContext(); class RetryHelper extends AbstractRunnable { private final Iterator retries = backoffPolicy.iterator(); @@ -138,6 +144,8 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { client.performRequest(method, uri, params, entity, new ResponseListener() { @Override public void onSuccess(org.elasticsearch.client.Response response) { + // Restore the thread context to get the precious headers + ctx.restore(); T parsedResponse; try { HttpEntity responseEntity = response.getEntity(); @@ -172,6 +180,8 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { return; } } + e = wrapExceptionToPreserveStatus(re.getResponse().getStatusLine().getStatusCode(), + re.getResponse().getEntity(), re); } fail.accept(e); } @@ -185,4 +195,31 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } new RetryHelper().run(); } + + /** + * Wrap the ResponseException in an exception that'll preserve its status code if possible so we can send it back to the user. We might + * not have a constant for the status code so in that case we just use 500 instead. We also make sure to include the response + * body in the message so the user can figure out *why* the remote Elasticsearch service threw the error back to us.
+ */ + static ElasticsearchStatusException wrapExceptionToPreserveStatus(int statusCode, @Nullable HttpEntity entity, Exception cause) { + RestStatus status = RestStatus.fromCode(statusCode); + String messagePrefix = ""; + if (status == null) { + messagePrefix = "Couldn't extract status [" + statusCode + "]. "; + status = RestStatus.INTERNAL_SERVER_ERROR; + } + String message; + if (entity == null) { + message = messagePrefix + "No error body."; + } else { + try { + message = messagePrefix + "body=" + EntityUtils.toString(entity); + } catch (IOException ioe) { + ElasticsearchStatusException e = new ElasticsearchStatusException(messagePrefix + "Failed to extract body.", status, cause); + e.addSuppressed(ioe); + return e; + } + } + return new ElasticsearchStatusException(message, status, cause); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/IDefLink.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/package-info.java similarity index 79% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/IDefLink.java rename to modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/package-info.java index 12c39f2e886..6dd03ad2952 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/IDefLink.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/package-info.java @@ -17,11 +17,7 @@ * under the License. */ -package org.elasticsearch.painless.node; - /** - * A marker interface applied to LDef* nodes allowing changes to {@link ALink#after} from outside, - * by default {@code after} is {@code DEF}. + * Support for reindexing from a remote Elasticsearch cluster. */ -interface IDefLink { -} +package org.elasticsearch.index.reindex.remote; \ No newline at end of file diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java index b88ed135515..61204177072 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -31,6 +31,8 @@ import java.net.UnknownHostException; import java.util.HashSet; import java.util.Set; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.index.reindex.TransportReindexAction.checkRemoteWhitelist; @@ -58,7 +60,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase { String[] inList = whitelist.iterator().next().split(":"); String host = inList[0]; int port = Integer.valueOf(inList[1]); - checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), host, port, new BytesArray("test"), null, null), + checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap()), localhostOrNone()); } @@ -66,14 +68,15 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase { Set whitelist = randomWhitelist(); whitelist.add("myself"); TransportAddress publishAddress = new InetSocketTransportAddress(InetAddress.getByAddress(new byte[] {0x7f,0x00,0x00,0x01}), 9200); - checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), "127.0.0.1", 9200, new BytesArray("test"), null, null), - publishAddress); + 
checkRemoteWhitelist(whitelist, + new RemoteInfo(randomAsciiOfLength(5), "127.0.0.1", 9200, new BytesArray("test"), null, null, emptyMap()), publishAddress); } public void testUnwhitelistedRemote() { int port = between(1, Integer.MAX_VALUE); - Exception e = expectThrows(IllegalArgumentException.class, () -> checkRemoteWhitelist(randomWhitelist(), - new RemoteInfo(randomAsciiOfLength(5), "not in list", port, new BytesArray("test"), null, null), localhostOrNone())); + RemoteInfo remoteInfo = new RemoteInfo(randomAsciiOfLength(5), "not in list", port, new BytesArray("test"), null, null, emptyMap()); + Exception e = expectThrows(IllegalArgumentException.class, + () -> checkRemoteWhitelist(randomWhitelist(), remoteInfo, localhostOrNone())); assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java new file mode 100644 index 00000000000..d305fc77331 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -0,0 +1,197 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Netty4Plugin; +import org.junit.Before; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.elasticsearch.index.reindex.ReindexTestCase.matcher; +import static org.hamcrest.Matchers.containsString; + +public class ReindexFromRemoteWithAuthTests extends ESSingleNodeTestCase { + private TransportAddress address; + + @Override + protected Collection> getPlugins() { + return Arrays.asList(RetryTests.BogusPlugin.class, + Netty4Plugin.class, + ReindexFromRemoteWithAuthTests.TestPlugin.class, + ReindexPlugin.class); + } + + @Override + protected Settings nodeSettings() { + Settings.Builder settings = Settings.builder().put(super.nodeSettings()); + // Weird incantation required to test with netty + settings.put("netty.assert.buglevel", false); + settings.put(NetworkModule.HTTP_ENABLED.getKey(), true); + // Whitelist reindexing from the http host we're going to use + settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "myself"); + settings.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); + return settings.build(); + } + + @Before + public void setupSourceIndex() { + client().prepareIndex("source", "test").setSource("test", "test").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + } + + @Before + public void fetchTransportAddress() { + NodeInfo nodeInfo = client().admin().cluster().prepareNodesInfo().get().getNodes().get(0); + address = nodeInfo.getHttp().getAddress().publishAddress(); + } + + public void testReindexFromRemoteWithAuthentication() throws Exception { + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), "Aladdin", + "open sesame", emptyMap()); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + assertThat(request.get(), matcher().created(1)); + } + + public void testReindexSendsHeaders() throws Exception { + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new 
BytesArray("{\"match_all\":{}}"), null, null, + singletonMap(TestFilter.EXAMPLE_HEADER, "doesn't matter")); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> request.get()); + assertEquals(RestStatus.BAD_REQUEST, e.status()); + assertThat(e.getMessage(), containsString("Hurray! Sent the header!")); + } + + public void testReindexWithoutAuthenticationWhenRequired() throws Exception { + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, null, + emptyMap()); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> request.get()); + assertEquals(RestStatus.UNAUTHORIZED, e.status()); + assertThat(e.getMessage(), containsString("\"reason\":\"Authentication required\"")); + assertThat(e.getMessage(), containsString("\"WWW-Authenticate\":\"Basic realm=auth-realm\"")); + } + + public void testReindexWithBadAuthentication() throws Exception { + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), "junk", + "auth", emptyMap()); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> request.get()); + assertThat(e.getMessage(), containsString("\"reason\":\"Bad Authorization\"")); + } + + /** + * Plugin that demands authentication. + */ + public static class TestPlugin extends Plugin implements ActionPlugin { + @Override + public List> getActionFilters() { + return singletonList(ReindexFromRemoteWithAuthTests.TestFilter.class); + } + + @Override + public Collection getRestHeaders() { + return Arrays.asList(TestFilter.AUTHORIZATION_HEADER, TestFilter.EXAMPLE_HEADER); + } + } + + /** + * Action filter that will reject the request if it isn't authenticated. + */ + public static class TestFilter implements ActionFilter { + /** + * The authorization required. Corresponds to username="Aladdin" and password="open sesame". It is the example in + * HTTP/1.0's RFC. + */ + private static final String REQUIRED_AUTH = "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="; + private static final String AUTHORIZATION_HEADER = "Authorization"; + private static final String EXAMPLE_HEADER = "Example-Header"; + private final ThreadContext context; + + @Inject + public TestFilter(ThreadPool threadPool) { + context = threadPool.getThreadContext(); + } + + @Override + public int order() { + return Integer.MIN_VALUE; + } + + @Override + public , Response extends ActionResponse> void apply(Task task, String action, + Request request, ActionListener listener, ActionFilterChain chain) { + if (false == action.equals(SearchAction.NAME)) { + chain.proceed(task, action, request, listener); + return; + } + if (context.getHeader(EXAMPLE_HEADER) != null) { + throw new IllegalArgumentException("Hurray! 
Sent the header!"); + } + String auth = context.getHeader(AUTHORIZATION_HEADER); + if (auth == null) { + ElasticsearchSecurityException e = new ElasticsearchSecurityException("Authentication required", + RestStatus.UNAUTHORIZED); + e.addHeader("WWW-Authenticate", "Basic realm=auth-realm"); + throw e; + } + if (false == REQUIRED_AUTH.equals(auth)) { + throw new ElasticsearchSecurityException("Bad Authorization", RestStatus.FORBIDDEN); + } + chain.proceed(task, action, request, listener); + } + + @Override + public void apply(String action, Response response, ActionListener listener, + ActionFilterChain chain) { + chain.proceed(action, response, listener); + } + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index efaf5e627ad..0455e43ec09 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.test.ESTestCase; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; /** @@ -44,7 +45,7 @@ public class ReindexRequestTests extends ESTestCase { public void testReindexFromRemoteDoesNotSupportSearchQuery() { ReindexRequest reindex = request(); reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), between(1, Integer.MAX_VALUE), - new BytesArray("real_query"), null, null)); + new BytesArray("real_query"), null, null, emptyMap())); reindex.getSearchRequest().source().query(matchAllQuery()); // Unsupported place to put query ActionRequestValidationException e = reindex.validate(); assertEquals("Validation Failed: 1: reindex from remote sources should use RemoteInfo's query instead of source's query;", diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index 66896406c66..1213762155b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.test.ESTestCase; +import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.containsString; /** @@ -86,9 +87,10 @@ public class ReindexSourceTargetValidationTests extends ESTestCase { public void testRemoteInfoSkipsValidation() { // The index doesn't have to exist - succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "does_not_exist", "target"); + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap()), "does_not_exist", + "target"); // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. 
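For reference alongside these tests, which all pass emptyMap() for the new trailing argument, here is a minimal sketch of how a caller supplies real headers and basic credentials to a remote reindex through RemoteInfo and the request builder used in ReindexFromRemoteWithAuthTests above; the host, port, index names, and header value are illustrative only:

--------------------------------------
package org.elasticsearch.index.reindex;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.reindex.remote.RemoteInfo;

import static java.util.Collections.singletonMap;

public class RemoteReindexSketch {
    /** Builds a reindex-from-remote request that carries one custom header plus basic auth. */
    static ReindexRequestBuilder remoteReindex(Client client) {
        RemoteInfo remote = new RemoteInfo("http", "otherhost", 9200, new BytesArray("{\"match_all\":{}}"),
                "testuser", "testpass", singletonMap("Example-Header", "some value"));
        return ReindexAction.INSTANCE.newRequestBuilder(client)
                .source("source")
                .destination("dest")
                .setRemoteInfo(remote);
    }
}
--------------------------------------

On the other end, TransportReindexAction turns these headers into default headers on the low-level RestClient via setDefaultHeaders and installs the username and password through a BasicCredentialsProvider in its HttpClientConfigCallback, as shown in the TransportReindexAction hunk earlier in this diff.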
- succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "target", "target"); + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap()), "target", "target"); } private void fails(String target, String... sources) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index 1cbec59c49d..b27ecfa3eb1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -41,10 +41,16 @@ public class RestReindexActionTests extends ESTestCase { } public void testBuildRemoteInfoFullyLoaded() throws IOException { + Map headers = new HashMap<>(); + headers.put("first", "a"); + headers.put("second", "b"); + headers.put("third", ""); + Map remote = new HashMap<>(); remote.put("host", "https://example.com:9200"); remote.put("username", "testuser"); remote.put("password", "testpass"); + remote.put("headers", headers); Map query = new HashMap<>(); query.put("a", "b"); @@ -60,6 +66,7 @@ public class RestReindexActionTests extends ESTestCase { assertEquals("{\n \"a\" : \"b\"\n}", remoteInfo.getQuery().utf8ToString()); assertEquals("testuser", remoteInfo.getUsername()); assertEquals("testpass", remoteInfo.getPassword()); + assertEquals(headers, remoteInfo.getHeaders()); } public void testBuildRemoteInfoWithoutAllParts() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 2be27a1a1ad..ecebe141ce9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -47,6 +47,7 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.CyclicBarrier; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.reindex.ReindexTestCase.matcher; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; @@ -114,8 +115,8 @@ public class RetryTests extends ESSingleNodeTestCase { @Override protected Settings nodeSettings() { Settings.Builder settings = Settings.builder().put(super.nodeSettings()); - // Use pools of size 1 so we can block them settings.put("netty.assert.buglevel", false); + // Use pools of size 1 so we can block them settings.put("thread_pool.bulk.size", 1); settings.put("thread_pool.search.size", 1); // Use queues of size 1 because size 0 is broken and because search requests need the queue to function @@ -140,7 +141,8 @@ public class RetryTests extends ESSingleNodeTestCase { public void testReindexFromRemote() throws Exception { NodeInfo nodeInfo = client().admin().cluster().prepareNodesInfo().get().getNodes().get(0); TransportAddress address = nodeInfo.getHttp().getAddress().publishAddress(); - RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, null); + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, null, + emptyMap()); ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") 
.setRemoteInfo(remote); testCase(ReindexAction.NAME, request, matcher().created(DOC_COUNT)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 3e3b3a63d62..d30c7c8735d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -38,7 +38,9 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static java.lang.Math.abs; import static java.util.Collections.emptyList; @@ -62,7 +64,12 @@ public class RoundTripTests extends ESTestCase { BytesReference query = new BytesArray(randomAsciiOfLength(5)); String username = randomBoolean() ? randomAsciiOfLength(5) : null; String password = username != null && randomBoolean() ? randomAsciiOfLength(5) : null; - reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), port, query, username, password)); + int headersCount = randomBoolean() ? 0 : between(1, 10); + Map headers = new HashMap<>(headersCount); + while (headers.size() < headersCount) { + headers.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + } + reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), port, query, username, password, headers)); } ReindexRequest tripped = new ReindexRequest(); roundTrip(reindex, tripped); @@ -78,6 +85,7 @@ public class RoundTripTests extends ESTestCase { assertEquals(reindex.getRemoteInfo().getQuery(), tripped.getRemoteInfo().getQuery()); assertEquals(reindex.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername()); assertEquals(reindex.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword()); + assertEquals(reindex.getRemoteInfo().getHeaders(), tripped.getRemoteInfo().getHeaders()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java index 5492a05986c..3ee647aa55b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java @@ -22,15 +22,17 @@ package org.elasticsearch.index.reindex.remote; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.test.ESTestCase; +import static java.util.Collections.emptyMap; + public class RemoteInfoTests extends ESTestCase { public void testToString() { - RemoteInfo info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), null, null); + RemoteInfo info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), null, null, emptyMap()); assertEquals("host=testhost port=12344 query=testquery", info.toString()); - info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", null); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", null, emptyMap()); assertEquals("host=testhost port=12344 query=testquery username=testuser", info.toString()); - info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass", 
emptyMap()); assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); - info = new RemoteInfo("https", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + info = new RemoteInfo("https", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass", emptyMap()); assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 6af4dab9405..6407bc0195b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex.remote; +import org.apache.http.HttpEntity; import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -27,12 +28,14 @@ import org.apache.http.StatusLine; import org.apache.http.concurrent.FutureCallback; import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; +import org.apache.http.entity.StringEntity; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicStatusLine; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; @@ -53,6 +56,7 @@ import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; @@ -316,6 +320,66 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals(retriesAllowed, retries); } + public void testThreadContextRestored() throws Exception { + String header = randomAsciiOfLength(5); + threadPool.getThreadContext().putHeader("test", header); + AtomicBoolean called = new AtomicBoolean(); + sourceWithMockedRemoteCall("start_ok.json").doStart(r -> { + assertEquals(header, threadPool.getThreadContext().getHeader("test")); + called.set(true); + }); + assertTrue(called.get()); + } + + public void testWrapExceptionToPreserveStatus() throws IOException { + Exception cause = new Exception(); + + // Successfully get the status without a body + RestStatus status = randomFrom(RestStatus.values()); + ElasticsearchStatusException wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(status.getStatus(), null, cause); + assertEquals(status, wrapped.status()); + assertEquals(cause, wrapped.getCause()); + assertEquals("No error body.", wrapped.getMessage()); + + // Successfully get the status with a body + HttpEntity okEntity = new StringEntity("test body", StandardCharsets.UTF_8); + wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(status.getStatus(), okEntity, cause); + assertEquals(status, wrapped.status()); + assertEquals(cause, wrapped.getCause()); +
assertEquals("body=test body", wrapped.getMessage()); + + // Successfully get the status with a broken body + IOException badEntityException = new IOException(); + HttpEntity badEntity = mock(HttpEntity.class); + when(badEntity.getContent()).thenThrow(badEntityException); + wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(status.getStatus(), badEntity, cause); + assertEquals(status, wrapped.status()); + assertEquals(cause, wrapped.getCause()); + assertEquals("Failed to extract body.", wrapped.getMessage()); + assertEquals(badEntityException, wrapped.getSuppressed()[0]); + + // Fail to get the status without a body + int notAnHttpStatus = -1; + assertNull(RestStatus.fromCode(notAnHttpStatus)); + wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(notAnHttpStatus, null, cause); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR, wrapped.status()); + assertEquals(cause, wrapped.getCause()); + assertEquals("Couldn't extract status [" + notAnHttpStatus + "]. No error body.", wrapped.getMessage()); + + // Fail to get the status with a body + wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(notAnHttpStatus, okEntity, cause); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR, wrapped.status()); + assertEquals(cause, wrapped.getCause()); + assertEquals("Couldn't extract status [" + notAnHttpStatus + "]. body=test body", wrapped.getMessage()); + + // Fail to get the status with a broken body + wrapped = RemoteScrollableHitSource.wrapExceptionToPreserveStatus(notAnHttpStatus, badEntity, cause); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR, wrapped.status()); + assertEquals(cause, wrapped.getCause()); + assertEquals("Couldn't extract status [" + notAnHttpStatus + "]. Failed to extract body.", wrapped.getMessage()); + assertEquals(badEntityException, wrapped.getSuppressed()[0]); + } + + private RemoteScrollableHitSource sourceWithMockedRemoteCall(String...
paths) throws Exception { return sourceWithMockedRemoteCall(true, paths); } @@ -342,8 +406,9 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { @Override public Future answer(InvocationOnMock invocationOnMock) throws Throwable { + // Throw away the current thread context to simulate running async httpclient's thread pool + threadPool.getThreadContext().stashContext(); HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; - @SuppressWarnings("unchecked") FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; HttpEntityEnclosingRequest request = (HttpEntityEnclosingRequest)requestProducer.generateRequest(); URL resource = resources[responseCount]; diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java index 41ea8612fed..ccb8aedd94c 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java @@ -50,6 +50,7 @@ import org.junit.Before; import java.net.SocketAddress; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -70,7 +71,7 @@ public class Netty3HttpChannelTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java index ebaff71e0c2..b432708de15 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java @@ -49,6 +49,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -72,7 +73,7 @@ public class Netty3HttpServerPipeliningTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java index 901d517bf95..d291f76ff38 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java @@ -34,6 +34,7 @@ import org.junit.After; import org.junit.Before; import 
java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; @@ -55,7 +56,7 @@ public class Netty3HttpServerTransportTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java index e4047798071..46e65d9d1fe 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java @@ -38,6 +38,7 @@ import java.io.InputStreamReader; import java.net.InetAddress; import java.net.Socket; import java.nio.charset.StandardCharsets; +import java.util.Collections; import static org.hamcrest.Matchers.is; @@ -61,7 +62,7 @@ public class Netty3SizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { threadPool = new ThreadPool(settings); - NetworkService networkService = new NetworkService(settings); + NetworkService networkService = new NetworkService(settings, Collections.emptyList()); BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); nettyTransport = new Netty3Transport(settings, threadPool, networkService, bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java index 16694dbed0b..3b02caf3b99 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportSettings; import java.io.IOException; +import java.util.Collections; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -62,14 +63,14 @@ public class Netty3ScheduledPingTests extends ESTestCase { CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); NamedWriteableRegistry registryA = new NamedWriteableRegistry(); - final Netty3Transport nettyA = new Netty3Transport(settings, threadPool, new NetworkService(settings), + final Netty3Transport nettyA = new Netty3Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), BigArrays.NON_RECYCLING_INSTANCE, registryA, circuitBreakerService); MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); serviceA.start(); serviceA.acceptIncomingRequests(); NamedWriteableRegistry registryB = new NamedWriteableRegistry(); - final Netty3Transport nettyB = new Netty3Transport(settings, threadPool, new NetworkService(settings), + final Netty3Transport nettyB = new Netty3Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), BigArrays.NON_RECYCLING_INSTANCE, 
registryB, circuitBreakerService); MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool); diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java index f21edf3f596..c460f630d5e 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java @@ -34,6 +34,8 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import org.junit.Before; +import java.util.Collections; + import static org.hamcrest.Matchers.is; public class Netty3TransportMultiPortTests extends ESTestCase { @@ -135,8 +137,8 @@ public class Netty3TransportMultiPortTests extends ESTestCase { private TcpTransport startTransport(Settings settings, ThreadPool threadPool) { BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); - TcpTransport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings), bigArrays, - new NamedWriteableRegistry(), new NoneCircuitBreakerService()); + TcpTransport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), + bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); transport.start(); assertThat(transport.lifecycleState(), is(Lifecycle.State.STARTED)); diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java index db400f4df43..60994273fb4 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.transport.TransportSettings; import java.net.InetAddress; import java.net.UnknownHostException; +import java.util.Collections; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -47,8 +48,8 @@ public class SimpleNetty3TransportTests extends AbstractSimpleTransportTestCase Settings settings, ThreadPool threadPool, final Version version) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - Transport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, - namedWriteableRegistry, new NoneCircuitBreakerService()) { + Transport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), + BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) { @Override protected Version getCurrentVersion() { return version; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index 58c6a476fe8..1185419d0dd 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ 
-58,6 +58,7 @@ import org.junit.Before; import java.net.SocketAddress; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -77,7 +78,7 @@ public class Netty4HttpChannelTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index d8e61908011..4d94dc2ccaf 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -49,6 +49,7 @@ import org.junit.Before; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -67,7 +68,7 @@ public class Netty4HttpServerPipeliningTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index dc63907265c..2683e69c917 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -33,6 +33,7 @@ import org.junit.After; import org.junit.Before; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; @@ -55,7 +56,7 @@ public class Netty4HttpServerTransportTests extends ESTestCase { @Before public void setup() throws Exception { - networkService = new NetworkService(Settings.EMPTY); + networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java index 0e2ee23f950..c65a63646d1 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportSettings; import java.io.IOException; +import 
java.util.Collections; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -62,14 +63,14 @@ public class Netty4ScheduledPingTests extends ESTestCase { CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); NamedWriteableRegistry registryA = new NamedWriteableRegistry(); - final Netty4Transport nettyA = new Netty4Transport(settings, threadPool, new NetworkService(settings), + final Netty4Transport nettyA = new Netty4Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), BigArrays.NON_RECYCLING_INSTANCE, registryA, circuitBreakerService); MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); serviceA.start(); serviceA.acceptIncomingRequests(); NamedWriteableRegistry registryB = new NamedWriteableRegistry(); - final Netty4Transport nettyB = new Netty4Transport(settings, threadPool, new NetworkService(settings), + final Netty4Transport nettyB = new Netty4Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), BigArrays.NON_RECYCLING_INSTANCE, registryB, circuitBreakerService); MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 8e7ac400f46..6a0d8b71968 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -38,6 +38,7 @@ import java.io.InputStreamReader; import java.net.InetAddress; import java.net.Socket; import java.nio.charset.StandardCharsets; +import java.util.Collections; import static org.hamcrest.Matchers.is; @@ -61,7 +62,7 @@ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { @Before public void startThreadPool() { threadPool = new ThreadPool(settings); - NetworkService networkService = new NetworkService(settings); + NetworkService networkService = new NetworkService(settings, Collections.emptyList()); BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); nettyTransport = new Netty4Transport(settings, threadPool, networkService, bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java index e76431daf8d..04488cb5b01 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java @@ -34,6 +34,8 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import org.junit.Before; +import java.util.Collections; + import static org.hamcrest.Matchers.is; public class NettyTransportMultiPortTests extends ESTestCase { @@ -135,8 +137,8 @@ public class NettyTransportMultiPortTests extends ESTestCase { private TcpTransport startTransport(Settings settings, ThreadPool threadPool) { BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new 
NoneCircuitBreakerService()); - TcpTransport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings), bigArrays, - new NamedWriteableRegistry(), new NoneCircuitBreakerService()); + TcpTransport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), + bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); transport.start(); assertThat(transport.lifecycleState(), is(Lifecycle.State.STARTED)); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index bafec9d2ffa..f370ab2aa03 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.transport.TransportSettings; import java.net.InetAddress; import java.net.UnknownHostException; +import java.util.Collections; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -47,8 +48,8 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase Settings settings, ThreadPool threadPool, final Version version) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - Transport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, - namedWriteableRegistry, new NoneCircuitBreakerService()) { + Transport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()), + BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) { @Override protected Version getCurrentVersion() { return version; diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a31dcde20a0..506215708e2 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -45,10 +45,6 @@ dependencyLicenses { test { // this is needed for insecure plugins, remove if possible! 
systemProperty 'tests.artifact', project.name - // this could be needed by AwsEc2ServiceImplTests#testAWSCredentialsWithSystemProviders() - // As it's marked as Ignored for now, we can comment those - // systemProperty 'aws.accessKeyId', 'DUMMY_ACCESS_KEY' - // systemProperty 'aws.secretKey', 'DUMMY_SECRET_KEY' } thirdPartyAudit.excludes = [ diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index b51c9c03f6e..e35b082899e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -23,11 +23,8 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSCredentialsProviderChain; import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; -import com.amazonaws.auth.SystemPropertiesCredentialsProvider; +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.retry.RetryPolicy; @@ -55,10 +52,8 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws private AmazonEC2Client client; @Inject - public AwsEc2ServiceImpl(Settings settings, NetworkService networkService) { + public AwsEc2ServiceImpl(Settings settings) { super(settings); - // add specific ec2 name resolver - networkService.addCustomNameResolver(new Ec2NameResolver(settings)); } @Override @@ -83,16 +78,10 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws String secret = CLOUD_EC2.SECRET_SETTING.get(settings); if (key.isEmpty() && secret.isEmpty()) { logger.debug("Using either environment variables, system properties or instance profile credentials"); - credentials = new AWSCredentialsProviderChain( - new EnvironmentVariableCredentialsProvider(), - new SystemPropertiesCredentialsProvider(), - new InstanceProfileCredentialsProvider() - ); + credentials = new DefaultAWSCredentialsProviderChain(); } else { logger.debug("Using basic key/secret credentials"); - credentials = new AWSCredentialsProviderChain( - new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)) - ); + credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)); } return credentials; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index eb4afbcf784..346372f554e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -38,22 +38,26 @@ import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; import org.elasticsearch.cloud.aws.Ec2Module; +import org.elasticsearch.cloud.aws.network.Ec2NameResolver; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; 
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; /** * */ -public class Ec2DiscoveryPlugin extends Plugin { +public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin { private static ESLogger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); @@ -105,6 +109,12 @@ public class Ec2DiscoveryPlugin extends Plugin { discoveryModule.addUnicastHostProvider(EC2, AwsEc2UnicastHostsProvider.class); } + @Override + public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { + logger.debug("Register _ec2_, _ec2:xxx_ network names"); + return new Ec2NameResolver(settings); + } + @Override public List> getSettings() { return Arrays.asList( diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java index 7ee82516926..42216df51ea 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java @@ -23,22 +23,20 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class AwsEc2ServiceImplTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19556") public void testAWSCredentialsWithSystemProviders() { AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger, Settings.EMPTY); - - AWSCredentials credentials = credentialsProvider.getCredentials(); - assertThat(credentials.getAWSAccessKeyId(), is("DUMMY_ACCESS_KEY")); - assertThat(credentials.getAWSSecretKey(), is("DUMMY_SECRET_KEY")); + assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class)); } public void testAWSCredentialsWithElasticsearchAwsSettings() { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java index b69ebd369a4..11ba53ca59e 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.net.InetAddress; +import java.util.Collections; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; @@ -42,8 +43,7 
@@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -60,8 +60,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicIp_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -78,8 +77,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateIp_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -96,8 +94,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateIpv4_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -114,8 +111,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateDns_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -132,8 +128,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicIpv4_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. 
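(Aside: the change applied throughout these test hunks is the same one — custom name resolvers are now handed to the NetworkService constructor instead of being registered later via addCustomNameResolver(). A minimal sketch of the two forms used in this patch, assuming the constructor signatures shown in the hunks; the wrapper class name is hypothetical:)

    import java.util.Collections;

    import org.elasticsearch.cloud.aws.network.Ec2NameResolver;
    import org.elasticsearch.common.network.NetworkService;
    import org.elasticsearch.common.settings.Settings;

    class NetworkServiceConstructionSketch {

        // Tests that need no custom resolution pass an empty resolver list.
        static NetworkService plain(Settings settings) {
            return new NetworkService(settings, Collections.emptyList());
        }

        // Tests that exercise EC2 name resolution pass the resolver up front,
        // replacing the old networkService.addCustomNameResolver(...) call.
        static NetworkService withEc2Resolver(Settings nodeSettings) {
            return new NetworkService(nodeSettings,
                Collections.singletonList(new Ec2NameResolver(nodeSettings)));
        }
    }
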
try { networkService.resolveBindHostAddresses(null); @@ -150,8 +145,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicDns_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); @@ -169,8 +163,7 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_local_") .build(); - NetworkService networkService = new NetworkService(nodeSettings); - networkService.addCustomNameResolver(new Ec2NameResolver(nodeSettings)); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new Ec2NameResolver(nodeSettings))); InetAddress[] addresses = networkService.resolveBindHostAddresses(null); assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" }))); } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java similarity index 77% rename from plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java rename to plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java index 00a44cd56af..39db86c672a 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java @@ -25,13 +25,12 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Function; -public interface GceComputeService extends LifecycleComponent { +public interface GceInstancesService extends LifecycleComponent { /** * GCE API Version: Elasticsearch/GceCloud/1.0 @@ -76,19 +75,4 @@ public interface GceComputeService extends LifecycleComponent { * @return a collection of running instances within the same GCE project */ Collection instances(); - - /** - *

<p>Gets metadata on the current running machine (call to
- * http://metadata.google.internal/computeMetadata/v1/instance/xxx).</p>
- * <p>For example, you can retrieve network information by replacing xxx with:</p>
- * <ul>
- * <li>`hostname` when we need to resolve the host name</li>
- * <li>`network-interfaces/0/ip` when we need to resolve private IP</li>
- * </ul>
- * @see org.elasticsearch.cloud.gce.network.GceNameResolver for bindings - * @param metadataPath path to metadata information - * @return extracted information (for example a hostname or an IP address) - * @throws IOException in case metadata URL is not accessible - */ - String metadata(String metadataPath) throws IOException; } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java similarity index 72% rename from plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java rename to plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index 8d1090dd82c..5ec4b18e910 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -21,9 +21,6 @@ package org.elasticsearch.cloud.gce; import com.google.api.client.googleapis.compute.ComputeCredential; import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; import com.google.api.client.json.JsonFactory; @@ -33,10 +30,8 @@ import com.google.api.services.compute.model.Instance; import com.google.api.services.compute.model.InstanceList; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.cloud.gce.network.GceNameResolver; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -44,10 +39,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.gce.RetryHttpInitializerWrapper; import java.io.IOException; -import java.net.URL; import java.security.AccessController; import java.security.GeneralSecurityException; -import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; @@ -56,27 +49,16 @@ import java.util.Collections; import java.util.List; import java.util.function.Function; -public class GceComputeServiceImpl extends AbstractLifecycleComponent - implements GceComputeService { +public class GceInstancesServiceImpl extends AbstractLifecycleComponent implements GceInstancesService { // all settings just used for testing - not registered by default public static final Setting GCE_VALIDATE_CERTIFICATES = Setting.boolSetting("cloud.gce.validate_certificates", true, Property.NodeScope); - public static final Setting GCE_HOST = - new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), Property.NodeScope); public static final Setting GCE_ROOT_URL = new Setting<>("cloud.gce.root_url", "https://www.googleapis.com", Function.identity(), Property.NodeScope); private final String project; private final List zones; - // Forcing Google Token API URL as set in GCE SDK to - // 
http://metadata/computeMetadata/v1/instance/service-accounts/default/token - // See https://developers.google.com/compute/docs/metadata#metadataserver - private final String gceHost; - private final String metaDataUrl; - private final String tokenServerEncodedUrl; - private String gceRootUrl; - @Override public Collection instances() { @@ -117,47 +99,6 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent return instances; } - @Override - public String metadata(String metadataPath) throws IOException { - String urlMetadataNetwork = this.metaDataUrl + "/" + metadataPath; - logger.debug("get metadata from [{}]", urlMetadataNetwork); - final URL url = new URL(urlMetadataNetwork); - HttpHeaders headers; - try { - // hack around code messiness in GCE code - // TODO: get this fixed - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - headers = AccessController.doPrivileged(new PrivilegedExceptionAction() { - @Override - public HttpHeaders run() throws IOException { - return new HttpHeaders(); - } - }); - GenericUrl genericUrl = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public GenericUrl run() { - return new GenericUrl(url); - } - }); - - // This is needed to query meta data: https://cloud.google.com/compute/docs/metadata - headers.put("Metadata-Flavor", "Google"); - HttpResponse response; - response = getGceHttpTransport().createRequestFactory() - .buildGetRequest(genericUrl) - .setHeaders(headers) - .execute(); - String metadata = response.parseAsString(); - logger.debug("metadata found [{}]", metadata); - return metadata; - } catch (Exception e) { - throw new IOException("failed to fetch metadata from [" + urlMetadataNetwork + "]", e); - } - } - private Compute client; private TimeValue refreshInterval = null; private long lastRefresh; @@ -169,17 +110,13 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent private JsonFactory gceJsonFactory; private final boolean validateCerts; + @Inject - public GceComputeServiceImpl(Settings settings, NetworkService networkService) { + public GceInstancesServiceImpl(Settings settings) { super(settings); this.project = PROJECT_SETTING.get(settings); this.zones = ZONE_SETTING.get(settings); - this.gceHost = GCE_HOST.get(settings); - this.metaDataUrl = gceHost + "/computeMetadata/v1/instance"; - this.gceRootUrl = GCE_ROOT_URL.get(settings); - tokenServerEncodedUrl = metaDataUrl + "/service-accounts/default/token"; this.validateCerts = GCE_VALIDATE_CERTIFICATES.get(settings); - networkService.addCustomNameResolver(new GceNameResolver(settings, this)); } protected synchronized HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { @@ -208,8 +145,13 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent gceJsonFactory = new JacksonFactory(); logger.info("starting GCE discovery service"); + // Forcing Google Token API URL as set in GCE SDK to + // http://metadata/computeMetadata/v1/instance/service-accounts/default/token + // See https://developers.google.com/compute/docs/metadata#metadataserver + String tokenServerEncodedUrl = GceMetadataService.GCE_HOST.get(settings) + + "/computeMetadata/v1/instance/service-accounts/default/token"; ComputeCredential credential = new ComputeCredential.Builder(getGceHttpTransport(), gceJsonFactory) - .setTokenServerEncodedUrl(this.tokenServerEncodedUrl) + .setTokenServerEncodedUrl(tokenServerEncodedUrl) .build(); // hack around code messiness in GCE 
code @@ -233,7 +175,7 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent Compute.Builder builder = new Compute.Builder(getGceHttpTransport(), gceJsonFactory, null).setApplicationName(VERSION) - .setRootUrl(gceRootUrl); + .setRootUrl(GCE_ROOT_URL.get(settings)); if (RETRY_SETTING.exists(settings)) { TimeValue maxWait = MAX_WAIT_SETTING.get(settings); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceMetadataService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceMetadataService.java new file mode 100644 index 00000000000..25b7cf72c92 --- /dev/null +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceMetadataService.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cloud.gce; + +import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.http.GenericUrl; +import com.google.api.client.http.HttpHeaders; +import com.google.api.client.http.HttpResponse; +import com.google.api.client.http.HttpTransport; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.cloud.gce.network.GceNameResolver; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.security.AccessController; +import java.security.GeneralSecurityException; +import java.security.PrivilegedAction; +import java.security.PrivilegedExceptionAction; +import java.util.function.Function; + +public class GceMetadataService extends AbstractLifecycleComponent { + + // Forcing Google Token API URL as set in GCE SDK to + // http://metadata/computeMetadata/v1/instance/service-accounts/default/token + // See https://developers.google.com/compute/docs/metadata#metadataserver + // all settings just used for testing - not registered by default + public static final Setting GCE_HOST = + new Setting<>("cloud.gce.host", "http://metadata.google.internal", Function.identity(), Setting.Property.NodeScope); + + /** Global instance of the HTTP transport. 
*/ + private HttpTransport gceHttpTransport; + + @Inject + public GceMetadataService(Settings settings) { + super(settings); + } + + protected synchronized HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { + if (gceHttpTransport == null) { + gceHttpTransport = GoogleNetHttpTransport.newTrustedTransport(); + } + return gceHttpTransport; + } + + public String metadata(String metadataPath) throws IOException, URISyntaxException { + // Forcing Google Token API URL as set in GCE SDK to + // http://metadata/computeMetadata/v1/instance/service-accounts/default/token + // See https://developers.google.com/compute/docs/metadata#metadataserver + final URI urlMetadataNetwork = new URI(GCE_HOST.get(settings)).resolve("/computeMetadata/v1/instance/").resolve(metadataPath); + logger.debug("get metadata from [{}]", urlMetadataNetwork); + HttpHeaders headers; + try { + // hack around code messiness in GCE code + // TODO: get this fixed + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + headers = AccessController.doPrivileged(new PrivilegedExceptionAction() { + @Override + public HttpHeaders run() throws IOException { + return new HttpHeaders(); + } + }); + GenericUrl genericUrl = AccessController.doPrivileged(new PrivilegedAction() { + @Override + public GenericUrl run() { + return new GenericUrl(urlMetadataNetwork); + } + }); + + // This is needed to query meta data: https://cloud.google.com/compute/docs/metadata + headers.put("Metadata-Flavor", "Google"); + HttpResponse response; + response = getGceHttpTransport().createRequestFactory() + .buildGetRequest(genericUrl) + .setHeaders(headers) + .execute(); + String metadata = response.parseAsString(); + logger.debug("metadata found [{}]", metadata); + return metadata; + } catch (Exception e) { + throw new IOException("failed to fetch metadata from [" + urlMetadataNetwork + "]", e); + } + } + + @Override + protected void doStart() { + + } + + @Override + protected void doStop() { + if (gceHttpTransport != null) { + try { + gceHttpTransport.shutdown(); + } catch (IOException e) { + logger.warn("unable to shutdown GCE Http Transport", e); + } + gceHttpTransport = null; + } + } + + @Override + protected void doClose() { + + } +} diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java index e1b8d6cf02f..81d10c756e5 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java @@ -20,17 +20,28 @@ package org.elasticsearch.cloud.gce; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; public class GceModule extends AbstractModule { // pkg private so tests can override with mock - static Class computeServiceImpl = GceComputeServiceImpl.class; + static Class computeServiceImpl = GceInstancesServiceImpl.class; - public static Class getComputeServiceImpl() { + protected final Settings settings; + protected final ESLogger logger = Loggers.getLogger(GceModule.class); + + public GceModule(Settings settings) { + this.settings = settings; + } + + public static Class getComputeServiceImpl() { return computeServiceImpl; } @Override protected void configure() { - 
bind(GceComputeService.class).to(computeServiceImpl).asEagerSingleton(); + logger.debug("configure GceModule (bind compute service)"); + bind(GceInstancesService.class).to(computeServiceImpl).asEagerSingleton(); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java index 0bd5e07da91..54f96a971ca 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java @@ -19,7 +19,7 @@ package org.elasticsearch.cloud.gce.network; -import org.elasticsearch.cloud.gce.GceComputeService; +import org.elasticsearch.cloud.gce.GceMetadataService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkService.CustomNameResolver; @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.net.InetAddress; +import java.net.URISyntaxException; /** *

Resolves certain GCE related 'meta' hostnames into an actual hostname @@ -40,7 +41,7 @@ import java.net.InetAddress; */ public class GceNameResolver extends AbstractComponent implements CustomNameResolver { - private final GceComputeService gceComputeService; + private final GceMetadataService gceMetadataService; /** * enum that can be added to over time with more meta-data types @@ -72,9 +73,9 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso /** * Construct a {@link CustomNameResolver}. */ - public GceNameResolver(Settings settings, GceComputeService gceComputeService) { + public GceNameResolver(Settings settings, GceMetadataService gceMetadataService) { super(settings); - this.gceComputeService = gceComputeService; + this.gceMetadataService = gceMetadataService; } /** @@ -93,7 +94,7 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso // We extract the network interface from gce:privateIp:XX String network = "0"; String[] privateIpConfig = value.split(":"); - if (privateIpConfig != null && privateIpConfig.length == 3) { + if (privateIpConfig.length == 3) { network = privateIpConfig[2]; } @@ -105,13 +106,13 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso } try { - String metadataResult = gceComputeService.metadata(gceMetadataPath); + String metadataResult = gceMetadataService.metadata(gceMetadataPath); if (metadataResult == null || metadataResult.length() == 0) { throw new IOException("no gce metadata returned from [" + gceMetadataPath + "] for [" + value + "]"); } // only one address: because we explicitly ask for only one via the GceHostnameType return new InetAddress[] { InetAddress.getByName(metadataResult) }; - } catch (IOException e) { + } catch (IOException | URISyntaxException e) { throw new IOException("IOException caught when fetching InetAddress from [" + gceMetadataPath + "]", e); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index be3d737b919..c73df8f8395 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -23,7 +23,7 @@ import com.google.api.services.compute.model.AccessConfig; import com.google.api.services.compute.model.Instance; import com.google.api.services.compute.model.NetworkInterface; import org.elasticsearch.Version; -import org.elasticsearch.cloud.gce.GceComputeService; +import org.elasticsearch.cloud.gce.GceInstancesService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; @@ -64,7 +64,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas private static final String TERMINATED = "TERMINATED"; } - private final GceComputeService gceComputeService; + private final GceInstancesService gceInstancesService; private TransportService transportService; private NetworkService networkService; @@ -77,17 +77,17 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas private List cachedDiscoNodes; @Inject - public GceUnicastHostsProvider(Settings settings, GceComputeService gceComputeService, + public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstancesService, 
TransportService transportService, NetworkService networkService) { super(settings); - this.gceComputeService = gceComputeService; + this.gceInstancesService = gceInstancesService; this.transportService = transportService; this.networkService = networkService; - this.refreshInterval = GceComputeService.REFRESH_SETTING.get(settings); - this.project = GceComputeService.PROJECT_SETTING.get(settings); - this.zones = GceComputeService.ZONE_SETTING.get(settings); + this.refreshInterval = GceInstancesService.REFRESH_SETTING.get(settings); + this.project = GceInstancesService.PROJECT_SETTING.get(settings); + this.zones = GceInstancesService.ZONE_SETTING.get(settings); this.tags = TAGS_SETTING.get(settings); if (logger.isDebugEnabled()) { @@ -104,8 +104,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. " + - "Check elasticsearch.yml file. Should have [" + GceComputeService.PROJECT_SETTING.getKey() + - "] and [" + GceComputeService.ZONE_SETTING.getKey() + "]."); + "Check elasticsearch.yml file. Should have [" + GceInstancesService.PROJECT_SETTING.getKey() + + "] and [" + GceInstancesService.ZONE_SETTING.getKey() + "]."); } if (refreshInterval.millis() != 0) { @@ -131,7 +131,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas } try { - Collection instances = gceComputeService.instances(); + Collection instances = gceInstancesService.instances(); if (instances == null) { logger.trace("no instance found for project [{}], zones [{}].", this.project, this.zones); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 7f4647281c1..031f7eaf10f 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -22,30 +22,36 @@ package org.elasticsearch.plugin.discovery.gce; import com.google.api.client.http.HttpHeaders; import com.google.api.client.util.ClassInfo; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.cloud.gce.GceComputeService; +import org.elasticsearch.cloud.gce.GceInstancesService; +import org.elasticsearch.cloud.gce.GceMetadataService; import org.elasticsearch.cloud.gce.GceModule; +import org.elasticsearch.cloud.gce.network.GceNameResolver; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.gce.GceUnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collection; import java.util.Collections; import java.util.List; -public class GceDiscoveryPlugin extends Plugin { +public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin { public static final String GCE = "gce"; + private final Settings settings; + protected final ESLogger logger = Loggers.getLogger(GceDiscoveryPlugin.class); static { /* @@ -69,32 +75,46 @@ public class GceDiscoveryPlugin extends Plugin { }); } + public GceDiscoveryPlugin(Settings settings) { + this.settings = settings; + logger.trace("starting gce discovery plugin..."); + } + @Override public Collection createGuiceModules() { - return Collections.singletonList(new GceModule()); + return Collections.singletonList(new GceModule(settings)); } @Override @SuppressWarnings("rawtypes") // Supertype uses raw type public Collection> getGuiceServiceClasses() { - return Collections.singletonList(GceModule.getComputeServiceImpl()); + logger.debug("Register gce compute service"); + Collection> services = new ArrayList<>(); + services.add(GceModule.getComputeServiceImpl()); + return services; } public void onModule(DiscoveryModule discoveryModule) { + logger.debug("Register gce discovery type and gce unicast provider"); discoveryModule.addDiscoveryType(GCE, ZenDiscovery.class); - // If discovery.type: gce, we add Gce as a unicast provider - discoveryModule.addUnicastHostProvider(GCE, GceUnicastHostsProvider.class); + discoveryModule.addUnicastHostProvider(GCE, GceUnicastHostsProvider.class); + } + + @Override + public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { + logger.debug("Register _gce_, _gce:xxx network names"); + return new GceNameResolver(settings, new GceMetadataService(settings)); } @Override public List> getSettings() { return Arrays.asList( - // Register GCE settings - GceComputeService.PROJECT_SETTING, - GceComputeService.ZONE_SETTING, - GceUnicastHostsProvider.TAGS_SETTING, - GceComputeService.REFRESH_SETTING, - GceComputeService.RETRY_SETTING, - GceComputeService.MAX_WAIT_SETTING); + // Register GCE settings + GceInstancesService.PROJECT_SETTING, + GceInstancesService.ZONE_SETTING, + GceUnicastHostsProvider.TAGS_SETTING, + GceInstancesService.REFRESH_SETTING, + GceInstancesService.RETRY_SETTING, + GceInstancesService.MAX_WAIT_SETTING); } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java index 8787f5df791..ef308ac886b 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java @@ -23,14 +23,14 @@ import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpServer; import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; -import org.elasticsearch.cloud.gce.GceComputeServiceImpl; +import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; +import org.elasticsearch.cloud.gce.GceMetadataService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin; import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -67,8 +67,8 @@ public class GceDiscoverTests extends ESIntegTestCase { public static class TestPlugin extends Plugin { @Override public List> getSettings() { - return Arrays.asList(GceComputeServiceImpl.GCE_HOST, GceComputeServiceImpl.GCE_ROOT_URL, - GceComputeServiceImpl.GCE_VALIDATE_CERTIFICATES); + return Arrays.asList(GceMetadataService.GCE_HOST, GceInstancesServiceImpl.GCE_ROOT_URL, + GceInstancesServiceImpl.GCE_VALIDATE_CERTIFICATES); } } @@ -113,7 +113,7 @@ public class GceDiscoverTests extends ESIntegTestCase { httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); httpsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext)); httpServer.createContext("/computeMetadata/v1/instance/service-accounts/default/token", (s) -> { - String response = GceComputeServiceMock.readGoogleInternalJsonResponse( + String response = GceMockUtils.readGoogleInternalJsonResponse( "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token"); byte[] responseAsBytes = response.getBytes(StandardCharsets.UTF_8); s.sendResponseHeaders(200, responseAsBytes.length); diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index 92eb12a99b2..08e7ee963d3 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -20,8 +20,7 @@ package org.elasticsearch.discovery.gce; import org.elasticsearch.Version; -import org.elasticsearch.cloud.gce.GceComputeService; -import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -34,6 +33,7 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import java.util.Collections; import java.util.List; import java.util.Locale; @@ -67,8 +67,7 @@ public class GceDiscoveryTests extends ESTestCase { protected static ThreadPool threadPool; protected MockTransportService transportService; - protected NetworkService networkService; - protected GceComputeService mock; + protected GceInstancesServiceMock mock; protected String projectName; @BeforeClass @@ -98,11 +97,6 @@ public class GceDiscoveryTests extends ESTestCase { transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool); } - @Before - public void createNetworkService() { - networkService = new NetworkService(Settings.EMPTY); - } - @After public void stopGceComputeService() { if (mock != null) { @@ -110,9 +104,9 @@ public class GceDiscoveryTests extends ESTestCase { } } - protected List buildDynamicNodes(GceComputeService gceComputeService, Settings nodeSettings) { - GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceComputeService, transportService, - new NetworkService(Settings.EMPTY)); + protected List buildDynamicNodes(GceInstancesServiceImpl gceInstancesService, Settings nodeSettings) { + GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, + transportService, new NetworkService(Settings.EMPTY, Collections.emptyList())); List 
discoveryNodes = provider.buildDynamicNodes(); logger.info("--> nodes found: {}", discoveryNodes); @@ -121,21 +115,21 @@ public class GceDiscoveryTests extends ESTestCase { public void testNodesWithDifferentTagsAndNoTagSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } public void testNodesWithDifferentTagsAndOneTagSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(1)); assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); @@ -143,11 +137,11 @@ public class GceDiscoveryTests extends ESTestCase { public void testNodesWithDifferentTagsAndTwoTagSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(1)); assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); @@ -155,52 +149,52 @@ public class GceDiscoveryTests extends ESTestCase { public void testNodesWithSameTagsAndNoTagSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } public void testNodesWithSameTagsAndOneTagSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new 
GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } public void testNodesWithSameTagsAndTwoTagsSet() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .put(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } public void testMultipleZonesAndTwoNodesInSameZone() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } public void testMultipleZonesAndTwoNodesInDifferentZones() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(2)); } @@ -210,17 +204,17 @@ public class GceDiscoveryTests extends ESTestCase { */ public void testZeroNode43() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(0)); } public void testIllegalSettingsMissingAllRequired() { Settings nodeSettings = Settings.EMPTY; - mock = new GceComputeServiceMock(Settings.EMPTY, networkService); + mock = new GceInstancesServiceMock(nodeSettings); try { buildDynamicNodes(mock, nodeSettings); fail("We expect an IllegalArgumentException for incomplete settings"); @@ -231,9 +225,9 @@ public class GceDiscoveryTests extends ESTestCase { public void testIllegalSettingsMissingProject() { Settings nodeSettings = Settings.builder() - .putArray(GceComputeService.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") + .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); - mock = new GceComputeServiceMock(nodeSettings, 
networkService); + mock = new GceInstancesServiceMock(nodeSettings); try { buildDynamicNodes(mock, nodeSettings); fail("We expect an IllegalArgumentException for incomplete settings"); @@ -244,9 +238,9 @@ public class GceDiscoveryTests extends ESTestCase { public void testIllegalSettingsMissingZone() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); try { buildDynamicNodes(mock, nodeSettings); fail("We expect an IllegalArgumentException for incomplete settings"); @@ -262,10 +256,10 @@ public class GceDiscoveryTests extends ESTestCase { */ public void testNoRegionReturnsEmptyList() { Settings nodeSettings = Settings.builder() - .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) - .putArray(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") .build(); - mock = new GceComputeServiceMock(nodeSettings, networkService); + mock = new GceInstancesServiceMock(nodeSettings); List discoveryNodes = buildDynamicNodes(mock, nodeSettings); assertThat(discoveryNodes, hasSize(1)); } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java new file mode 100644 index 00000000000..91a37a4d106 --- /dev/null +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
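(The GceDiscoveryTests hunks above all follow the same shape. A minimal sketch of one post-rename test, reusing only names that appear in this diff; the element type List<DiscoveryNode> and the exact expected count are assumptions for illustration.)

// Settings keys move from GceComputeService to GceInstancesServiceImpl, and the
// mock now takes only the node settings instead of settings plus a NetworkService.
public void testNodesWithOneTagSketch() {
    Settings nodeSettings = Settings.builder()
            .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
            .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
            .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
            .build();
    mock = new GceInstancesServiceMock(nodeSettings);
    List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
    assertThat(discoveryNodes, hasSize(1)); // only instances carrying the tag are kept
}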
+ */ + +package org.elasticsearch.discovery.gce; + +import com.google.api.client.http.HttpTransport; +import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; +import org.elasticsearch.common.settings.Settings; + +import java.io.IOException; +import java.security.GeneralSecurityException; + +/** + * + */ +public class GceInstancesServiceMock extends GceInstancesServiceImpl { + + protected HttpTransport mockHttpTransport; + + public GceInstancesServiceMock(Settings settings) { + super(settings); + this.mockHttpTransport = GceMockUtils.configureMock(); + } + + @Override + protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { + return this.mockHttpTransport; + } +} diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMetadataServiceMock.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMetadataServiceMock.java new file mode 100644 index 00000000000..58c50f47a1f --- /dev/null +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMetadataServiceMock.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
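(Both mocks added here, GceInstancesServiceMock above and GceMetadataServiceMock just below, follow the same pattern: override getGceHttpTransport() and return the transport built by GceMockUtils.configureMock(). A brief usage sketch, with illustrative setting values:)

// Each mock is constructed from Settings alone; every HTTP call then goes through
// the shared mock transport, which answers from fixture files under
// src/test/resources/org/elasticsearch/discovery/gce instead of real GCE endpoints.
Settings settings = Settings.builder()
        .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), "my-project")   // illustrative value
        .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
        .build();
GceInstancesServiceMock instancesMock = new GceInstancesServiceMock(settings);
GceMetadataServiceMock metadataMock = new GceMetadataServiceMock(settings);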
+ */ + +package org.elasticsearch.discovery.gce; + +import com.google.api.client.http.HttpTransport; +import org.elasticsearch.cloud.gce.GceMetadataService; +import org.elasticsearch.common.settings.Settings; + +import java.io.IOException; +import java.security.GeneralSecurityException; + +/** + * Mock for GCE Metadata Service + */ +public class GceMetadataServiceMock extends GceMetadataService { + + protected HttpTransport mockHttpTransport; + + public GceMetadataServiceMock(Settings settings) { + super(settings); + this.mockHttpTransport = GceMockUtils.configureMock(); + } + + @Override + protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { + return this.mockHttpTransport; + } +} diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceComputeServiceMock.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java similarity index 82% rename from plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceComputeServiceMock.java rename to plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java index 209657d89d4..88a6fbd9e92 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceComputeServiceMock.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java @@ -26,38 +26,22 @@ import com.google.api.client.json.Json; import com.google.api.client.testing.http.MockHttpTransport; import com.google.api.client.testing.http.MockLowLevelHttpRequest; import com.google.api.client.testing.http.MockLowLevelHttpResponse; -import org.elasticsearch.cloud.gce.GceComputeServiceImpl; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.Callback; import java.io.IOException; import java.io.InputStream; import java.net.URL; -import java.security.GeneralSecurityException; -/** - * - */ -public class GceComputeServiceMock extends GceComputeServiceImpl { - - protected HttpTransport mockHttpTransport; - - public GceComputeServiceMock(Settings settings, NetworkService networkService) { - super(settings, networkService); - this.mockHttpTransport = configureMock(); - } - - @Override - protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { - return this.mockHttpTransport; - } +public class GceMockUtils { + protected static final ESLogger logger = Loggers.getLogger(GceMockUtils.class); public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance"; - protected HttpTransport configureMock() { + protected static HttpTransport configureMock() { return new MockHttpTransport() { @Override public LowLevelHttpRequest buildRequest(String method, final String url) throws IOException { @@ -94,7 +78,7 @@ public class GceComputeServiceMock extends GceComputeServiceImpl { // We extract from the url the mock file path we want to use String mockFileName = Strings.replace(url, urlRoot, ""); - URL resource = GceComputeServiceMock.class.getResource(mockFileName); + URL resource = GceMockUtils.class.getResource(mockFileName); if (resource == null) { throw new IOException("can't read [" + url + "] in src/test/resources/org/elasticsearch/discovery/gce"); } diff --git 
a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java index c09e51fe1ef..5b7b5e2e4d1 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.net.InetAddress; +import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; @@ -79,7 +81,8 @@ public class GceNetworkTests extends ESTestCase { * network.host: _local_ */ public void networkHostCoreLocal() throws IOException { - resolveGce("_local_", new NetworkService(Settings.EMPTY).resolveBindHostAddresses(new String[] { NetworkService.DEFAULT_NETWORK_HOST })); + resolveGce("_local_", new NetworkService(Settings.EMPTY, Collections.emptyList()) + .resolveBindHostAddresses(new String[] { NetworkService.DEFAULT_NETWORK_HOST })); } /** @@ -103,9 +106,8 @@ public class GceNetworkTests extends ESTestCase { .put("network.host", gceNetworkSetting) .build(); - NetworkService networkService = new NetworkService(nodeSettings); - GceComputeServiceMock mock = new GceComputeServiceMock(nodeSettings, networkService); - networkService.addCustomNameResolver(new GceNameResolver(nodeSettings, mock)); + GceMetadataServiceMock mock = new GceMetadataServiceMock(nodeSettings); + NetworkService networkService = new NetworkService(nodeSettings, Collections.singletonList(new GceNameResolver(nodeSettings, mock))); try { InetAddress[] addresses = networkService.resolveBindHostAddresses(null); if (expected == null) { diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 23aa68e7f2d..a6610178ce8 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -51,10 +51,6 @@ dependencyLicenses { test { // this is needed for insecure plugins, remove if possible! 
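(The GceNetworkTests hunk above captures the NetworkService API change that recurs through this diff: custom name resolvers are handed to the constructor rather than registered with addCustomNameResolver(). A short sketch, using only the constructors and classes shown in this diff:)

// GCE case: the metadata mock feeds a GceNameResolver that is passed at construction time.
GceMetadataServiceMock mock = new GceMetadataServiceMock(nodeSettings);
NetworkService networkService = new NetworkService(nodeSettings,
        Collections.singletonList(new GceNameResolver(nodeSettings, mock)));
InetAddress[] addresses = networkService.resolveBindHostAddresses(null);

// Plain case (as in MockTcpTransportTests near the end of this diff): no custom resolvers.
NetworkService plain = new NetworkService(Settings.EMPTY, Collections.emptyList());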
systemProperty 'tests.artifact', project.name - // this could be needed by AwsS3ServiceImplTests#testAWSCredentialsWithSystemProviders() - // As it's marked as Ignored for now, we can comment those - // systemProperty 'aws.accessKeyId', 'DUMMY_ACCESS_KEY' - // systemProperty 'aws.secretKey', 'DUMMY_SECRET_KEY' } thirdPartyAudit.excludes = [ diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index e1bce876c27..27053379db1 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -22,11 +22,8 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSCredentialsProviderChain; import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; -import com.amazonaws.auth.SystemPropertiesCredentialsProvider; +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; @@ -36,7 +33,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; @@ -122,19 +118,12 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements public static AWSCredentialsProvider buildCredentials(ESLogger logger, String key, String secret) { AWSCredentialsProvider credentials; - if (key.isEmpty() && secret.isEmpty()) { logger.debug("Using either environment variables, system properties or instance profile credentials"); - credentials = new AWSCredentialsProviderChain( - new EnvironmentVariableCredentialsProvider(), - new SystemPropertiesCredentialsProvider(), - new InstanceProfileCredentialsProvider() - ); + credentials = new DefaultAWSCredentialsProviderChain(); } else { logger.debug("Using basic key/secret credentials"); - credentials = new AWSCredentialsProviderChain( - new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)) - ); + credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)); } return credentials; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java index 788ea8b60ed..777bb5ff358 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java @@ -23,22 +23,20 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.s3.S3Repository; import 
org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AwsS3ServiceImplTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19556") public void testAWSCredentialsWithSystemProviders() { AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, "", ""); - - AWSCredentials credentials = credentialsProvider.getCredentials(); - assertThat(credentials.getAWSAccessKeyId(), is("DUMMY_ACCESS_KEY")); - assertThat(credentials.getAWSSecretKey(), is("DUMMY_SECRET_KEY")); + assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class)); } public void testAWSCredentialsWithElasticsearchAwsSettings() { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml index 199d4561d09..8c962407b30 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml @@ -39,7 +39,7 @@ max_docs: 1 - match: { old_index: logs-1 } - - match: { new_index: logs-2 } + - match: { new_index: logs-000002 } - match: { rolled_over: true } - match: { dry_run: false } - match: { conditions: { "[max_docs: 1]": true } } @@ -47,14 +47,14 @@ # ensure new index is created - do: indices.exists: - index: logs-2 + index: logs-000002 - is_true: '' # index into new index - do: index: - index: logs-2 + index: logs-000002 type: test id: "2" body: { "foo": "hello world" } @@ -69,5 +69,5 @@ type: test - match: { hits.total: 1 } - - match: { hits.hits.0._index: "logs-2"} + - match: { hits.hits.0._index: "logs-000002"} diff --git a/test/build.gradle b/test/build.gradle index a80ca59978c..594fa5bbb70 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -20,7 +20,7 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks subprojects { - // fixtures is just an intermediate parent project + // fixtures is just intermediate parent project if (name == 'fixtures') return group = 'org.elasticsearch.test' @@ -28,7 +28,7 @@ subprojects { apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' - + // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'), diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/package-info.java b/test/framework/src/main/java/org/elasticsearch/test/rest/package-info.java new file mode 100644 index 00000000000..f3f5cedd13f --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
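(The repository-s3 hunks above change InternalAwsS3Service.buildCredentials() to lean on the AWS SDK defaults. A hedged recap of the resulting behaviour, mirroring the updated AwsS3ServiceImplTests; the key and secret values are illustrative:)

// Empty key/secret: the SDK's DefaultAWSCredentialsProviderChain covers environment
// variables, system properties and instance profile credentials in one provider.
AWSCredentialsProvider fromEnvironment = InternalAwsS3Service.buildCredentials(logger, "", "");
assertThat(fromEnvironment, instanceOf(DefaultAWSCredentialsProviderChain.class));

// Explicit key/secret: a plain StaticCredentialsProvider, no chain wrapper anymore.
AWSCredentialsProvider explicit = InternalAwsS3Service.buildCredentials(logger, "my_key", "my_secret");
assertThat(explicit.getCredentials().getAWSAccessKeyId(), is("my_key"));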
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Infrastructure for testing REST. See {@link ESRestTestCase} for running tests against an Elasticsearch cluster using the REST tests and + * the rest of the package for mocking utilities. + */ +package org.elasticsearch.test.rest; \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java similarity index 98% rename from test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index c21e905ed5c..41ae7d8c04f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.yaml.client; +package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; @@ -34,7 +34,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index f99ee4be83f..43feb238cc7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -24,9 +24,6 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.test.rest.yaml.client.ClientYamlTestClient; -import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponse; -import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponseException; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java similarity index 96% rename from test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index 
9ccdd89592f..8f449274ea5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -16,14 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.yaml.client; +package org.elasticsearch.test.rest.yaml; import org.apache.http.client.methods.HttpHead; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.rest.yaml.ObjectPath; -import org.elasticsearch.test.rest.yaml.Stash; import java.io.IOException; import java.nio.charset.StandardCharsets; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponseException.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponseException.java index 73719202c10..7d983d48029 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponseException.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest.yaml.client; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.client.ResponseException; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index ae1853ccec3..aed60658a76 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -35,7 +35,6 @@ import org.elasticsearch.test.rest.yaml.section.DoSection; import org.elasticsearch.test.rest.yaml.section.ExecutableSection; import org.elasticsearch.test.rest.yaml.section.SkipSection; import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; -import org.elasticsearch.test.rest.yaml.support.FileUtils; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java similarity index 96% rename from test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java index 8d7a5a58e16..df797dd53dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest.yaml.support; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.test.ESIntegTestCase; @@ -35,8 +35,8 @@ import static java.util.Collections.unmodifiableList; * and the related skip sections can be removed from the tests as well. 
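(The renames above flatten the org.elasticsearch.test.rest.yaml.client and .support subpackages into org.elasticsearch.test.rest.yaml. For downstream test code the visible effect is an import change; only classes moved in this diff are listed, and FileUtils follows in the next hunks:)

import org.elasticsearch.test.rest.yaml.ClientYamlTestClient;            // was ...rest.yaml.client.ClientYamlTestClient
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;          // was ...rest.yaml.client.ClientYamlTestResponse
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; // was ...rest.yaml.client.ClientYamlTestResponseException
import org.elasticsearch.test.rest.yaml.Features;                        // was ...rest.yaml.support.Features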
*/ public final class Features { - private static final List SUPPORTED = unmodifiableList(Arrays.asList( + "catch_unauthorized", "embedded_stash_key", "groovy_scripting", "headers", diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java similarity index 99% rename from test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java index 783f0f9dcc1..caaa8b2ec83 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.yaml.support; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/package-info.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/package-info.java new file mode 100644 index 00000000000..e0ab9232051 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Infrastructure to run suites of tests written in YAML against a running Elasticsearch cluster using Elasticsearch's low level REST + * client. The YAML tests are run by all official clients and serve as tests for both Elasticsearch and the clients. + */ +package org.elasticsearch.test.rest.yaml; \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/package-info.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/package-info.java new file mode 100644 index 00000000000..de63b46eff3 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Parses YAML test {@link org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite}s containing + * {@link org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection}s. + */ +package org.elasticsearch.test.rest.yaml.parser; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index d0cfc839fcc..9839bb089ae 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -20,7 +20,7 @@ package org.elasticsearch.test.rest.yaml.restspec; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.yaml.support.FileUtils; +import org.elasticsearch.test.rest.yaml.FileUtils; import java.io.IOException; import java.io.InputStream; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/package-info.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/package-info.java new file mode 100644 index 00000000000..8951c80290f --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Specification of REST endpoints used to convert YAML {@code do} sections into actual calls to Elasticsearch. 
+ */ +package org.elasticsearch.test.rest.yaml.restspec; \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 78461130783..35507692a88 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -23,8 +23,8 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; -import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponse; -import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; import java.io.IOException; import java.util.HashMap; @@ -142,12 +142,17 @@ public class DoSection implements ExecutableSection { private static Map>> catches = new HashMap<>(); static { - catches.put("missing", tuple("404", equalTo(404))); - catches.put("conflict", tuple("409", equalTo(409))); + catches.put("unauthorized", tuple("401", equalTo(401))); catches.put("forbidden", tuple("403", equalTo(403))); + catches.put("missing", tuple("404", equalTo(404))); catches.put("request_timeout", tuple("408", equalTo(408))); + catches.put("conflict", tuple("409", equalTo(409))); catches.put("unavailable", tuple("503", equalTo(503))); - catches.put("request", tuple("4xx|5xx", - allOf(greaterThanOrEqualTo(400), not(equalTo(404)), not(equalTo(408)), not(equalTo(409)), not(equalTo(403))))); + catches.put("request", tuple("4xx|5xx", allOf(greaterThanOrEqualTo(400), + not(equalTo(401)), + not(equalTo(403)), + not(equalTo(404)), + not(equalTo(408)), + not(equalTo(409))))); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java index c9b6ead4aa8..3ce952b94d3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java @@ -20,7 +20,7 @@ package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.Version; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.rest.yaml.support.Features; +import org.elasticsearch.test.rest.yaml.Features; import java.util.ArrayList; import java.util.List; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/package-info.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/package-info.java new file mode 100644 index 00000000000..fe6a53ff93a --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
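(The DoSection hunk above adds an "unauthorized" entry to the catch table and excludes 401 from the generic "request" catch; together with the "catch_unauthorized" feature advertised in Features earlier in this diff, YAML tests can now write catch: unauthorized. A hypothetical helper, not part of the patch, restating that mapping:)

// Restates the expanded catches map for readability; "request" is omitted because it
// matches any 4xx/5xx other than the named codes below.
static int expectedStatusFor(String catchName) {
    switch (catchName) {
        case "unauthorized":    return 401; // new in this change
        case "forbidden":       return 403;
        case "missing":         return 404;
        case "request_timeout": return 408;
        case "conflict":        return 409;
        case "unavailable":     return 503;
        default:
            throw new IllegalArgumentException("no single status for catch [" + catchName + "]");
    }
}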
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Sections within the YAML tests that are executed to run the tests. + */ +package org.elasticsearch.test.rest.yaml.section; \ No newline at end of file diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java similarity index 98% rename from test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java index c02ca5d85d0..4387bf164fa 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java @@ -16,10 +16,10 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.yaml.support; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.yaml.support.FileUtils; +import org.elasticsearch.test.rest.yaml.FileUtils; import java.nio.file.Files; import java.nio.file.Path; diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index f94bd11b00c..6cec62cb710 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -26,12 +26,14 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.transport.MockTransportService; +import java.util.Collections; + public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { @Override protected MockTransportService build(Settings settings, Version version) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, - new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(settings), version); + new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(settings, Collections.emptyList()), version); MockTransportService mockTransportService = new MockTransportService(Settings.EMPTY, transport, threadPool); mockTransportService.start(); return mockTransportService;