Merge branch 'master' into feature/rank-eval

This commit is contained in:
Christoph Büscher 2016-07-29 12:13:15 +02:00
commit f31fecc5a4
217 changed files with 7074 additions and 5107 deletions

View File

@ -35,7 +35,7 @@ Use local transport (default since 1.3):
=== Running Elasticsearch from a checkout
In order to run Elasticsearch from source without building a package, you can
run it using Maven:
run it using Gradle:
-------------------------------------
gradle run
@ -416,8 +416,8 @@ that'd consume a ton of ram.
== Testing scripts more directly
In general its best to stick to testing in vagrant because the bats scripts are
destructive. When working with a single package its generally faster to run its
tests in a tighter loop than maven provides. In one window:
destructive. When working with a single package it's generally faster to run its
tests in a tighter loop than gradle provides. In one window:
--------------------------------
gradle :distribution:rpm:assemble

View File

@ -96,6 +96,10 @@ public final class Response {
return response.getEntity();
}
HttpResponse getHttpResponse() {
return response;
}
@Override
public String toString() {
return "Response{" +

View File

@ -19,6 +19,10 @@
package org.elasticsearch.client;
import org.apache.http.HttpEntity;
import org.apache.http.entity.BufferedHttpEntity;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
/**
@ -34,9 +38,19 @@ public final class ResponseException extends IOException {
this.response = response;
}
private static String buildMessage(Response response) {
return response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri()
private static String buildMessage(Response response) throws IOException {
String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri()
+ ": " + response.getStatusLine().toString();
HttpEntity entity = response.getEntity();
if (entity != null) {
if (entity.isRepeatable() == false) {
entity = new BufferedHttpEntity(entity);
response.getHttpResponse().setEntity(entity);
}
message += "\n" + EntityUtils.toString(entity);
}
return message;
}
/**

View File

@ -149,6 +149,7 @@ public class RequestLoggerTests extends RestClientTestCase {
if (getRandom().nextBoolean()) {
entity = new StringEntity(responseBody, StandardCharsets.UTF_8);
} else {
//test a non repeatable entity
entity = new InputStreamEntity(new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8)));
}
httpResponse.setEntity(entity);

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.RequestLine;
import org.apache.http.StatusLine;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicRequestLine;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.util.EntityUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
public class ResponseExceptionTests extends RestClientTestCase {
public void testResponseException() throws IOException {
ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1);
StatusLine statusLine = new BasicStatusLine(protocolVersion, 500, "Internal Server Error");
HttpResponse httpResponse = new BasicHttpResponse(statusLine);
String responseBody = "{\"error\":{\"root_cause\": {}}}";
boolean hasBody = getRandom().nextBoolean();
if (hasBody) {
HttpEntity entity;
if (getRandom().nextBoolean()) {
entity = new StringEntity(responseBody, StandardCharsets.UTF_8);
} else {
//test a non repeatable entity
entity = new InputStreamEntity(new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8)));
}
httpResponse.setEntity(entity);
}
RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion);
HttpHost httpHost = new HttpHost("localhost", 9200);
Response response = new Response(requestLine, httpHost, httpResponse);
ResponseException responseException = new ResponseException(response);
assertSame(response, responseException.getResponse());
if (hasBody) {
assertEquals(responseBody, EntityUtils.toString(responseException.getResponse().getEntity()));
} else {
assertNull(responseException.getResponse().getEntity());
}
String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri()
+ ": " + response.getStatusLine().toString();
if (hasBody) {
message += "\n" + responseBody;
}
assertEquals(message, responseException.getMessage());
}
}

View File

@ -692,8 +692,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
NO_LONGER_PRIMARY_SHARD_EXCEPTION(ShardStateAction.NoLongerPrimaryShardException.class,
ShardStateAction.NoLongerPrimaryShardException::new, 142),
SCRIPT_EXCEPTION(org.elasticsearch.script.ScriptException.class, org.elasticsearch.script.ScriptException::new, 143),
NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144);
NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144),
STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145);
final Class<? extends ElasticsearchException> exceptionClass;
final FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException> constructor;

View File

@ -19,7 +19,6 @@
package org.elasticsearch;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
@ -27,40 +26,39 @@ import java.io.IOException;
/**
* Generic security exception
*/
public class ElasticsearchSecurityException extends ElasticsearchException {
private final RestStatus status;
public class ElasticsearchSecurityException extends ElasticsearchStatusException {
/**
* Build the exception with a specific status and cause.
*/
public ElasticsearchSecurityException(String msg, RestStatus status, Throwable cause, Object... args) {
super(msg, cause, args);
this.status = status ;
super(msg, status, cause, args);
}
/**
* Build the exception with the status derived from the cause.
*/
public ElasticsearchSecurityException(String msg, Exception cause, Object... args) {
this(msg, ExceptionsHelper.status(cause), cause, args);
}
/**
* Build the exception with a status of {@link RestStatus#INTERNAL_SERVER_ERROR} without a cause.
*/
public ElasticsearchSecurityException(String msg, Object... args) {
this(msg, RestStatus.INTERNAL_SERVER_ERROR, null, args);
this(msg, RestStatus.INTERNAL_SERVER_ERROR, args);
}
/**
* Build the exception without a cause.
*/
public ElasticsearchSecurityException(String msg, RestStatus status, Object... args) {
this(msg, status, null, args);
super(msg, status, args);
}
/**
* Read from a stream.
*/
public ElasticsearchSecurityException(StreamInput in) throws IOException {
super(in);
status = RestStatus.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
RestStatus.writeTo(out, status);
}
@Override
public final RestStatus status() {
return status;
}
}

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
* Exception who's {@link RestStatus} is arbitrary rather than derived. Used, for example, by reindex-from-remote to wrap remote exceptions
* that contain a status.
*/
public class ElasticsearchStatusException extends ElasticsearchException {

    private final RestStatus restStatus;

    /**
     * Build the exception with an explicit status and a cause.
     *
     * @param msg    the detail message
     * @param status the {@link RestStatus} to report for this exception
     * @param cause  the underlying cause, may be {@code null}
     * @param args   additional message arguments
     */
    public ElasticsearchStatusException(String msg, RestStatus status, Throwable cause, Object... args) {
        super(msg, cause, args);
        this.restStatus = status;
    }

    /**
     * Build the exception with an explicit status and no cause.
     */
    public ElasticsearchStatusException(String msg, RestStatus status, Object... args) {
        this(msg, status, null, args);
    }

    /**
     * Read from a stream: superclass state first, then the status.
     */
    public ElasticsearchStatusException(StreamInput in) throws IOException {
        super(in);
        restStatus = RestStatus.readFrom(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // must mirror the read order of the StreamInput constructor
        super.writeTo(out);
        RestStatus.writeTo(out, restStatus);
    }

    @Override
    public final RestStatus status() {
        return restStatus;
    }
}

View File

@ -47,6 +47,7 @@ import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.Locale;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@ -171,7 +172,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
int numberIndex = sourceIndexName.lastIndexOf("-");
assert numberIndex != -1 : "no separator '-' found";
int counter = Integer.parseInt(sourceIndexName.substring(numberIndex + 1));
return String.join("-", sourceIndexName.substring(0, numberIndex), String.valueOf(++counter));
return String.join("-", sourceIndexName.substring(0, numberIndex), String.format(Locale.ROOT, "%06d", ++counter));
} else {
throw new IllegalArgumentException("index name [" + sourceIndexName + "] does not match pattern '^.*-(\\d)+$'");
}

View File

@ -81,7 +81,7 @@ public class SimulatePipelineRequest extends ActionRequest<SimulatePipelineReque
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
id = in.readString();
id = in.readOptionalString();
verbose = in.readBoolean();
source = in.readBytesReference();
}
@ -89,7 +89,7 @@ public class SimulatePipelineRequest extends ActionRequest<SimulatePipelineReque
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(id);
out.writeOptionalString(id);
out.writeBoolean(verbose);
out.writeBytesReference(source);
}

View File

@ -60,7 +60,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(pipelineId);
out.writeOptionalString(pipelineId);
out.writeBoolean(verbose);
out.writeVInt(results.size());
for (SimulateDocumentResult response : results) {
@ -71,7 +71,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
this.pipelineId = in.readString();
this.pipelineId = in.readOptionalString();
boolean verbose = in.readBoolean();
int responsesLength = in.readVInt();
results = new ArrayList<>();

View File

@ -207,7 +207,9 @@ public abstract class TransportWriteAction<
if (location != null) {
pendingOps = true;
indexShard.addRefreshListener(location, forcedRefresh -> {
if (forcedRefresh) {
logger.warn("block_until_refresh request ran out of slots and forced a refresh: [{}]", request);
}
respond.respondAfterAsyncAction(forcedRefresh);
});
}

View File

@ -108,7 +108,7 @@ public abstract class TransportClient extends AbstractClient {
final List<Closeable> resourcesToClose = new ArrayList<>();
final ThreadPool threadPool = new ThreadPool(settings);
resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS));
final NetworkService networkService = new NetworkService(settings);
final NetworkService networkService = new NetworkService(settings, Collections.emptyList());
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
try {
final List<Setting<?>> additionalSettings = new ArrayList<>();

View File

@ -26,7 +26,8 @@ import java.util.HashSet;
import java.util.Set;
/**
* Holds a field that can be found in a request while parsing and its different variants, which may be deprecated.
* Holds a field that can be found in a request while parsing and its different
* variants, which may be deprecated.
*/
public class ParseField {
@ -37,6 +38,14 @@ public class ParseField {
private String allReplacedWith = null;
private final String[] allNames;
/**
* @param name
* the primary name for this field. This will be returned by
* {@link #getPreferredName()}
* @param deprecatedNames
* names for this field which are deprecated and will not be
* accepted when strict matching is used.
*/
public ParseField(String name, String... deprecatedNames) {
this.name = name;
if (deprecatedNames == null || deprecatedNames.length == 0) {
@ -52,20 +61,35 @@ public class ParseField {
this.allNames = allNames.toArray(new String[allNames.size()]);
}
public String getPreferredName(){
/**
* @return the preferred name used for this field
*/
public String getPreferredName() {
return name;
}
/**
* @return All names for this field regardless of whether they are
* deprecated
*/
public String[] getAllNamesIncludedDeprecated() {
return allNames;
}
/**
* @param deprecatedNames
* deprecated names to include with the returned
* {@link ParseField}
* @return a new {@link ParseField} using the preferred name from this one
* but with the specified deprecated names
*/
public ParseField withDeprecation(String... deprecatedNames) {
return new ParseField(this.name, deprecatedNames);
}
/**
* Return a new ParseField where all field names are deprecated and replaced with {@code allReplacedWith}.
* Return a new ParseField where all field names are deprecated and replaced
* with {@code allReplacedWith}.
*/
public ParseField withAllDeprecated(String allReplacedWith) {
ParseField parseField = this.withDeprecation(getAllNamesIncludedDeprecated());
@ -73,16 +97,34 @@ public class ParseField {
return parseField;
}
boolean match(String currentFieldName, boolean strict) {
if (allReplacedWith == null && currentFieldName.equals(name)) {
/**
* @param fieldName
* the field name to match against this {@link ParseField}
* @param strict
* if true an exception will be thrown if a deprecated field name
* is given. If false the deprecated name will be matched but a
* message will also be logged to the {@link DeprecationLogger}
* @return true if <code>fieldName</code> matches any of the acceptable
* names for this {@link ParseField}.
*/
boolean match(String fieldName, boolean strict) {
// if this parse field has not been completely deprecated then try to
// match the preferred name
if (allReplacedWith == null && fieldName.equals(name)) {
return true;
}
// Now try to match against one of the deprecated names. Note that if
// the parse field is entirely deprecated (allReplacedWith != null) all
// fields will be in the deprecatedNames array
String msg;
for (String depName : deprecatedNames) {
if (currentFieldName.equals(depName)) {
msg = "Deprecated field [" + currentFieldName + "] used, expected [" + name + "] instead";
if (fieldName.equals(depName)) {
msg = "Deprecated field [" + fieldName + "] used, expected [" + name + "] instead";
if (allReplacedWith != null) {
msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]";
// If the field is entirely deprecated then there is no
// preferred name so instead use the `allReplaceWith`
// message to indicate what should be used instead
msg = "Deprecated field [" + fieldName + "] used, replaced by [" + allReplacedWith + "]";
}
if (strict) {
throw new IllegalArgumentException(msg);
@ -100,10 +142,20 @@ public class ParseField {
return getPreferredName();
}
/**
* @return the message to use if this {@link ParseField} has been entirely
* deprecated in favor of something else. This method will return
* <code>null</code> if the ParseField has not been completely
* deprecated.
*/
public String getAllReplacedWith() {
return allReplacedWith;
}
/**
* @return an array of the names for the {@link ParseField} which are
* deprecated.
*/
public String[] getDeprecatedNames() {
return deprecatedNames;
}

View File

@ -37,6 +37,8 @@ import org.elasticsearch.common.lucene.Lucene;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
@ -224,17 +226,23 @@ public class FiltersFunctionScoreQuery extends Query {
filterExplanations.add(filterExplanation);
}
}
if (filterExplanations.size() > 0) {
FiltersFunctionFactorScorer scorer = functionScorer(context);
int actualDoc = scorer.iterator().advance(doc);
assert (actualDoc == doc);
double score = scorer.computeScore(doc, expl.getValue());
Explanation factorExplanation = Explanation.match(
Explanation factorExplanation;
if (filterExplanations.size() > 0) {
factorExplanation = Explanation.match(
CombineFunction.toFloat(score),
"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
filterExplanations);
expl = combineFunction.explain(expl, factorExplanation, maxBoost);
} else {
// it is a little weird to add a match although no function matches but that is the way function_score behaves right now
factorExplanation = Explanation.match(1.0f,
"No function matched", Collections.emptyList());
}
expl = combineFunction.explain(expl, factorExplanation, maxBoost);
if (minScore != null && minScore > expl.getValue()) {
expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl);
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.network;
import org.elasticsearch.action.support.replication.ReplicationTask;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
@ -75,13 +74,13 @@ public class NetworkModule extends AbstractModule {
/**
* Creates a network module that custom networking classes can be plugged into.
*
* @param networkService A constructed network service object to bind.
* @param settings The settings for the node
* @param transportClient True if only transport classes should be allowed to be registered, false otherwise.
* @param namedWriteableRegistry registry for named writeables for use during streaming
*/
public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient, NamedWriteableRegistry namedWriteableRegistry) {
public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient,
NamedWriteableRegistry namedWriteableRegistry) {
this.networkService = networkService;
this.settings = settings;
this.transportClient = transportClient;

View File

@ -33,7 +33,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
@ -90,18 +89,12 @@ public class NetworkService extends AbstractComponent {
InetAddress[] resolveIfPossible(String value) throws IOException;
}
private final List<CustomNameResolver> customNameResolvers = new CopyOnWriteArrayList<>();
private final List<CustomNameResolver> customNameResolvers;
public NetworkService(Settings settings) {
public NetworkService(Settings settings, List<CustomNameResolver> customNameResolvers) {
super(settings);
IfConfig.logIfNecessary();
}
/**
* Add a custom name resolver.
*/
public void addCustomNameResolver(CustomNameResolver customNameResolver) {
customNameResolvers.add(customNameResolver);
this.customNameResolvers = customNameResolvers;
}
/**
@ -120,13 +113,15 @@ public class NetworkService extends AbstractComponent {
// if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline
bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
} else {
// next check any registered custom resolvers
// next check any registered custom resolvers if any
if (customNameResolvers != null) {
for (CustomNameResolver customNameResolver : customNameResolvers) {
InetAddress addresses[] = customNameResolver.resolveDefault();
if (addresses != null) {
return addresses;
}
}
}
// we know it's not here. get the defaults
bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
}
@ -166,13 +161,15 @@ public class NetworkService extends AbstractComponent {
// if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline
publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
} else {
// next check any registered custom resolvers
// next check any registered custom resolvers if any
if (customNameResolvers != null) {
for (CustomNameResolver customNameResolver : customNameResolvers) {
InetAddress addresses[] = customNameResolver.resolveDefault();
if (addresses != null) {
return addresses[0];
}
}
}
// we know it's not here. get the defaults
publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
}
@ -229,13 +226,15 @@ public class NetworkService extends AbstractComponent {
private InetAddress[] resolveInternal(String host) throws IOException {
if ((host.startsWith("#") && host.endsWith("#")) || (host.startsWith("_") && host.endsWith("_"))) {
host = host.substring(1, host.length() - 1);
// allow custom resolvers to have special names
// next check any registered custom resolvers if any
if (customNameResolvers != null) {
for (CustomNameResolver customNameResolver : customNameResolvers) {
InetAddress addresses[] = customNameResolver.resolveIfPossible(host);
if (addresses != null) {
return addresses;
}
}
}
switch (host) {
case "local":
return NetworkUtils.getLoopbackAddresses();

View File

@ -326,7 +326,10 @@ public class TimeValue implements Writeable {
return new TimeValue(parse(sValue, normalized, 2), TimeUnit.MILLISECONDS);
} else if (normalized.endsWith("s")) {
return new TimeValue(parse(sValue, normalized, 1), TimeUnit.SECONDS);
} else if (normalized.endsWith("m")) {
} else if (sValue.endsWith("m")) {
// parsing minutes should be case sensitive as `M` is generally
// accepted to mean months not minutes. This is the only case where
// the upper and lower case forms indicate different time units
return new TimeValue(parse(sValue, normalized, 1), TimeUnit.MINUTES);
} else if (normalized.endsWith("h")) {
return new TimeValue(parse(sValue, normalized, 1), TimeUnit.HOURS);

View File

@ -92,6 +92,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.plugins.IngestPlugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
@ -294,7 +295,8 @@ public class Node implements Closeable {
// so we might be late here already
final SettingsModule settingsModule = new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter);
resourcesToClose.add(resourceWatcherService);
final NetworkService networkService = new NetworkService(settings);
final NetworkService networkService = new NetworkService(settings,
getCustomNameResolvers(pluginsService.filterPlugins(DiscoveryPlugin.class)));
final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool);
clusterService.add(scriptModule.getScriptService());
resourcesToClose.add(clusterService);
@ -721,4 +723,19 @@ public class Node implements Closeable {
BigArrays createBigArrays(Settings settings, CircuitBreakerService circuitBreakerService) {
return new BigArrays(settings, circuitBreakerService);
}
/**
* Get Custom Name Resolvers list based on a Discovery Plugins list
* @param discoveryPlugins Discovery plugins list
*/
private List<NetworkService.CustomNameResolver> getCustomNameResolvers(List<DiscoveryPlugin> discoveryPlugins) {
List<NetworkService.CustomNameResolver> customNameResolvers = new ArrayList<>();
for (DiscoveryPlugin discoveryPlugin : discoveryPlugins) {
NetworkService.CustomNameResolver customNameResolver = discoveryPlugin.getCustomNameResolver(settings);
if (customNameResolver != null) {
customNameResolvers.add(customNameResolver);
}
}
return customNameResolvers;
}
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
/**
* An additional extension point for {@link Plugin}s that extends Elasticsearch's discovery functionality. To add an additional
* {@link NetworkService.CustomNameResolver} just implement the interface and implement the {@link #getCustomNameResolver(Settings)} method:
*
* <pre>{@code
* public class MyDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
* &#64;Override
* public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
* return new YourCustomNameResolverInstance(settings);
* }
* }
* }</pre>
*/
public interface DiscoveryPlugin {
    /**
     * Override to add additional {@link NetworkService.CustomNameResolver}s.
     * This can be handy if you want to provide your own Network interface name like _mycard_
     * and implement by yourself the logic to get an actual IP address/hostname based on this
     * name.
     *
     * For example: you could call a third party service (an API) to resolve _mycard_.
     * Then you could define in elasticsearch.yml settings like:
     *
     * <pre>{@code
     * network.host: _mycard_
     * }</pre>
     *
     * @param settings the node's settings, available so the resolver can be configured
     * @return a custom name resolver, or {@code null} (the default) if this plugin
     *         does not contribute one
     */
    default NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
        return null;
    }
}

View File

@ -24,6 +24,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
public enum RestStatus {
/**
@ -477,6 +481,15 @@ public enum RestStatus {
*/
INSUFFICIENT_STORAGE(506);
private static final Map<Integer, RestStatus> CODE_TO_STATUS;
static {
RestStatus[] values = values();
Map<Integer, RestStatus> codeToStatus = new HashMap<>(values.length);
for (RestStatus value : values) {
codeToStatus.put(value.status, value);
}
CODE_TO_STATUS = unmodifiableMap(codeToStatus);
}
private int status;
@ -515,4 +528,11 @@ public enum RestStatus {
}
return status;
}
/**
* Turn a status code into a {@link RestStatus}, returning null if we don't know that status.
*/
public static RestStatus fromCode(int code) {
return CODE_TO_STATUS.get(code);
}
}

View File

@ -37,8 +37,10 @@ public class RestStatusToXContentListener<Response extends StatusToXContent> ext
* Build an instance that doesn't support responses with the status {@code 201 CREATED}.
*/
public RestStatusToXContentListener(RestChannel channel) {
// TODO switch this to throwing an exception?
this(channel, r -> null);
this(channel, r -> {
assert false: "Returned a 201 CREATED but not set up to support a Location header";
return null;
});
}
/**

View File

@ -487,7 +487,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
}
}
private class ScriptChangesListener extends FileChangesListener {
private class ScriptChangesListener implements FileChangesListener {
private Tuple<String, String> getScriptNameExt(Path file) {
Path scriptPath = scriptsDirectory.relativize(file);

View File

@ -21,7 +21,9 @@ package org.elasticsearch.search.aggregations.bucket;
/**
* Helper functions for common Bucketing functions
*/
public class BucketUtils {
public final class BucketUtils {
private BucketUtils() {}
/**
* Heuristic used to determine the size of shard-side PriorityQueues when
@ -34,16 +36,22 @@ public class BucketUtils {
* @return A suggested default for the size of any shard-side PriorityQueues
*/
public static int suggestShardSideQueueSize(int finalSize, int numberOfShards) {
assert numberOfShards >= 1;
if (finalSize < 1) {
throw new IllegalArgumentException("size must be positive, got " + finalSize);
}
if (numberOfShards < 1) {
throw new IllegalArgumentException("number of shards must be positive, got " + numberOfShards);
}
if (numberOfShards == 1) {
// In the case of a single shard, we do not need to over-request
return finalSize;
}
//Cap the multiplier used for shards to avoid excessive data transfer
final long shardSampleSize = (long) finalSize * Math.min(10, numberOfShards);
// When finalSize is very small e.g. 1 and there is a low number of
// shards then we need to ensure we still gather a reasonable sample of statistics from each
// shard (at low cost) to improve the chances of the final result being accurate.
return (int) Math.min(Integer.MAX_VALUE, Math.max(10, shardSampleSize));
// Request 50% more buckets on the shards in order to improve accuracy
// as well as a small constant that should help with small values of 'size'
final long shardSampleSize = (long) (finalSize * 1.5 + 10);
return (int) Math.min(Integer.MAX_VALUE, shardSampleSize);
}
}

View File

@ -91,7 +91,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField SORT_FIELD = new ParseField("sort");
public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores");
public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost");
public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations", "aggs");
public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations");
public static final ParseField AGGS_FIELD = new ParseField("aggs");
public static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight");
public static final ParseField SUGGEST_FIELD = new ParseField("suggest");
public static final ParseField RESCORE_FIELD = new ParseField("rescore");
@ -998,7 +999,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
scriptFields.add(new ScriptField(context));
}
} else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
indexBoost = new ObjectFloatHashMap<String>();
indexBoost = new ObjectFloatHashMap<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -1009,7 +1010,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
parser.getTokenLocation());
}
}
} else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)) {
} else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)
|| context.getParseFieldMatcher().match(currentFieldName, AGGS_FIELD)) {
aggregations = aggParsers.parseAggregators(context);
} else if (context.getParseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) {
highlightBuilder = HighlightBuilder.fromXContent(context);

View File

@ -23,53 +23,39 @@ import java.nio.file.Path;
/**
 * Callback interface that the file watcher uses to notify listeners about changes
 * in a watched directory. Every method has a no-op default implementation, so
 * listeners only need to override the events they care about.
 */
public interface FileChangesListener {

    /** Called for every file found in the watched directory during initialization. */
    default void onFileInit(Path file) {}

    /** Called for every subdirectory found in the watched directory during initialization. */
    default void onDirectoryInit(Path file) {}

    /** Called for every new file found in the watched directory. */
    default void onFileCreated(Path file) {}

    /** Called for every file that disappeared from the watched directory. */
    default void onFileDeleted(Path file) {}

    /** Called for every file that was changed in the watched directory. */
    default void onFileChanged(Path file) {}

    /** Called for every new subdirectory found in the watched directory. */
    default void onDirectoryCreated(Path file) {}

    /** Called for every subdirectory that disappeared from the watched directory. */
    default void onDirectoryDeleted(Path file) {}
}

View File

@ -792,6 +792,7 @@ public class ExceptionSerializationTests extends ESTestCase {
ids.put(142, ShardStateAction.NoLongerPrimaryShardException.class);
ids.put(143, org.elasticsearch.script.ScriptException.class);
ids.put(144, org.elasticsearch.cluster.NotMasterException.class);
ids.put(145, org.elasticsearch.ElasticsearchStatusException.class);
Map<Class<? extends ElasticsearchException>, Integer> reverse = new HashMap<>();
for (Map.Entry<Integer, Class<? extends ElasticsearchException>> entry : ids.entrySet()) {
@ -842,4 +843,11 @@ public class ExceptionSerializationTests extends ESTestCase {
}
}
}
/**
 * Round-trips an {@code ElasticsearchStatusException} through serialization and
 * verifies that the REST status is preserved.
 */
public void testElasticsearchRemoteException() throws IOException {
    final ElasticsearchStatusException original = new ElasticsearchStatusException("something", RestStatus.TOO_MANY_REQUESTS);
    final ElasticsearchStatusException roundTripped = serialize(original);
    assertEquals(original.status(), roundTripped.status());
    assertEquals(RestStatus.TOO_MANY_REQUESTS, roundTripped.status());
}
}

View File

@ -45,12 +45,12 @@ import org.elasticsearch.test.rest.FakeRestRequest;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableList;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;

View File

@ -39,14 +39,14 @@ public class RolloverIT extends ESIntegTestCase {
assertAcked(prepareCreate("test_index-1").addAlias(new Alias("test_alias")).get());
final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo("test_index-1"));
assertThat(response.getNewIndex(), equalTo("test_index-2"));
assertThat(response.getNewIndex(), equalTo("test_index-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
final ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetaData oldIndex = state.metaData().index("test_index-1");
assertFalse(oldIndex.getAliases().containsKey("test_alias"));
final IndexMetaData newIndex = state.metaData().index("test_index-2");
final IndexMetaData newIndex = state.metaData().index("test_index-000002");
assertTrue(newIndex.getAliases().containsKey("test_alias"));
}
@ -56,14 +56,14 @@ public class RolloverIT extends ESIntegTestCase {
flush("test_index-2");
final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo("test_index-2"));
assertThat(response.getNewIndex(), equalTo("test_index-3"));
assertThat(response.getNewIndex(), equalTo("test_index-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
final ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetaData oldIndex = state.metaData().index("test_index-2");
assertFalse(oldIndex.getAliases().containsKey("test_alias"));
final IndexMetaData newIndex = state.metaData().index("test_index-3");
final IndexMetaData newIndex = state.metaData().index("test_index-000003");
assertTrue(newIndex.getAliases().containsKey("test_alias"));
}
@ -78,14 +78,14 @@ public class RolloverIT extends ESIntegTestCase {
final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias")
.settings(settings).alias(new Alias("extra_alias")).get();
assertThat(response.getOldIndex(), equalTo("test_index-2"));
assertThat(response.getNewIndex(), equalTo("test_index-3"));
assertThat(response.getNewIndex(), equalTo("test_index-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
final ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetaData oldIndex = state.metaData().index("test_index-2");
assertFalse(oldIndex.getAliases().containsKey("test_alias"));
final IndexMetaData newIndex = state.metaData().index("test_index-3");
final IndexMetaData newIndex = state.metaData().index("test_index-000003");
assertThat(newIndex.getNumberOfShards(), equalTo(1));
assertThat(newIndex.getNumberOfReplicas(), equalTo(0));
assertTrue(newIndex.getAliases().containsKey("test_alias"));
@ -98,14 +98,14 @@ public class RolloverIT extends ESIntegTestCase {
flush("test_index-1");
final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").dryRun(true).get();
assertThat(response.getOldIndex(), equalTo("test_index-1"));
assertThat(response.getNewIndex(), equalTo("test_index-2"));
assertThat(response.getNewIndex(), equalTo("test_index-000002"));
assertThat(response.isDryRun(), equalTo(true));
assertThat(response.isRolledOver(), equalTo(false));
assertThat(response.getConditionStatus().size(), equalTo(0));
final ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetaData oldIndex = state.metaData().index("test_index-1");
assertTrue(oldIndex.getAliases().containsKey("test_alias"));
final IndexMetaData newIndex = state.metaData().index("test_index-2");
final IndexMetaData newIndex = state.metaData().index("test_index-000002");
assertNull(newIndex);
}
@ -126,7 +126,7 @@ public class RolloverIT extends ESIntegTestCase {
final ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetaData oldIndex = state.metaData().index("test_index-0");
assertTrue(oldIndex.getAliases().containsKey("test_alias"));
final IndexMetaData newIndex = state.metaData().index("test_index-1");
final IndexMetaData newIndex = state.metaData().index("test_index-000001");
assertNull(newIndex);
}
@ -151,14 +151,14 @@ public class RolloverIT extends ESIntegTestCase {
public void testRolloverOnExistingIndex() throws Exception {
assertAcked(prepareCreate("test_index-0").addAlias(new Alias("test_alias")).get());
index("test_index-0", "type1", "1", "field", "value");
assertAcked(prepareCreate("test_index-1").get());
index("test_index-1", "type1", "1", "field", "value");
flush("test_index-0", "test_index-1");
assertAcked(prepareCreate("test_index-000001").get());
index("test_index-000001", "type1", "1", "field", "value");
flush("test_index-0", "test_index-000001");
try {
client().admin().indices().prepareRolloverIndex("test_alias").get();
fail("expected failure due to existing rollover index");
} catch (IndexAlreadyExistsException e) {
assertThat(e.getIndex().getName(), equalTo("test_index-1"));
assertThat(e.getIndex().getName(), equalTo("test_index-000001"));
}
}
}

View File

@ -35,6 +35,7 @@ import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.test.ESTestCase;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import static org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction.evaluateConditions;
@ -158,9 +159,9 @@ public class TransportRolloverActionTests extends ESTestCase {
final String indexPrefix = randomAsciiOfLength(10);
String indexEndingInNumbers = indexPrefix + "-" + num;
assertThat(TransportRolloverAction.generateRolloverIndexName(indexEndingInNumbers),
equalTo(indexPrefix + "-" + (num + 1)));
assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-1"), equalTo("index-name-2"));
assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-2"), equalTo("index-name-3"));
equalTo(indexPrefix + "-" + String.format(Locale.ROOT, "%06d", num + 1)));
assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-1"), equalTo("index-name-000002"));
assertThat(TransportRolloverAction.generateRolloverIndexName("index-name-2"), equalTo("index-name-000003"));
}
public void testCreateIndexRequest() throws Exception {

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.ingest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.elasticsearch.ingest.IngestDocumentTests.assertIngestDocument;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.nullValue;
/**
 * Round-trips a {@code SimulatePipelineRequest} through the stream layer and
 * verifies that the id and verbosity flag survive serialization.
 */
public class SimulatePipelineRequestTests extends ESTestCase {

    public void testSerialization() throws IOException {
        SimulatePipelineRequest request = new SimulatePipelineRequest(new BytesArray(""));
        if (randomBoolean()) {
            // exercise the optional id field
            request.setId(randomAsciiOfLengthBetween(1, 10));
        }
        if (randomBoolean()) {
            // explicitly toggle verbosity, with either value
            request.setVerbose(randomBoolean());
        }

        BytesStreamOutput output = new BytesStreamOutput();
        request.writeTo(output);

        SimulatePipelineRequest deserialized = new SimulatePipelineRequest();
        deserialized.readFrom(output.bytes().streamInput());

        assertThat(deserialized.getId(), equalTo(request.getId()));
        assertThat(deserialized.isVerbose(), equalTo(request.isVerbose()));
    }
}

View File

@ -39,6 +39,7 @@ public class SimulatePipelineResponseTests extends ESTestCase {
public void testSerialization() throws IOException {
boolean isVerbose = randomBoolean();
String id = randomBoolean() ? randomAsciiOfLengthBetween(1, 10) : null;
int numResults = randomIntBetween(1, 10);
List<SimulateDocumentResult> results = new ArrayList<>(numResults);
for (int i = 0; i < numResults; i++) {
@ -70,7 +71,7 @@ public class SimulatePipelineResponseTests extends ESTestCase {
}
}
SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results);
SimulatePipelineResponse response = new SimulatePipelineResponse(id, isVerbose, results);
BytesStreamOutput out = new BytesStreamOutput();
response.writeTo(out);
StreamInput streamInput = out.bytes().streamInput();

View File

@ -51,8 +51,11 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.test.ESAllocationTestCase;
import java.util.Collections;
import static java.util.Collections.singleton;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;

View File

@ -44,6 +44,7 @@ import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Collections;
public class NetworkModuleTests extends ModuleTestCase {
@ -112,13 +113,14 @@ public class NetworkModuleTests extends ModuleTestCase {
.put(NetworkModule.HTTP_ENABLED.getKey(), false)
.put(NetworkModule.TRANSPORT_TYPE_KEY, "local")
.build();
NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false,
new NamedWriteableRegistry());
module.registerTransportService("custom", FakeTransportService.class);
assertBinding(module, TransportService.class, FakeTransportService.class);
assertFalse(module.isTransportClient());
// check it works with transport only as well
module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry());
module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry());
module.registerTransportService("custom", FakeTransportService.class);
assertBinding(module, TransportService.class, FakeTransportService.class);
assertTrue(module.isTransportClient());
@ -128,13 +130,14 @@ public class NetworkModuleTests extends ModuleTestCase {
Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom")
.put(NetworkModule.HTTP_ENABLED.getKey(), false)
.build();
NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false,
new NamedWriteableRegistry());
module.registerTransport("custom", FakeTransport.class);
assertBinding(module, Transport.class, FakeTransport.class);
assertFalse(module.isTransportClient());
// check it works with transport only as well
module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry());
module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry());
module.registerTransport("custom", FakeTransport.class);
assertBinding(module, Transport.class, FakeTransport.class);
assertTrue(module.isTransportClient());
@ -144,13 +147,14 @@ public class NetworkModuleTests extends ModuleTestCase {
Settings settings = Settings.builder()
.put(NetworkModule.HTTP_TYPE_SETTING.getKey(), "custom")
.put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build();
NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false,
new NamedWriteableRegistry());
module.registerHttpTransport("custom", FakeHttpTransport.class);
assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class);
assertFalse(module.isTransportClient());
// check registration not allowed for transport only
module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry());
module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, true, new NamedWriteableRegistry());
assertTrue(module.isTransportClient());
try {
module.registerHttpTransport("custom", FakeHttpTransport.class);
@ -163,7 +167,7 @@ public class NetworkModuleTests extends ModuleTestCase {
// not added if http is disabled
settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false)
.put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build();
module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, new NamedWriteableRegistry());
assertNotBound(module, HttpServerTransport.class);
assertFalse(module.isTransportClient());
}
@ -171,7 +175,7 @@ public class NetworkModuleTests extends ModuleTestCase {
public void testRegisterTaskStatus() {
NamedWriteableRegistry registry = new NamedWriteableRegistry();
Settings settings = Settings.EMPTY;
NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, registry);
NetworkModule module = new NetworkModule(new NetworkService(settings, Collections.emptyList()), settings, false, registry);
assertFalse(module.isTransportClient());
// Builtin reader comes back

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.net.InetAddress;
import java.util.Collections;
import static org.hamcrest.Matchers.is;
@ -36,7 +37,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure exception if we bind to multicast ipv4 address
*/
public void testBindMulticastV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
try {
service.resolveBindHostAddresses(new String[] { "239.1.1.1" });
fail("should have hit exception");
@ -48,7 +49,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure exception if we bind to multicast ipv6 address
*/
public void testBindMulticastV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
try {
service.resolveBindHostAddresses(new String[] { "FF08::108" });
fail("should have hit exception");
@ -61,7 +62,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure exception if we publish to multicast ipv4 address
*/
public void testPublishMulticastV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
try {
service.resolvePublishHostAddresses(new String[] { "239.1.1.1" });
fail("should have hit exception");
@ -74,7 +75,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure exception if we publish to multicast ipv6 address
*/
public void testPublishMulticastV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
try {
service.resolvePublishHostAddresses(new String[] { "FF08::108" });
fail("should have hit exception");
@ -87,7 +88,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure specifying wildcard ipv4 address will bind to all interfaces
*/
public void testBindAnyLocalV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
assertEquals(InetAddress.getByName("0.0.0.0"), service.resolveBindHostAddresses(new String[] { "0.0.0.0" })[0]);
}
@ -95,7 +96,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure specifying wildcard ipv6 address will bind to all interfaces
*/
public void testBindAnyLocalV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
assertEquals(InetAddress.getByName("::"), service.resolveBindHostAddresses(new String[] { "::" })[0]);
}
@ -103,7 +104,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure specifying wildcard ipv4 address selects reasonable publish address
*/
public void testPublishAnyLocalV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
InetAddress address = service.resolvePublishHostAddresses(new String[] { "0.0.0.0" });
assertFalse(address.isAnyLocalAddress());
}
@ -112,7 +113,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure specifying wildcard ipv6 address selects reasonable publish address
*/
public void testPublishAnyLocalV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
InetAddress address = service.resolvePublishHostAddresses(new String[] { "::" });
assertFalse(address.isAnyLocalAddress());
}
@ -121,7 +122,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure we can bind to multiple addresses
*/
public void testBindMultipleAddresses() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
InetAddress[] addresses = service.resolveBindHostAddresses(new String[]{"127.0.0.1", "127.0.0.2"});
assertThat(addresses.length, is(2));
}
@ -130,7 +131,7 @@ public class NetworkServiceTests extends ESTestCase {
* ensure we can't bind to multiple addresses when using wildcard
*/
public void testBindMultipleAddressesWithWildcard() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
NetworkService service = new NetworkService(Settings.EMPTY, Collections.emptyList());
try {
service.resolveBindHostAddresses(new String[]{"0.0.0.0", "127.0.0.1"});
fail("should have hit exception");

View File

@ -92,10 +92,6 @@ public class TimeValueTests extends ESTestCase {
TimeValue.parseTimeValue("10 m", null, "test"));
assertEquals(new TimeValue(10, TimeUnit.MINUTES),
TimeValue.parseTimeValue("10m", null, "test"));
assertEquals(new TimeValue(10, TimeUnit.MINUTES),
TimeValue.parseTimeValue("10 M", null, "test"));
assertEquals(new TimeValue(10, TimeUnit.MINUTES),
TimeValue.parseTimeValue("10M", null, "test"));
assertEquals(new TimeValue(10, TimeUnit.HOURS),
TimeValue.parseTimeValue("10 h", null, "test"));
@ -115,6 +111,17 @@ public class TimeValueTests extends ESTestCase {
assertEquals(new TimeValue(10, TimeUnit.DAYS),
TimeValue.parseTimeValue("10D", null, "test"));
// Time values of months should throw an exception as months are not
// supported. Note that this is the only unit that is not case sensitive
// as `m` is the only character that is overloaded in terms of which
// time unit is expected between the upper and lower case versions
expectThrows(ElasticsearchParseException.class, () -> {
TimeValue.parseTimeValue("10 M", null, "test");
});
expectThrows(ElasticsearchParseException.class, () -> {
TimeValue.parseTimeValue("10M", null, "test");
});
final int length = randomIntBetween(0, 8);
final String zeros = new String(new char[length]).replace('\0', '0');
assertTrue(TimeValue.parseTimeValue("-" + zeros + "1", null, "test") == TimeValue.MINUS_ONE);

View File

@ -45,6 +45,7 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.TransportSettings;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
@ -66,7 +67,7 @@ public class UnicastZenPingIT extends ESTestCase {
.put(TransportSettings.PORT.getKey(), startPort + "-" + endPort).build();
ThreadPool threadPool = new TestThreadPool(getClass().getName());
NetworkService networkService = new NetworkService(settings);
NetworkService networkService = new NetworkService(settings, Collections.emptyList());
ElectMasterService electMasterService = new ElectMasterService(settings);
NetworkHandle handleA = startServices(settings, threadPool, networkService, "UZP_A", Version.CURRENT);

View File

@ -599,7 +599,7 @@ public class FunctionScoreTests extends ESTestCase {
Explanation ffsqExpl = searcher.explain(ffsq, 0);
assertTrue(ffsqExpl.isMatch());
assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f);
assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription());
assertEquals(queryExpl.getDescription(), ffsqExpl.getDetails()[0].getDescription());
ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 10f,
CombineFunction.MULTIPLY);
@ -726,6 +726,31 @@ public class FunctionScoreTests extends ESTestCase {
}
}
/**
 * Verifies that the score returned by {@code IndexSearcher#explain} matches the
 * score produced by {@code IndexSearcher#search} for a
 * {@code FiltersFunctionScoreQuery}, both when a filter function matches the
 * document and when none does.
 */
public void testExplanationAndScoreEqualsEvenIfNoFunctionMatches() throws IOException {
IndexSearcher localSearcher = newSearcher(reader);
// pick an arbitrary score/combine mode: the search-vs-explain invariant must hold for all of them
ScoreMode scoreMode = randomFrom(new
ScoreMode[]{ScoreMode.SUM, ScoreMode.AVG, ScoreMode.FIRST, ScoreMode.MIN, ScoreMode.MAX, ScoreMode.MULTIPLY});
CombineFunction combineFunction = randomFrom(new
CombineFunction[]{CombineFunction.SUM, CombineFunction.AVG, CombineFunction.MIN, CombineFunction.MAX,
CombineFunction.MULTIPLY, CombineFunction.REPLACE});
// check for document that has no matching function (filter targets _uid "2")
FiltersFunctionScoreQuery query = new FiltersFunctionScoreQuery(new TermQuery(new Term(FIELD, "out")), scoreMode,
new FilterFunction[]{new FilterFunction(new TermQuery(new Term("_uid", "2")), new WeightFactorFunction(10))},
Float.MAX_VALUE, Float.NEGATIVE_INFINITY, combineFunction);
TopDocs searchResult = localSearcher.search(query, 1);
Explanation explanation = localSearcher.explain(query, searchResult.scoreDocs[0].doc);
assertThat(searchResult.scoreDocs[0].score, equalTo(explanation.getValue()));
// check for document that has a matching function (filter targets _uid "1")
query = new FiltersFunctionScoreQuery(new TermQuery(new Term(FIELD, "out")), scoreMode,
new FilterFunction[]{new FilterFunction(new TermQuery(new Term("_uid", "1")), new WeightFactorFunction(10))},
Float.MAX_VALUE, Float.NEGATIVE_INFINITY, combineFunction);
searchResult = localSearcher.search(query, 1);
explanation = localSearcher.explain(query, searchResult.scoreDocs[0].doc);
assertThat(searchResult.scoreDocs[0].score, equalTo(explanation.getValue()));
}
private static class DummyScoreFunction extends ScoreFunction {
protected DummyScoreFunction(CombineFunction scoreCombiner) {
super(scoreCombiner);

View File

@ -153,7 +153,8 @@ public class TruncateTranslogIT extends ESIntegTestCase {
Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// Great, do nothing, we just wanted to obtain the lock
} catch (LockObtainFailedException lofe) {
throw new ElasticsearchException("Still waiting for lock release at [" + idxLocation + "]");
logger.info("--> failed acquiring lock for {}", idxLocation);
fail("still waiting for lock release at [" + idxLocation + "]");
} catch (IOException ioe) {
fail("Got an IOException: " + ioe);
}

View File

@ -0,0 +1,183 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
public class BucketUtilsTests extends ESTestCase {
/**
 * Both arguments of {@code suggestShardSideQueueSize} must be strictly positive;
 * verify the exception messages.
 */
public void testBadInput() {
    // JUnit convention: expected value first, actual second — the original call had them swapped,
    // which produces misleading failure messages.
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> BucketUtils.suggestShardSideQueueSize(0, 10));
    assertEquals("size must be positive, got 0", e.getMessage());

    e = expectThrows(IllegalArgumentException.class,
            () -> BucketUtils.suggestShardSideQueueSize(10, 0));
    assertEquals("number of shards must be positive, got 0", e.getMessage());
}
/** A single shard needs no over-request: the suggested queue size equals the requested size. */
public void testOptimizesSingleShard() {
    for (int round = 0; round < 10; ++round) {
        final int requested = randomIntBetween(1, Integer.MAX_VALUE);
        assertEquals(requested, BucketUtils.suggestShardSideQueueSize(requested, 1));
    }
}
/**
 * Sizes close to {@code Integer.MAX_VALUE} must not overflow: the result is capped
 * and therefore still at least as large as the requested size.
 */
public void testOverFlow() {
    for (int iter = 0; iter < 10; ++iter) {
        final int size = Integer.MAX_VALUE - randomInt(10);
        final int numberOfShards = randomIntBetween(1, 10);
        final int shardSize = BucketUtils.suggestShardSideQueueSize(size, numberOfShards);
        // The original assertion compared shardSize to itself (always true);
        // the intent is that the capped result never drops below the requested size.
        assertThat(shardSize, greaterThanOrEqualTo(size));
    }
}
/** For any shard count, the shard-side queue size is never smaller than the requested size. */
public void testShardSizeIsGreaterThanGlobalSize() {
    for (int round = 0; round < 10; ++round) {
        final int requested = randomIntBetween(1, Integer.MAX_VALUE);
        final int shards = randomIntBetween(1, 10);
        final int perShard = BucketUtils.suggestShardSideQueueSize(requested, shards);
        assertThat(perShard, greaterThanOrEqualTo(requested));
    }
}
/*// You may use the code below to evaluate the impact of the BucketUtils.suggestShardSideQueueSize
// heuristic
public static void main(String[] args) {
final int numberOfUniqueTerms = 10000;
final int totalNumberOfTerms = 1000000;
final int numberOfShards = 10;
final double skew = 2; // parameter of the zipf distribution
final int size = 100;
double totalWeight = 0;
for (int rank = 1; rank <= numberOfUniqueTerms; ++rank) {
totalWeight += weight(rank, skew);
}
int[] terms = new int[totalNumberOfTerms];
int len = 0;
final int[] actualTopFreqs = new int[size];
for (int rank = 1; len < totalNumberOfTerms; ++rank) {
int freq = (int) (weight(rank, skew) / totalWeight * totalNumberOfTerms);
freq = Math.max(freq, 1);
Arrays.fill(terms, len, Math.min(len + freq, totalNumberOfTerms), rank - 1);
len += freq;
if (rank <= size) {
actualTopFreqs[rank-1] = freq;
}
}
final int maxTerm = terms[terms.length - 1] + 1;
// shuffle terms
Random r = new Random(0);
for (int i = terms.length - 1; i > 0; --i) {
final int swapWith = r.nextInt(i);
int tmp = terms[i];
terms[i] = terms[swapWith];
terms[swapWith] = tmp;
}
// distribute into shards like routing would
int[][] shards = new int[numberOfShards][];
int upTo = 0;
for (int i = 0; i < numberOfShards; ++i) {
shards[i] = Arrays.copyOfRange(terms, upTo, upTo + (terms.length - upTo) / (numberOfShards - i));
upTo += shards[i].length;
}
final int[][] topShards = new int[numberOfShards][];
final int shardSize = BucketUtils.suggestShardSideQueueSize(size, numberOfShards);
for (int shard = 0; shard < numberOfShards; ++shard) {
final int[] data = shards[shard];
final int[] freqs = new int[maxTerm];
for (int d : data) {
freqs[d]++;
}
int[] termIds = new int[maxTerm];
for (int i = 0; i < maxTerm; ++i) {
termIds[i] = i;
}
new InPlaceMergeSorter() {
@Override
protected void swap(int i, int j) {
int tmp = termIds[i];
termIds[i] = termIds[j];
termIds[j] = tmp;
tmp = freqs[i];
freqs[i] = freqs[j];
freqs[j] = tmp;
}
@Override
protected int compare(int i, int j) {
return freqs[j] - freqs[i];
}
}.sort(0, maxTerm);
Arrays.fill(freqs, shardSize, freqs.length, 0);
new InPlaceMergeSorter() {
@Override
protected void swap(int i, int j) {
int tmp = termIds[i];
termIds[i] = termIds[j];
termIds[j] = tmp;
tmp = freqs[i];
freqs[i] = freqs[j];
freqs[j] = tmp;
}
@Override
protected int compare(int i, int j) {
return termIds[i] - termIds[j];
}
}.sort(0, maxTerm);
topShards[shard] = freqs;
}
final int[] computedTopFreqs = new int[size];
for (int[] freqs : topShards) {
for (int i = 0; i < size; ++i) {
computedTopFreqs[i] += freqs[i];
}
}
int numErrors = 0;
int totalFreq = 0;
for (int i = 0; i < size; ++i) {
numErrors += Math.abs(computedTopFreqs[i] - actualTopFreqs[i]);
totalFreq += actualTopFreqs[i];
}
System.out.println("Number of unique terms: " + maxTerm);
System.out.println("Global freqs of top terms: " + Arrays.toString(actualTopFreqs));
System.out.println("Computed freqs of top terms: " + Arrays.toString(computedTopFreqs));
System.out.println("Number of errors: " + numErrors + "/" + totalFreq);
}
private static double weight(int rank, double skew) {
return 1d / Math.pow(rank, skew);
}*/
}

View File

@ -545,6 +545,41 @@ public class SearchSourceBuilderTests extends ESTestCase {
}
}
/**
 * Aggregations must parse under both the short "aggs" key and the long
 * "aggregations" key; each variant below must yield exactly one parsed
 * aggregation.
 */
public void testAggsParsing() throws IOException {
    // short form: "aggs"
    String shortForm = "{\n" + " " +
            "\"aggs\": {" +
            " \"test_agg\": {\n" +
            " " + "\"terms\" : {\n" +
            " \"field\": \"foo\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}\n";
    assertParsesSingleAggregation(shortForm);
    // long form: "aggregations"
    String longForm = "{\n" +
            " \"aggregations\": {" +
            " \"test_agg\": {\n" +
            " \"terms\" : {\n" +
            " \"field\": \"foo\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}\n";
    assertParsesSingleAggregation(longForm);
}

/** Parses the given search source and asserts it contains exactly one aggregation. */
private void assertParsesSingleAggregation(String restContent) throws IOException {
    try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
        SearchSourceBuilder searchSourceBuilder =
                SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters);
        assertEquals(1, searchSourceBuilder.aggregations().count());
    }
}
/**
* test that we can parse the `rescore` element either as single object or as array
*/

View File

@ -36,6 +36,7 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
@ -63,7 +64,7 @@ public class TransportServiceHandshakeTests extends ESTestCase {
BigArrays.NON_RECYCLING_INSTANCE,
new NoneCircuitBreakerService(),
new NamedWriteableRegistry(),
new NetworkService(settings));
new NetworkService(settings, Collections.emptyList()));
TransportService transportService = new MockTransportService(settings, transport, threadPool);
transportService.start();
transportService.acceptIncomingRequests();

View File

@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.hasSize;
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class FileWatcherTests extends ESTestCase {
private class RecordingChangeListener extends FileChangesListener {
private class RecordingChangeListener implements FileChangesListener {
private Path rootDir;
private RecordingChangeListener(Path rootDir) {

View File

@ -0,0 +1,113 @@
== Common configuration
The `RestClientBuilder` supports providing both a `RequestConfigCallback` and
an `HttpClientConfigCallback` which allow for any customization that the Apache
Async Http Client exposes. Those callbacks make it possible to modify some
specific behaviour of the client without overriding every other default
configuration that the `RestClient` is initialized with. This section
describes some common scenarios that require additional configuration for the
low-level Java REST Client.
=== Timeouts
Configuring requests timeouts can be done by providing an instance of
`RequestConfigCallback` while building the `RestClient` through its builder.
The interface has one method that receives an instance of
https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html[`org.apache.http.client.config.RequestConfig.Builder`]
as an argument and has the same return type. The request config builder can
be modified and then returned. In the following example we increase the
connect timeout (defaults to 1 second) and the socket timeout (defaults to 10
seconds). Also we adjust the max retry timeout accordingly (defaults to 10
seconds too).
[source,java]
--------------------------------------------------
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
.setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() {
@Override
public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) {
return requestConfigBuilder.setConnectTimeout(5000)
.setSocketTimeout(30000);
}
})
.setMaxRetryTimeoutMillis(30000)
.build();
--------------------------------------------------
=== Number of threads
The Apache Http Async Client starts by default one dispatcher thread, and a
number of worker threads used by the connection manager, as many as the number
of locally detected processors (depending on what
`Runtime.getRuntime().availableProcessors()` returns). The number of threads
can be modified as follows:
[source,java]
--------------------------------------------------
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
return httpClientBuilder.setDefaultIOReactorConfig(
IOReactorConfig.custom().setIoThreadCount(1).build());
}
})
.build();
--------------------------------------------------
=== Basic authentication
Configuring basic authentication can be done by providing an
`HttpClientConfigCallback` while building the `RestClient` through its builder.
The interface has one method that receives an instance of
https://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`]
as an argument and has the same return type. The http client builder can be
modified and then returned. In the following example we set a default
credentials provider that requires basic authentication.
[source,java]
--------------------------------------------------
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials("user", "password"));
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
})
.build();
--------------------------------------------------
=== Encrypted communication
Encrypted communication can also be configured through the
`HttpClientConfigCallback`. The
https://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`]
received as an argument exposes multiple methods to configure encrypted
communication: `setSSLContext`, `setSSLSessionStrategy` and
`setConnectionManager`, in order of precedence from the least important.
The following is an example:
[source,java]
--------------------------------------------------
KeyStore keyStore = KeyStore.getInstance("jks");
try (InputStream is = Files.newInputStream(keyStorePath)) {
keyStore.load(is, keyStorePass.toCharArray());
}
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
return httpClientBuilder.setSSLContext(sslcontext);
}
})
.build();
--------------------------------------------------
=== Others
For any other required configuration needed, the Apache HttpAsyncClient docs
should be consulted: https://hc.apache.org/httpcomponents-asyncclient-4.1.x/ .

View File

@ -0,0 +1,12 @@
[[java-rest]]
= Java REST Client
:version: 5.0.0-alpha4
include::overview.asciidoc[]
include::usage.asciidoc[]
include::configuration.asciidoc[]
include::sniffer.asciidoc[]

View File

@ -0,0 +1,42 @@
== Overview
Official low-level client for Elasticsearch. Allows to communicate with an
Elasticsearch cluster through http. Compatible with all elasticsearch versions.
=== Features
The low-level client's features include:
* minimal dependencies
* load balancing across all available nodes
* failover in case of node failures and upon specific response codes
* failed connection penalization (whether a failed node is retried depends on
how many consecutive times it failed; the more failed attempts the longer the
client will wait before trying that same node again)
* persistent connections
* trace logging of requests and responses
* optional automatic <<sniffer,discovery of cluster nodes>>
=== License
Copyright 2013-2016 Elasticsearch
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,136 @@
[[sniffer]]
== Sniffer
Minimal library that allows to automatically discover nodes from a running
Elasticsearch cluster and set them to an existing `RestClient` instance.
It retrieves by default the nodes that belong to the cluster using the
Nodes Info api and uses jackson to parse the obtained json response.
Compatible with Elasticsearch 2.x and onwards.
=== Maven Repository
Here is how you can configure the dependency using maven as a dependency manager.
Add the following to your `pom.xml` file:
["source","xml",subs="attributes"]
--------------------------------------------------
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>sniffer</artifactId>
<version>{version}</version>
</dependency>
--------------------------------------------------
The low-level REST client is subject to the same release cycle as
elasticsearch. Replace `${es.version}` with the desired sniffer version, first
released with `5.0.0-alpha4`. There is no relation between the sniffer version
and the elasticsearch version that the client can communicate with. Sniffer
supports fetching the nodes list from elasticsearch 2.x and onwards.
=== Usage
Once a `RestClient` instance has been created, a `Sniffer` can be associated
to it. The `Sniffer` will make use of the provided `RestClient` to periodically
(every 5 minutes by default) fetch the list of current nodes from the cluster
and update them by calling `RestClient#setHosts`.
[source,java]
--------------------------------------------------
Sniffer sniffer = Sniffer.builder(restClient).build();
--------------------------------------------------
It is important to close the `Sniffer` so that its background thread gets
properly shutdown and all of its resources are released. The `Sniffer`
object should have the same lifecycle as the `RestClient` and get closed
right before the client:
[source,java]
--------------------------------------------------
sniffer.close();
restClient.close();
--------------------------------------------------
The Elasticsearch Nodes Info api doesn't return the protocol to use when
connecting to the nodes but only their `host:port` key-pair, hence `http`
is used by default. In case `https` should be used instead, the
`ElasticsearchHostsSniffer` object has to be manually created and provided
as follows:
[source,java]
--------------------------------------------------
HostsSniffer hostsSniffer = new ElasticsearchHostsSniffer(restClient,
ElasticsearchHostsSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT,
ElasticsearchHostsSniffer.Scheme.HTTPS);
Sniffer sniffer = Sniffer.builder(restClient)
.setHostsSniffer(hostsSniffer).build();
--------------------------------------------------
In the same way it is also possible to customize the `sniffRequestTimeout`,
which defaults to one second. That is the `timeout` parameter provided as a
querystring parameter when calling the Nodes Info api, so that when the
timeout expires on the server side, a valid response is still returned
although it may contain only a subset of the nodes that are part of the
cluster, the ones that have responded until then.
Also, a custom `HostsSniffer` implementation can be provided for advanced
use-cases that may require fetching the hosts from external sources.
The `Sniffer` updates the nodes by default every 5 minutes. This interval can
be customized by providing it (in milliseconds) as follows:
[source,java]
--------------------------------------------------
Sniffer sniffer = Sniffer.builder(restClient)
.setSniffIntervalMillis(60000).build();
--------------------------------------------------
It is also possible to enable sniffing on failure, meaning that after each
failure the nodes list gets updated straightaway rather than at the following
ordinary sniffing round. In this case a `SniffOnFailureListener` needs to
be created at first and provided at `RestClient` creation. Also once the
`Sniffer` is later created, it needs to be associated with that same
`SniffOnFailureListener` instance, which will be notified at each failure
and use the `Sniffer` to perform the additional sniffing round as described.
[source,java]
--------------------------------------------------
SniffOnFailureListener sniffOnFailureListener = new SniffOnFailureListener();
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
.setFailureListener(sniffOnFailureListener).build();
Sniffer sniffer = Sniffer.builder(restClient).build();
sniffOnFailureListener.setSniffer(sniffer);
--------------------------------------------------
When using sniffing on failure, not only do the nodes get updated after each
failure, but an additional sniffing round is also scheduled sooner than usual,
by default one minute after the failure, assuming that things will go back to
normal and we want to detect that as soon as possible. Said interval can be
customized at `Sniffer` creation time as follows:
[source,java]
--------------------------------------------------
Sniffer sniffer = Sniffer.builder(restClient)
.setSniffAfterFailureDelayMillis(30000).build();
--------------------------------------------------
Note that this last configuration parameter has no effect in case sniffing
on failure is not enabled like explained above.
=== License
Copyright 2013-2016 Elasticsearch
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,227 @@
== Getting started
=== Maven Repository
The low-level Java REST client is hosted on
http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.elasticsearch.client%22[Maven
Central]. The minimum Java version required is `1.7`.
Here is how you can configure the dependency using maven as a dependency manager.
Add the following to your `pom.xml` file:
["source","xml",subs="attributes"]
--------------------------------------------------
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>rest</artifactId>
<version>{version}</version>
</dependency>
--------------------------------------------------
The low-level REST client is subject to the same release cycle as
elasticsearch. Replace `${es.version}` with the desired client version, first
released with `5.0.0-alpha4`. There is no relation between the client version
and the elasticsearch version that the client can communicate with. The
low-level REST client is compatible with all elasticsearch versions.
=== Dependencies
The low-level Java REST client internally uses the
http://hc.apache.org/httpcomponents-asyncclient-dev/[Apache Http Async Client]
to send http requests. It depends on the following artifacts, namely the async
http client and its own transitive dependencies:
- org.apache.httpcomponents:httpasyncclient
- org.apache.httpcomponents:httpcore-nio
- org.apache.httpcomponents:httpclient
- org.apache.httpcomponents:httpcore
- commons-codec:commons-codec
- commons-logging:commons-logging
=== Initialization
A `RestClient` instance can be built through the corresponding
`RestClientBuilder` class, created via `RestClient#builder(HttpHost...)`
static method. The only required argument is one or more hosts that the
client will communicate with, provided as instances of
https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpHost.html[HttpHost]
as follows:
[source,java]
--------------------------------------------------
RestClient restClient = RestClient.builder(
        new HttpHost("localhost", 9200, "http"),
        new HttpHost("localhost", 9201, "http")).build();
--------------------------------------------------
The `RestClient` class is thread-safe and ideally has the same lifecycle as
the application that uses it. It is important that it gets closed when no
longer needed so that all the resources used by it get properly released,
as well as the underlying http client instance and its threads:
[source,java]
--------------------------------------------------
restClient.close();
--------------------------------------------------
`RestClientBuilder` also allows to optionally set the following configuration
parameters while building the `RestClient` instance:
`setDefaultHeaders`:: default headers that need to be sent with each request,
to prevent having to specify them with each single request
`setMaxRetryTimeoutMillis`:: the timeout that should be honoured in case
multiple attempts are made for the same request. The default value is 10
seconds, same as the default socket timeout. In case the socket timeout is
customized, the maximum retry timeout should be adjusted accordingly
`setFailureListener`:: a listener that gets notified every time a node
fails, in case actions need to be taken. Used internally when sniffing on
failure is enabled
`setRequestConfigCallback`:: callback that allows to modify the default
request configuration (e.g. request timeouts, authentication, or anything that
the https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html[`org.apache.http.client.config.RequestConfig.Builder`]
allows to set)
`setHttpClientConfigCallback`:: callback that allows to modify the http client
configuration (e.g. encrypted communication over ssl, or anything that the
http://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`]
allows to set)
=== Performing requests
Once the `RestClient` has been created, requests can be sent by calling one of
the available `performRequest` method variants. The ones that return the
`Response` are executed synchronously, meaning that the client will block and
wait for a response to be returned. The `performRequest` variants that return
`void` accept a `ResponseListener` as an argument and are executed
asynchronously. The provided listener will be notified upon completion or
failure. The following are the arguments accepted by the different
`performRequest` methods:
`method`:: the http method or verb
`endpoint`:: the request path, which identifies the Elasticsearch api to
call (e.g. `/_cluster/health`)
`params`:: the optional parameters to be sent as querystring parameters
`entity`:: the optional request body enclosed in an
`org.apache.http.HttpEntity` object
`responseConsumer`:: the optional
http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http/nio/protocol/HttpAsyncResponseConsumer.html[`org.apache.http.nio.protocol.HttpAsyncResponseConsumer`]
callback. Controls how the response body gets streamed from a non-blocking
HTTP connection on the client side. When not provided, the default
implementation is used which buffers the whole response body in heap memory
`responseListener`:: the listener to be notified upon request success or failure
whenever the async `performRequest` method variants are used
`headers`:: optional request headers
=== Reading responses
The `Response` object, either returned by the sync `performRequest` methods or
received as an argument in `ResponseListener#onSuccess(Response)`, wraps the
response object returned by the http client and exposes the following information:
`getRequestLine`:: information about the performed request
`getHost`:: the host that returned the response
`getStatusLine`:: the response status line
`getHeaders`:: the response headers, which can also be retrieved by name
through `getHeader(String)`
`getEntity`:: the response body enclosed in an
https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpEntity.html[`org.apache.http.HttpEntity`]
object
When performing a request, an exception is thrown (or received as an argument
in `ResponseListener#onFailure(Exception)`) in the following scenarios:
`IOException`:: communication problem (e.g. SocketTimeoutException etc.)
`ResponseException`:: a response was returned, but its status code indicated
an error (either `4xx` or `5xx`). A `ResponseException` originates from a valid
http response, hence it exposes its corresponding `Response` object which gives
access to the returned response.
=== Example requests
Here are a couple of examples:
[source,java]
--------------------------------------------------
Response response = restClient.performRequest("GET", "/",
Collections.singletonMap("pretty", "true"));
System.out.println(EntityUtils.toString(response.getEntity()));
//index a document
HttpEntity entity = new NStringEntity(
"{\n" +
" \"user\" : \"kimchy\",\n" +
" \"post_date\" : \"2009-11-15T14:12:12\",\n" +
" \"message\" : \"trying out Elasticsearch\"\n" +
"}", ContentType.APPLICATION_JSON);
Response indexResponse = restClient.performRequest(
"PUT",
"/twitter/tweet/1",
Collections.<String, String>emptyMap(),
entity);
--------------------------------------------------
Note that the low-level client doesn't expose any helper for json marshalling
and un-marshalling. Users are free to use the library that they prefer for that
purpose.
The underlying Apache Async Http Client ships with different
https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/HttpEntity.html[`org.apache.http.HttpEntity`]
implementations that allow to provide the request body in different formats
(stream, byte array, string etc.). As for reading the response body, the
`HttpEntity#getContent` method comes handy which returns an `InputStream`
reading from the previously buffered response body. As an alternative, it is
possible to provide a custom
http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http/nio/protocol/HttpAsyncResponseConsumer.html[`org.apache.http.nio.protocol.HttpAsyncResponseConsumer`]
that controls how bytes are read and buffered.
The following is a basic example of how async requests can be sent:
[source,java]
--------------------------------------------------
int numRequests = 10;
final CountDownLatch latch = new CountDownLatch(numRequests);
for (int i = 0; i < numRequests; i++) {
restClient.performRequest(
"PUT",
"/twitter/tweet/" + i,
Collections.<String, String>emptyMap(),
//assume that the documents are stored in an entities array
entities[i],
new ResponseListener() {
@Override
public void onSuccess(Response response) {
System.out.println(response);
latch.countDown();
}
@Override
public void onFailure(Exception exception) {
latch.countDown();
}
}
);
}
//wait for all requests to be completed
latch.await();
--------------------------------------------------
=== Logging
The Java REST client uses the same logging library that the Apache Async Http
Client uses: https://commons.apache.org/proper/commons-logging/[Apache Commons Logging],
which comes with support for a number of popular logging implementations. The
java packages to enable logging for are `org.elasticsearch.client` for the
client itself and `org.elasticsearch.client.sniffer` for the sniffer.
The request tracer logging can also be enabled to log every request and
corresponding response in curl format. That comes handy when debugging, for
instance in case a request needs to be manually executed to check whether it
still yields the same response as it did. Enable trace logging for the `tracer`
package to have such log lines printed out. Do note that this type of logging is
expensive and should not be enabled at all times in production environments,
but rather temporarily used only when needed.

View File

@ -163,22 +163,22 @@ sudo bin/elasticsearch-plugin install analysis-icu --timeout 0
[float]
=== Proxy settings
To install a plugin via a proxy, you can pass the proxy details in with the
Java settings `proxyHost` and `proxyPort`. On Unix based systems, these
options can be set on the command line:
To install a plugin via a proxy, you can add the proxy details to the
`ES_JAVA_OPTS` environment variable with the Java settings `http.proxyHost`
and `http.proxyPort` (or `https.proxyHost` and `https.proxyPort`):
[source,shell]
-----------------------------------
sudo ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number" bin/elasticsearch-plugin install mobz/elasticsearch-head
sudo ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" bin/elasticsearch-plugin install analysis-icu
-----------------------------------
On Windows, they need to be added to the `ES_JAVA_OPTS` environment variable:
Or on Windows:
[source,shell]
-----------------------------------
set ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number"
------------------------------------
set ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number"
bin/elasticsearch-plugin install analysis-icu
-----------------------------------
------------------------------------
=== Plugins directory

View File

@ -160,7 +160,8 @@ NOTE: `shard_size` cannot be smaller than `size` (as it doesn't make much sens
override it and reset it to be equal to `size`.
The default `shard_size` is a multiple of the `size` parameter which is dependent on the number of shards.
The default `shard_size` will be `size` if the search request needs to go to a single shard, and `(size * 1.5 + 10)`
otherwise.
==== Calculating Document Count Error
@ -446,7 +447,7 @@ Generating the terms using a script:
"genres" : {
"terms" : {
"script" : {
"inline": "doc['genre'].value"
"inline": "doc['genre'].value",
"lang": "painless"
}
}

View File

@ -12,7 +12,7 @@ the new alias.
[source,js]
--------------------------------------------------
PUT /logs-0001 <1>
PUT /logs-000001 <1>
{
"aliases": {
"logs_write": {}
@ -28,18 +28,18 @@ POST logs_write/_rollover <2>
}
--------------------------------------------------
// CONSOLE
<1> Creates an index called `logs-0001` with the alias `logs_write`.
<1> Creates an index called `logs-000001` with the alias `logs_write`.
<2> If the index pointed to by `logs_write` was created 7 or more days ago, or
contains 1,000 or more documents, then the `logs-0002` index is created
and the `logs_write` alias is updated to point to `logs-0002`.
and the `logs_write` alias is updated to point to `logs-000002`.
The above request might return the following response:
[source,js]
--------------------------------------------------
{
"old_index": "logs-0001",
"new_index": "logs-0002",
"old_index": "logs-000001",
"new_index": "logs-000002",
"rolled_over": true, <1>
"dry_run": false, <2>
"conditions": { <3>
@ -56,8 +56,9 @@ The above request might return the following response:
=== Naming the new index
If the name of the existing index ends with `-` and a number -- e.g.
`logs-0001` -- then the name of the new index will follow the same pattern,
just incrementing the number (`logs-0002`).
`logs-000001` -- then the name of the new index will follow the same pattern,
incrementing the number (`logs-000002`). The number is zero-padded with a length
of 6, regardless of the old index name.
If the old name doesn't match this pattern then you must specify the name for
the new index as follows:
@ -80,7 +81,7 @@ override any values set in matching index templates. For example, the following
[source,js]
--------------------------------------------------
PUT /logs-0001
PUT /logs-000001
{
"aliases": {
"logs_write": {}
@ -108,7 +109,7 @@ checked without performing the actual rollover:
[source,js]
--------------------------------------------------
PUT /logs-0001
PUT /logs-000001
{
"aliases": {
"logs_write": {}

View File

@ -11,6 +11,20 @@ url entirely. If you add `?refresh=wait_for` Elasticsearch will wait for the
changes to become visible before replying to the request but won't take any
immediate refresh related action. See <<docs-refresh>>.
==== `created` field deprecated in the Index API
The `created` field has been deprecated in the Index API. It now returns
`operation`, returning `"operation": "create"` when it created a document and
`"operation": "index"` when it updated the document. This is also true for
`index` bulk operations.
==== `found` field deprecated in the Delete API
The `found` field has been deprecated in the Delete API. It now returns
`operation`, returning `"operation": "deleted"` when it deleted a document and
`"operation": "noop"` when it didn't find the document. This is also true for
`delete` bulk operations.
==== Reindex and Update By Query
Before 5.0.0 `_reindex` and `_update_by_query` only retried bulk failures so
they used the following response format:

View File

@ -62,7 +62,7 @@ All attribute values can be specified with wildcards, eg:
PUT _cluster/settings
{
"transient": {
"cluster.routing.allocation.include._ip": "192.168.2.*"
"cluster.routing.allocation.exclude._ip": "192.168.2.*"
}
}
------------------------

View File

@ -133,5 +133,5 @@ read or write operations, like the get index settings, put mapping and cluster s
`write`:: (default) Write operations will be rejected. Read operations will succeed, based on the last known cluster configuration.
This may result in partial reads of stale data as this node may be isolated from the rest of the cluster.
The `discovery.zen.no_master_block` setting doesn't apply to nodes based apis (for example cluster stats, node info and
node stats apis) which will not be blocked and try to execute on any node possible.
The `discovery.zen.no_master_block` setting doesn't apply to nodes-based apis (for example cluster stats, node info and
node stats apis). Requests to these apis will not be blocked and can run on any available node.

View File

@ -4,9 +4,9 @@
The local gateway module stores the cluster state and shard data across full
cluster restarts.
The following _static_ settings, which must be set on every data node in the
cluster, controls how long nodes should wait before they try to recover any
shards which are stored locally:
The following _static_ settings, which must be set on every master node,
control how long a freshly elected master should wait before it tries to
recover the cluster state and the cluster's data:
`gateway.expected_nodes`::
@ -48,4 +48,3 @@ as long as the following conditions are met:
Recover as long as this many data nodes have joined the cluster.
NOTE: These settings only take effect on a full cluster restart.

View File

@ -59,7 +59,7 @@ There are several thread pools, but the important ones include:
Changing a specific thread pool can be done by setting its type-specific parameters; for example, changing the `index`
thread pool to have more threads:
[source,js]
[source,yaml]
--------------------------------------------------
thread_pool:
index:
@ -87,7 +87,7 @@ requests that have no threads to execute them. By default, it is set to
`-1` which means it's unbounded. When a request comes in and the queue is
full, it will abort the request.
[source,js]
[source,yaml]
--------------------------------------------------
thread_pool:
index:
@ -105,7 +105,7 @@ the `core` and `max` parameters.
The `keep_alive` parameter determines how long a thread should be kept
around in the thread pool without it doing any work.
[source,js]
[source,yaml]
--------------------------------------------------
thread_pool:
warmer:
@ -122,5 +122,10 @@ settings are automatically set based on it. Sometimes, the number of processors
is wrongly detected; in such cases, the number of processors can be
explicitly set using the `processors` setting.
[source,yaml]
--------------------------------------------------
processors: 2
--------------------------------------------------
In order to check the number of processors detected, use the nodes info
API with the `os` flag.

View File

@ -54,23 +54,16 @@ GET /_search/template
------------------------------------------
[float]
===== Passing an array of strings
===== Converting parameters to JSON
The `{{toJson}}parameter{{/toJson}}` function can be used to convert parameters
like maps and arrays to their JSON representation:
[source,js]
------------------------------------------
GET /_search/template
{
"inline": {
"query": {
"terms": {
"status": [
"{{#status}}",
"{{.}}",
"{{/status}}"
]
}
}
},
"inline": "{ \"query\": { \"terms\": { \"status\": {{#toJson}}status{{/toJson}} }}}",
"params": {
"status": [ "pending", "published" ]
}
@ -82,9 +75,48 @@ which is rendered as:
[source,js]
------------------------------------------
{
"query": {
"query": {
"terms": {
"status": [ "pending", "published" ]
"status": [
"pending",
"published"
]
}
}
}
------------------------------------------
A more complex example substitutes an array of JSON objects:
[source,js]
------------------------------------------
{
"inline": "{\"query\":{\"bool\":{\"must\": {{#toJson}}clauses{{/toJson}} }}}",
"params": {
"clauses": [
{ "term": "foo" },
{ "term": "bar" }
]
}
}
------------------------------------------
which is rendered as:
[source,js]
------------------------------------------
{
"query" : {
"bool" : {
"must" : [
{
"term" : "foo"
},
{
"term" : "bar"
}
]
}
}
}
------------------------------------------
@ -223,45 +255,6 @@ for `end`:
}
------------------------------------------
[float]
===== Converting parameters to JSON
The `{{toJson}}parameter{{/toJson}}` function can be used to convert parameters
like maps and arrays to their JSON representation:
[source,js]
------------------------------------------
{
"inline": "{\"query\":{\"bool\":{\"must\": {{#toJson}}clauses{{/toJson}} }}}",
"params": {
"clauses": [
{ "term": "foo" },
{ "term": "bar" }
]
}
}
------------------------------------------
which is rendered as:
[source,js]
------------------------------------------
{
"query" : {
"bool" : {
"must" : [
{
"term" : "foo"
},
{
"term" : "bar"
}
]
}
}
}
------------------------------------------
[float]
===== Conditional clauses

View File

@ -203,7 +203,7 @@ public class ForEachProcessorTests extends ESTestCase {
));
processor.execute(ingestDocument);
List<String> result = ingestDocument.getFieldValue("values", List.class);
List<Object> result = ingestDocument.getFieldValue("values", List.class);
assertThat(result.get(0), equalTo("STRING"));
assertThat(result.get(1), equalTo(1));
assertThat(result.get(2), equalTo(null));

View File

@ -95,80 +95,95 @@ delimiter
| EOF
;
// Note we return the boolean s. This is returned as true
// if secondaries (postfixes) are allowed, otherwise, false.
// This prevents illegal secondaries from being appended to
// expressions using precedence that aren't variable/method chains.
expression returns [boolean s = true]
: u = unary[false] { $s = $u.s; } # single
| expression ( MUL | DIV | REM ) expression { $s = false; } # binary
| expression ( ADD | SUB ) expression { $s = false; } # binary
| expression ( FIND | MATCH ) expression { $s = false; } # binary
| expression ( LSH | RSH | USH ) expression { $s = false; } # binary
| expression ( LT | LTE | GT | GTE ) expression { $s = false; } # comp
| expression INSTANCEOF decltype { $s = false; } # instanceof
| expression ( EQ | EQR | NE | NER ) expression { $s = false; } # comp
| expression BWAND expression { $s = false; } # binary
| expression XOR expression { $s = false; } # binary
| expression BWOR expression { $s = false; } # binary
| expression BOOLAND expression { $s = false; } # bool
| expression BOOLOR expression { $s = false; } # bool
| <assoc=right> expression COND e0 = expression COLON e1 = expression { $s = $e0.s && $e1.s; } # conditional
// TODO: Should we allow crazy syntax like (x = 5).call()?
// Other crazy syntaxes work, but this one requires
// a complete restructure of the rules as EChain isn't
// designed to handle more postfixes after an assignment.
| <assoc=right> chain[true] ( ASSIGN | AADD | ASUB | AMUL |
expression
: unary # single
| expression ( MUL | DIV | REM ) expression # binary
| expression ( ADD | SUB ) expression # binary
| expression ( FIND | MATCH ) expression # binary
| expression ( LSH | RSH | USH ) expression # binary
| expression ( LT | LTE | GT | GTE ) expression # comp
| expression INSTANCEOF decltype # instanceof
| expression ( EQ | EQR | NE | NER ) expression # comp
| expression BWAND expression # binary
| expression XOR expression # binary
| expression BWOR expression # binary
| expression BOOLAND expression # bool
| expression BOOLOR expression # bool
| <assoc=right> expression COND expression COLON expression # conditional
| <assoc=right> expression ( ASSIGN | AADD | ASUB | AMUL |
ADIV | AREM | AAND | AXOR |
AOR | ALSH | ARSH | AUSH ) expression { $s = false; } # assignment
AOR | ALSH | ARSH | AUSH ) expression # assignment
;
// Note we take in the boolean c. This is used to indicate
// whether or not this rule was called when we are already
// processing a variable/method chain. This prevents the chain
// from being applied to rules where it wouldn't be allowed.
unary[boolean c] returns [boolean s = true]
: { !$c }? ( INCR | DECR ) chain[true] # pre
| { !$c }? chain[true] (INCR | DECR ) # post
| { !$c }? chain[false] # read
| { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } # numeric
| { !$c }? TRUE { $s = false; } # true
| { !$c }? FALSE { $s = false; } # false
| { !$c }? NULL { $s = false; } # null
| { !$c }? listinitializer { $s = false; } # listinit
| { !$c }? mapinitializer { $s = false; } # mapinit
| { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false] # operator
| LP decltype RP unary[$c] # cast
unary
: ( INCR | DECR ) chain # pre
| chain (INCR | DECR ) # post
| chain # read
| ( BOOLNOT | BWNOT | ADD | SUB ) unary # operator
| LP decltype RP unary # cast
;
chain[boolean c]
: p = primary[$c] secondary[$p.s]* # dynamic
| decltype dot secondary[true]* # static
chain
: primary postfix* # dynamic
| decltype postdot postfix* # static
| arrayinitializer # newarray
;
primary[boolean c] returns [boolean s = true]
: { !$c }? LP e = expression RP { $s = $e.s; } # exprprec
| { $c }? LP unary[true] RP # chainprec
primary
: LP expression RP # precedence
| ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric
| TRUE # true
| FALSE # false
| NULL # null
| STRING # string
| REGEX # regex
| listinitializer # listinit
| mapinitializer # mapinit
| ID # variable
| ID arguments # calllocal
| NEW TYPE arguments # newobject
;
secondary[boolean s]
: { $s }? dot
| { $s }? brace
postfix
: callinvoke
| fieldaccess
| braceaccess
;
dot
: DOT DOTID arguments # callinvoke
| DOT ( DOTID | DOTINTEGER ) # fieldaccess
postdot
: callinvoke
| fieldaccess
;
brace
: LBRACE expression RBRACE # braceaccess
callinvoke
: DOT DOTID arguments
;
fieldaccess
: DOT ( DOTID | DOTINTEGER )
;
braceaccess
: LBRACE expression RBRACE
;
arrayinitializer
: NEW TYPE ( LBRACE expression RBRACE )+ ( postdot postfix* )? # newstandardarray
| NEW TYPE LBRACE RBRACE LBRACK ( expression ( COMMA expression )* )? SEMICOLON? RBRACK postfix* # newinitializedarray
;
listinitializer
: LBRACE expression ( COMMA expression)* RBRACE
| LBRACE RBRACE
;
mapinitializer
: LBRACE maptoken ( COMMA maptoken )* RBRACE
| LBRACE COLON RBRACE
;
maptoken
: expression COLON expression
;
arguments
@ -190,49 +205,10 @@ lamtype
;
funcref
: classFuncref
| constructorFuncref
| capturingFuncref
| localFuncref
;
// reference to a static or instance method, e.g. ArrayList::size or Integer::compare
classFuncref
: TYPE REF ID
;
// reference to a constructor, e.g. ArrayList::new
// currently limited to simple non-array types
constructorFuncref
: decltype REF NEW
;
// reference to an instance method, e.g. object::toString
// currently limited to capture of a simple variable (id).
capturingFuncref
: ID REF ID
;
// reference to a local function, e.g. this::myfunc
localFuncref
: THIS REF ID
;
arrayinitializer
: NEW TYPE (LBRACE expression RBRACE)+ (dot secondary[true]*)? # newstandardarray
| NEW TYPE LBRACE RBRACE LBRACK ( expression ( COMMA expression )* )? SEMICOLON? RBRACK # newinitializedarray
;
listinitializer
: LBRACE expression ( COMMA expression)* RBRACE
| LBRACE RBRACE
;
mapinitializer
: LBRACE maptoken ( COMMA maptoken )* RBRACE
| LBRACE COLON RBRACE
;
maptoken
: expression COLON expression
: TYPE REF ID # classfuncref // reference to a static or instance method,
// e.g. ArrayList::size or Integer::compare
| decltype REF NEW # constructorfuncref // reference to a constructor, e.g. ArrayList::new
| ID REF ID # capturingfuncref // reference to an instance method, e.g. object::toString
// currently limited to capture of a simple variable (id).
| THIS REF ID # localfuncref // reference to a local function, e.g. this::myfunc
;

View File

@ -91,43 +91,47 @@ public final class Definition {
public static final Type MATCHER_TYPE = getType("Matcher");
public enum Sort {
VOID( void.class , 0 , true , false , false , false ),
BOOL( boolean.class , 1 , true , true , false , true ),
BYTE( byte.class , 1 , true , false , true , true ),
SHORT( short.class , 1 , true , false , true , true ),
CHAR( char.class , 1 , true , false , true , true ),
INT( int.class , 1 , true , false , true , true ),
LONG( long.class , 2 , true , false , true , true ),
FLOAT( float.class , 1 , true , false , true , true ),
DOUBLE( double.class , 2 , true , false , true , true ),
VOID( void.class , Void.class , null , 0 , true , false , false , false ),
BOOL( boolean.class , Boolean.class , null , 1 , true , true , false , true ),
BYTE( byte.class , Byte.class , null , 1 , true , false , true , true ),
SHORT( short.class , Short.class , null , 1 , true , false , true , true ),
CHAR( char.class , Character.class , null , 1 , true , false , true , true ),
INT( int.class , Integer.class , null , 1 , true , false , true , true ),
LONG( long.class , Long.class , null , 2 , true , false , true , true ),
FLOAT( float.class , Float.class , null , 1 , true , false , true , true ),
DOUBLE( double.class , Double.class , null , 2 , true , false , true , true ),
VOID_OBJ( Void.class , 1 , true , false , false , false ),
BOOL_OBJ( Boolean.class , 1 , false , true , false , false ),
BYTE_OBJ( Byte.class , 1 , false , false , true , false ),
SHORT_OBJ( Short.class , 1 , false , false , true , false ),
CHAR_OBJ( Character.class , 1 , false , false , true , false ),
INT_OBJ( Integer.class , 1 , false , false , true , false ),
LONG_OBJ( Long.class , 1 , false , false , true , false ),
FLOAT_OBJ( Float.class , 1 , false , false , true , false ),
DOUBLE_OBJ( Double.class , 1 , false , false , true , false ),
VOID_OBJ( Void.class , null , void.class , 1 , true , false , false , false ),
BOOL_OBJ( Boolean.class , null , boolean.class , 1 , false , true , false , false ),
BYTE_OBJ( Byte.class , null , byte.class , 1 , false , false , true , false ),
SHORT_OBJ( Short.class , null , short.class , 1 , false , false , true , false ),
CHAR_OBJ( Character.class , null , char.class , 1 , false , false , true , false ),
INT_OBJ( Integer.class , null , int.class , 1 , false , false , true , false ),
LONG_OBJ( Long.class , null , long.class , 1 , false , false , true , false ),
FLOAT_OBJ( Float.class , null , float.class , 1 , false , false , true , false ),
DOUBLE_OBJ( Double.class , null , double.class , 1 , false , false , true , false ),
NUMBER( Number.class , 1 , false , false , false , false ),
STRING( String.class , 1 , false , false , false , true ),
NUMBER( Number.class , null , null , 1 , false , false , false , false ),
STRING( String.class , null , null , 1 , false , false , false , true ),
OBJECT( null , 1 , false , false , false , false ),
DEF( null , 1 , false , false , false , false ),
ARRAY( null , 1 , false , false , false , false );
OBJECT( null , null , null , 1 , false , false , false , false ),
DEF( null , null , null , 1 , false , false , false , false ),
ARRAY( null , null , null , 1 , false , false , false , false );
public final Class<?> clazz;
public final Class<?> boxed;
public final Class<?> unboxed;
public final int size;
public final boolean primitive;
public final boolean bool;
public final boolean numeric;
public final boolean constant;
Sort(final Class<?> clazz, final int size, final boolean primitive,
final boolean bool, final boolean numeric, final boolean constant) {
Sort(final Class<?> clazz, final Class<?> boxed, final Class<?> unboxed, final int size,
final boolean primitive, final boolean bool, final boolean numeric, final boolean constant) {
this.clazz = clazz;
this.boxed = boxed;
this.unboxed = unboxed;
this.size = size;
this.bool = bool;
this.primitive = primitive;

View File

@ -1,49 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
import org.antlr.v4.runtime.misc.Interval;
import org.elasticsearch.painless.Location;
/**
* A lexer that will override the default error behavior to fail on the first error.
*/
final class ErrorHandlingLexer extends PainlessLexer {
final String sourceName;
ErrorHandlingLexer(CharStream charStream, String sourceName) {
super(charStream);
this.sourceName = sourceName;
// Replace the TokenFactory with a stashing wrapper so we can do token-level lookbehind for regex detection
_factory = new StashingTokenFactory<>(_factory);
}
@Override
public void recover(final LexerNoViableAltException lnvae) {
final CharStream charStream = lnvae.getInputStream();
final int startIndex = lnvae.getStartIndex();
final String text = charStream.getText(Interval.of(startIndex, charStream.index()));
Location location = new Location(sourceName, _tokenStartCharIndex);
throw location.createError(new IllegalArgumentException("unexpected character [" + getErrorDisplay(text) + "].", lnvae));
}
}

View File

@ -263,48 +263,6 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -346,14 +304,35 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExprprec(PainlessParser.ExprprecContext ctx) { return visitChildren(ctx); }
@Override public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitChainprec(PainlessParser.ChainprecContext ctx) { return visitChildren(ctx); }
@Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -368,6 +347,20 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRegex(PainlessParser.RegexContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -395,7 +388,14 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSecondary(PainlessParser.SecondaryContext ctx) { return visitChildren(ctx); }
@Override public T visitPostfix(PainlessParser.PostfixContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPostdot(PainlessParser.PostdotContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -417,69 +417,6 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBraceaccess(PainlessParser.BraceaccessContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArgument(PainlessParser.ArgumentContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLambda(PainlessParser.LambdaContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLamtype(PainlessParser.LamtypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFuncref(PainlessParser.FuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitClassFuncref(PainlessParser.ClassFuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConstructorFuncref(PainlessParser.ConstructorFuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCapturingFuncref(PainlessParser.CapturingFuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -515,4 +452,60 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMaptoken(PainlessParser.MaptokenContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArgument(PainlessParser.ArgumentContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLambda(PainlessParser.LambdaContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLamtype(PainlessParser.LamtypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCapturingfuncref(PainlessParser.CapturingfuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx) { return visitChildren(ctx); }
}

View File

@ -249,48 +249,6 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitRead(PainlessParser.ReadContext ctx);
/**
* Visit a parse tree produced by the {@code numeric}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNumeric(PainlessParser.NumericContext ctx);
/**
* Visit a parse tree produced by the {@code true}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTrue(PainlessParser.TrueContext ctx);
/**
* Visit a parse tree produced by the {@code false}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFalse(PainlessParser.FalseContext ctx);
/**
* Visit a parse tree produced by the {@code null}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNull(PainlessParser.NullContext ctx);
/**
* Visit a parse tree produced by the {@code listinit}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitListinit(PainlessParser.ListinitContext ctx);
/**
* Visit a parse tree produced by the {@code mapinit}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMapinit(PainlessParser.MapinitContext ctx);
/**
* Visit a parse tree produced by the {@code operator}
* labeled alternative in {@link PainlessParser#unary}.
@ -327,19 +285,40 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitNewarray(PainlessParser.NewarrayContext ctx);
/**
* Visit a parse tree produced by the {@code exprprec}
* Visit a parse tree produced by the {@code precedence}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExprprec(PainlessParser.ExprprecContext ctx);
T visitPrecedence(PainlessParser.PrecedenceContext ctx);
/**
* Visit a parse tree produced by the {@code chainprec}
* Visit a parse tree produced by the {@code numeric}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitChainprec(PainlessParser.ChainprecContext ctx);
T visitNumeric(PainlessParser.NumericContext ctx);
/**
* Visit a parse tree produced by the {@code true}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTrue(PainlessParser.TrueContext ctx);
/**
* Visit a parse tree produced by the {@code false}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFalse(PainlessParser.FalseContext ctx);
/**
* Visit a parse tree produced by the {@code null}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNull(PainlessParser.NullContext ctx);
/**
* Visit a parse tree produced by the {@code string}
* labeled alternative in {@link PainlessParser#primary}.
@ -354,6 +333,20 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitRegex(PainlessParser.RegexContext ctx);
/**
* Visit a parse tree produced by the {@code listinit}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitListinit(PainlessParser.ListinitContext ctx);
/**
* Visit a parse tree produced by the {@code mapinit}
* labeled alternative in {@link PainlessParser#primary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMapinit(PainlessParser.MapinitContext ctx);
/**
* Visit a parse tree produced by the {@code variable}
* labeled alternative in {@link PainlessParser#primary}.
@ -376,86 +369,35 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitNewobject(PainlessParser.NewobjectContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#secondary}.
* Visit a parse tree produced by {@link PainlessParser#postfix}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitSecondary(PainlessParser.SecondaryContext ctx);
T visitPostfix(PainlessParser.PostfixContext ctx);
/**
* Visit a parse tree produced by the {@code callinvoke}
* labeled alternative in {@link PainlessParser#dot}.
* Visit a parse tree produced by {@link PainlessParser#postdot}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPostdot(PainlessParser.PostdotContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#callinvoke}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCallinvoke(PainlessParser.CallinvokeContext ctx);
/**
* Visit a parse tree produced by the {@code fieldaccess}
* labeled alternative in {@link PainlessParser#dot}.
* Visit a parse tree produced by {@link PainlessParser#fieldaccess}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFieldaccess(PainlessParser.FieldaccessContext ctx);
/**
* Visit a parse tree produced by the {@code braceaccess}
* labeled alternative in {@link PainlessParser#brace}.
* Visit a parse tree produced by {@link PainlessParser#braceaccess}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitBraceaccess(PainlessParser.BraceaccessContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#arguments}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArguments(PainlessParser.ArgumentsContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#argument}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArgument(PainlessParser.ArgumentContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#lambda}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLambda(PainlessParser.LambdaContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#lamtype}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLamtype(PainlessParser.LamtypeContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#funcref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFuncref(PainlessParser.FuncrefContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#classFuncref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitClassFuncref(PainlessParser.ClassFuncrefContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#constructorFuncref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitConstructorFuncref(PainlessParser.ConstructorFuncrefContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#capturingFuncref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCapturingFuncref(PainlessParser.CapturingFuncrefContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#localFuncref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx);
/**
* Visit a parse tree produced by the {@code newstandardarray}
* labeled alternative in {@link PainlessParser#arrayinitializer}.
@ -488,4 +430,56 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitMaptoken(PainlessParser.MaptokenContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#arguments}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArguments(PainlessParser.ArgumentsContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#argument}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArgument(PainlessParser.ArgumentContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#lambda}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLambda(PainlessParser.LambdaContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#lamtype}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLamtype(PainlessParser.LamtypeContext ctx);
/**
* Visit a parse tree produced by the {@code classfuncref}
* labeled alternative in {@link PainlessParser#funcref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx);
/**
* Visit a parse tree produced by the {@code constructorfuncref}
* labeled alternative in {@link PainlessParser#funcref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx);
/**
* Visit a parse tree produced by the {@code capturingfuncref}
* labeled alternative in {@link PainlessParser#funcref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCapturingfuncref(PainlessParser.CapturingfuncrefContext ctx);
/**
* Visit a parse tree produced by the {@code localfuncref}
* labeled alternative in {@link PainlessParser#funcref}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx);
}

View File

@ -19,39 +19,47 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Locals;
import org.objectweb.asm.Label;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Location;
import java.util.Objects;
/**
* The superclass for all E* (expression) nodes.
* The superclass for all E* (expression) and P* (postfix) nodes.
*/
public abstract class AExpression extends ANode {
/**
* Prefix is the predecessor to this node in a variable chain.
* This is used to analyze and write variable chains in a
* more natural order since the parent node of a variable
* chain will want the data from the final postfix to be
* analyzed.
*/
AExpression prefix;
/**
* Set to false when an expression will not be read from such as
* a basic assignment. Note this variable is always set by the parent
* as input.
*/
protected boolean read = true;
boolean read = true;
/**
* Set to true when an expression can be considered a stand alone
* statement. Used to prevent extraneous bytecode. This is always
* set by the node as output.
*/
protected boolean statement = false;
boolean statement = false;
/**
* Set to the expected type this node needs to be. Note this variable
* is always set by the parent as input and should never be read from.
*/
protected Type expected = null;
Type expected = null;
/**
* Set to the actual type this node is. Note this variable is always
@ -59,19 +67,19 @@ public abstract class AExpression extends ANode {
* node itself. <b>Also, actual can always be read after a cast is
* called on this node to get the type of the node after the cast.</b>
*/
protected Type actual = null;
Type actual = null;
/**
* Set by {@link EExplicit} if a cast made on an expression node should be
* explicit.
*/
protected boolean explicit = false;
boolean explicit = false;
/**
* Set to true if a cast is allowed to boxed/unboxed. This is used
* for method arguments because casting may be required.
*/
protected boolean internal = false;
boolean internal = false;
/**
* Set to the value of the constant this expression node represents if
@ -79,40 +87,30 @@ public abstract class AExpression extends ANode {
* this node will be replaced by an {@link EConstant} during casting
* if it's not already one.
*/
protected Object constant = null;
Object constant = null;
/**
* Set to true by {@link ENull} to represent a null value.
*/
protected boolean isNull = false;
boolean isNull = false;
/**
* If an expression represents a branch statement, represents the jump should
* the expression evaluate to a true value. It should always be the case that only
* one of tru and fals are non-null or both are null. Only used during the writing phase.
* Standard constructor with location used for error tracking.
*/
protected Label tru = null;
/**
* If an expression represents a branch statement, represents the jump should
* the expression evaluate to a false value. It should always be the case that only
* one of tru and fals are non-null or both are null. Only used during the writing phase.
*/
protected Label fals = null;
public AExpression(Location location) {
AExpression(Location location) {
super(location);
prefix = null;
}
/**
* Checks for errors and collects data for the writing phase.
* This constructor is used by variable/method chains when postfixes are specified.
*/
abstract void analyze(Locals locals);
AExpression(Location location, AExpression prefix) {
super(location);
/**
* Writes ASM based on the data collected during the analysis phase.
*/
abstract void write(MethodWriter writer, Globals globals);
this.prefix = Objects.requireNonNull(prefix);
}
/**
* Inserts {@link ECast} nodes into the tree for implicit casts. Also replaces
@ -120,7 +118,7 @@ public abstract class AExpression extends ANode {
* @return The new child node for the parent node calling this method.
*/
AExpression cast(Locals locals) {
final Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal);
Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal);
if (cast == null) {
if (constant == null || this instanceof EConstant) {

View File

@ -1,121 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
/**
* The superclass for all L* (link) nodes.
*/
public abstract class ALink extends ANode {
/**
* Size is set to a value based on this link's size on the stack. This is
* used during the writing phase to dup stack values from this link as
* necessary during certain store operations.
*/
final int size;
/**
* Set to false only if the link is not going to be read from.
*/
boolean load = true;
/**
* Set to true only if the link is going to be written to and
* is the final link in a chain.
*/
boolean store = false;
/**
* Set to true if this link represents a statik type to be accessed.
*/
boolean statik = false;
/**
* Set by the parent chain to type of the previous link or null if
* there was no previous link.
*/
Type before = null;
/**
* Set by the link to be the type after the link has been loaded/stored.
*/
Type after = null;
/**
* Set to true if this link could be a stand-alone statement.
*/
boolean statement = false;
/**
* Used by {@link LString} to set the value of the String constant. Also
* used by shortcuts to represent a constant key.
*/
String string = null;
ALink(Location location, int size) {
super(location);
this.size = size;
}
/**
* Checks for errors and collects data for the writing phase.
* @return Possibly returns a different {@link ALink} node if a type is
* def or a shortcut is used. Otherwise, returns itself. This will be
* updated into the {@link EChain} node's list of links.
*/
abstract ALink analyze(Locals locals);
/**
* Write values before a load/store occurs such as an array index.
*/
abstract void write(MethodWriter writer, Globals globals);
/**
* Write a load for the specific link type.
*/
abstract void load(MethodWriter writer, Globals globals);
/**
* Write a store for the specific link type.
*/
abstract void store(MethodWriter writer, Globals globals);
/**
* Used to copy link data from one to another during analysis in the case of replacement.
*/
final ALink copy(ALink link) {
load = link.load;
store = link.store;
statik = link.statik;
before = link.before;
after = link.after;
statement = link.statement;
string = link.string;
return this;
}
}

View File

@ -19,20 +19,27 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import java.util.Objects;
import java.util.Set;
/**
* The superclass for all other nodes.
* The superclass for all nodes.
*/
public abstract class ANode {
/**
* The identifier of the script and character offset used for debugging and errors.
*/
final Location location;
/**
* Standard constructor with location used for error tracking.
*/
ANode(Location location) {
this.location = Objects.requireNonNull(location);
}
@ -45,7 +52,17 @@ public abstract class ANode {
*/
abstract void extractVariables(Set<String> variables);
public RuntimeException createError(RuntimeException exception) {
/**
* Checks for errors and collects data for the writing phase.
*/
abstract void analyze(Locals locals);
/**
* Writes ASM based on the data collected during the analysis phase.
*/
abstract void write(MethodWriter writer, Globals globals);
RuntimeException createError(RuntimeException exception) {
return location.createError(exception);
}
}

View File

@ -19,12 +19,9 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;
import org.objectweb.asm.Label;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Label;
/**
* The superclass for all S* (statement) nodes.
@ -110,17 +107,10 @@ public abstract class AStatement extends ANode {
*/
Label brake = null;
/**
* Standard constructor with location used for error tracking.
*/
AStatement(Location location) {
super(location);
}
/**
* Checks for errors and collects data for the writing phase.
*/
abstract void analyze(Locals locals);
/**
* Writes ASM based on the data collected during the analysis phase.
*/
abstract void write(MethodWriter writer, Globals globals);
}

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import java.util.Objects;
/**
* The super class for an expression that can store a value in local memory.
*/
abstract class AStoreable extends AExpression {
/**
* Set to true when this node is an lhs-expression and will be storing
* a value from an rhs-expression.
*/
boolean write = false;
/**
* Standard constructor with location used for error tracking.
*/
AStoreable(Location location) {
super(location);
prefix = null;
}
/**
* This constructor is used by variable/method chains when postfixes are specified.
*/
AStoreable(Location location, AExpression prefix) {
super(location);
this.prefix = Objects.requireNonNull(prefix);
}
/**
* Returns a value based on the number of elements previously placed on the
* stack to load/store a certain piece of a variable/method chain. This is
* used during the writing phase to dup stack values from this storeable as
* necessary during certain store operations.
* <p>
* Examples:
* {@link EVariable} returns 0 because it requires nothing extra to perform
* a load/store
* {@link PSubField} returns 1 because it requires the name of the field as
* an index on the stack to perform a load/store
* {@link PSubBrace} returns 2 because it requires both the variable slot and
* an index into the array on the stack to perform a
* load/store
*/
abstract int accessElementCount();
/**
* Returns true if this node or a sub-node of this node can be optimized with
* rhs actual type to avoid an unnecessary cast.
*/
abstract boolean isDefOptimized();
/**
* If this node or a sub-node of this node uses dynamic calls then
* actual will be set to this value. This is used for an optimization
* during assignment to def type targets.
*/
abstract void updateActual(Type actual);
/**
* Called before a storeable node is loaded or stored. Used to load prefixes and
* push load/store constants onto the stack if necessary.
*/
abstract void setup(MethodWriter writer, Globals globals);
/**
* Called to load a storable used for compound assignments.
*/
abstract void load(MethodWriter writer, Globals globals);
/**
* Called to store a storabable to local memory.
*/
abstract void store(MethodWriter writer, Globals globals);
}

View File

@ -0,0 +1,325 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Operation;
import java.util.Objects;
import java.util.Set;
/**
* Represents an assignment with the lhs and rhs as child nodes.
*/
public final class EAssignment extends AExpression {
private AExpression lhs;
private AExpression rhs;
private final boolean pre;
private final boolean post;
private Operation operation;
private boolean cat = false;
private Type promote = null;
private Type shiftDistance; // for shifts, the RHS is promoted independently
private Cast there = null;
private Cast back = null;
public EAssignment(Location location, AExpression lhs, AExpression rhs, boolean pre, boolean post, Operation operation) {
super(location);
this.lhs = Objects.requireNonNull(lhs);
this.rhs = rhs;
this.pre = pre;
this.post = post;
this.operation = operation;
}
@Override
void extractVariables(Set<String> variables) {
lhs.extractVariables(variables);
rhs.extractVariables(variables);
}
@Override
void analyze(Locals locals) {
analyzeLHS(locals);
analyzeIncrDecr();
if (operation != null) {
analyzeCompound(locals);
} else if (rhs != null) {
analyzeSimple(locals);
} else {
throw new IllegalStateException("Illegal tree structure.");
}
}
private void analyzeLHS(Locals locals) {
if (lhs instanceof AStoreable) {
AStoreable lhs = (AStoreable)this.lhs;
lhs.read = read;
lhs.write = true;
lhs.analyze(locals);
} else {
throw new IllegalArgumentException("Left-hand side cannot be assigned a value.");
}
}
private void analyzeIncrDecr() {
if (pre && post) {
throw createError(new IllegalStateException("Illegal tree structure."));
} else if (pre || post) {
if (rhs != null) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
Sort sort = lhs.actual.sort;
if (operation == Operation.INCR) {
if (sort == Sort.DOUBLE) {
rhs = new EConstant(location, 1D);
} else if (sort == Sort.FLOAT) {
rhs = new EConstant(location, 1F);
} else if (sort == Sort.LONG) {
rhs = new EConstant(location, 1L);
} else {
rhs = new EConstant(location, 1);
}
operation = Operation.ADD;
} else if (operation == Operation.DECR) {
if (sort == Sort.DOUBLE) {
rhs = new EConstant(location, 1D);
} else if (sort == Sort.FLOAT) {
rhs = new EConstant(location, 1F);
} else if (sort == Sort.LONG) {
rhs = new EConstant(location, 1L);
} else {
rhs = new EConstant(location, 1);
}
operation = Operation.SUB;
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}
}
private void analyzeCompound(Locals locals) {
rhs.analyze(locals);
boolean shift = false;
if (operation == Operation.MUL) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true);
} else if (operation == Operation.DIV) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true);
} else if (operation == Operation.REM) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true);
} else if (operation == Operation.ADD) {
promote = AnalyzerCaster.promoteAdd(lhs.actual, rhs.actual);
} else if (operation == Operation.SUB) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true);
} else if (operation == Operation.LSH) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, false);
shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false);
shift = true;
} else if (operation == Operation.RSH) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, false);
shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false);
shift = true;
} else if (operation == Operation.USH) {
promote = AnalyzerCaster.promoteNumeric(lhs.actual, false);
shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false);
shift = true;
} else if (operation == Operation.BWAND) {
promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual);
} else if (operation == Operation.XOR) {
promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual);
} else if (operation == Operation.BWOR) {
promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual);
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
if (promote == null || (shift && shiftDistance == null)) {
throw createError(new ClassCastException("Cannot apply compound assignment " +
"[" + operation.symbol + "=] to types [" + lhs.actual + "] and [" + rhs.actual + "]."));
}
cat = operation == Operation.ADD && promote.sort == Sort.STRING;
if (cat) {
if (rhs instanceof EBinary && ((EBinary)rhs).operation == Operation.ADD && rhs.actual.sort == Sort.STRING) {
((EBinary)rhs).cat = true;
}
rhs.expected = rhs.actual;
} else if (shift) {
if (promote.sort == Sort.DEF) {
// shifts are promoted independently, but for the def type, we need object.
rhs.expected = promote;
} else if (shiftDistance.sort == Sort.LONG) {
rhs.expected = Definition.INT_TYPE;
rhs.explicit = true;
} else {
rhs.expected = shiftDistance;
}
} else {
rhs.expected = promote;
}
rhs = rhs.cast(locals);
there = AnalyzerCaster.getLegalCast(location, lhs.actual, promote, false, false);
back = AnalyzerCaster.getLegalCast(location, promote, lhs.actual, true, false);
this.statement = true;
this.actual = read ? lhs.actual : Definition.VOID_TYPE;
}
private void analyzeSimple(Locals locals) {
AStoreable lhs = (AStoreable)this.lhs;
// If the lhs node is a def optimized node we update the actual type to remove the need for a cast.
if (lhs.isDefOptimized()) {
rhs.analyze(locals);
rhs.expected = rhs.actual;
lhs.updateActual(rhs.actual);
// Otherwise, we must adapt the rhs type to the lhs type with a cast.
} else {
rhs.expected = lhs.actual;
rhs.analyze(locals);
}
rhs = rhs.cast(locals);
this.statement = true;
this.actual = read ? lhs.actual : Definition.VOID_TYPE;
}
/**
* Handles writing byte code for variable/method chains for all given possibilities
* including String concatenation, compound assignment, regular assignment, and simple
* reads. Includes proper duplication for chained assignments and assignments that are
* also read from.
*/
@Override
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
// For the case where the assignment represents a String concatenation
// we must, depending on the Java version, write a StringBuilder or
// track types going onto the stack. This must be done before the
// lhs is read because we need the StringBuilder to be placed on the
// stack ahead of any potential concatenation arguments.
int catElementStackSize = 0;
if (cat) {
catElementStackSize = writer.writeNewStrings();
}
// Cast the lhs to a storeable to perform the necessary operations to store the rhs.
AStoreable lhs = (AStoreable)this.lhs;
lhs.setup(writer, globals); // call the setup method on the lhs to prepare for a load/store operation
if (cat) {
// Handle the case where we are doing a compound assignment
// representing a String concatenation.
writer.writeDup(lhs.accessElementCount(), catElementStackSize); // dup the top element and insert it
// before concat helper on stack
lhs.load(writer, globals); // read the current lhs's value
writer.writeAppendStrings(lhs.actual); // append the lhs's value using the StringBuilder
rhs.write(writer, globals); // write the bytecode for the rhs
if (!(rhs instanceof EBinary) || ((EBinary)rhs).cat) {
writer.writeAppendStrings(rhs.actual); // append the rhs's value unless it's also a concatenation
}
writer.writeToStrings(); // put the value for string concat onto the stack
writer.writeCast(back); // if necessary, cast the String to the lhs actual type
if (lhs.read) {
writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // if this lhs is also read
// from dup the value onto the stack
}
lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array
} else if (operation != null) {
// Handle the case where we are doing a compound assignment that
// does not represent a String concatenation.
writer.writeDup(lhs.accessElementCount(), 0); // if necessary, dup the previous lhs's value
// to be both loaded from and stored to
lhs.load(writer, globals); // load the current lhs's value
if (lhs.read && post) {
writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also
// read from and is a post increment
}
writer.writeCast(there); // if necessary cast the current lhs's value
// to the promotion type between the lhs and rhs types
rhs.write(writer, globals); // write the bytecode for the rhs
// XXX: fix these types, but first we need def compound assignment tests.
// its tricky here as there are possibly explicit casts, too.
// write the operation instruction for compound assignment
if (promote.sort == Sort.DEF) {
writer.writeDynamicBinaryInstruction(location, promote,
Definition.DEF_TYPE, Definition.DEF_TYPE, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT);
} else {
writer.writeBinaryInstruction(location, promote, operation);
}
writer.writeCast(back); // if necessary cast the promotion type value back to the lhs's type
if (lhs.read && !post) {
writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also
// read from and is not a post increment
}
lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array
} else {
// Handle the case for a simple write.
rhs.write(writer, globals); // write the bytecode for the rhs rhs
if (lhs.read) {
writer.writeDup(lhs.actual.sort.size, lhs.accessElementCount()); // dup the value if the lhs is also read from
}
lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array
}
}
}

View File

@ -22,18 +22,17 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import java.util.Objects;
import java.util.Set;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.WriterConstants;
import org.elasticsearch.painless.Locals;
import java.util.Objects;
import java.util.Set;
/**
* Represents a binary math expression.
@ -41,13 +40,13 @@ import org.elasticsearch.painless.Locals;
public final class EBinary extends AExpression {
final Operation operation;
AExpression left;
AExpression right;
Type promote; // promoted type
Type shiftDistance; // for shifts, the RHS is promoted independently
private AExpression left;
private AExpression right;
private Type promote = null; // promoted type
private Type shiftDistance = null; // for shifts, the rhs is promoted independently
boolean cat = false;
boolean originallyExplicit = false; // record whether there was originally an explicit cast
private boolean originallyExplicit = false; // record whether there was originally an explicit cast
public EBinary(Location location, Operation operation, AExpression left, AExpression right) {
super(location);
@ -66,6 +65,7 @@ public final class EBinary extends AExpression {
@Override
void analyze(Locals locals) {
originallyExplicit = explicit;
if (operation == Operation.MUL) {
analyzeMul(locals);
} else if (operation == Operation.DIV) {
@ -153,9 +153,11 @@ public final class EBinary extends AExpression {
}
actual = promote;
if (promote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
@ -182,8 +184,8 @@ public final class EBinary extends AExpression {
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
} catch (ArithmeticException e) {
throw createError(e);
} catch (ArithmeticException exception) {
throw createError(exception);
}
}
}
@ -204,6 +206,7 @@ public final class EBinary extends AExpression {
if (promote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
@ -230,8 +233,8 @@ public final class EBinary extends AExpression {
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
} catch (ArithmeticException e) {
throw createError(e);
} catch (ArithmeticException exception) {
throw createError(exception);
}
}
}
@ -266,6 +269,7 @@ public final class EBinary extends AExpression {
} else if (sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
@ -311,6 +315,7 @@ public final class EBinary extends AExpression {
if (promote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
@ -349,7 +354,6 @@ public final class EBinary extends AExpression {
left = left.cast(variables);
right = right.cast(variables);
// It'd be nice to be able to do constant folding here but we can't because constants aren't flowing through EChain
promote = Definition.BOOLEAN_TYPE;
actual = Definition.BOOLEAN_TYPE;
}
@ -372,11 +376,13 @@ public final class EBinary extends AExpression {
if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
} else {
left.expected = lhspromote;
if (rhspromote.sort == Sort.LONG) {
right.expected = Definition.INT_TYPE;
right.explicit = true;
@ -419,11 +425,13 @@ public final class EBinary extends AExpression {
if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
} else {
left.expected = lhspromote;
if (rhspromote.sort == Sort.LONG) {
right.expected = Definition.INT_TYPE;
right.explicit = true;
@ -466,11 +474,13 @@ public final class EBinary extends AExpression {
if (lhspromote.sort == Sort.DEF || rhspromote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
} else {
left.expected = lhspromote;
if (rhspromote.sort == Sort.LONG) {
right.expected = Definition.INT_TYPE;
right.explicit = true;
@ -511,6 +521,7 @@ public final class EBinary extends AExpression {
if (promote.sort == Sort.DEF) {
left.expected = left.actual;
right.expected = right.actual;
if (expected != null) {
actual = expected;
}
@ -628,25 +639,31 @@ public final class EBinary extends AExpression {
left.write(writer, globals);
if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) {
if (!(left instanceof EBinary) || !((EBinary)left).cat) {
writer.writeAppendStrings(left.actual);
}
right.write(writer, globals);
if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) {
if (!(right instanceof EBinary) || !((EBinary)right).cat) {
writer.writeAppendStrings(right.actual);
}
if (!cat) {
writer.writeToStrings();
}
} else if (operation == Operation.FIND) {
writeBuildMatcher(writer, globals);
} else if (operation == Operation.FIND || operation == Operation.MATCH) {
right.write(writer, globals);
left.write(writer, globals);
writer.invokeVirtual(Definition.PATTERN_TYPE.type, WriterConstants.PATTERN_MATCHER);
if (operation == Operation.FIND) {
writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_FIND);
} else if (operation == Operation.MATCH) {
writeBuildMatcher(writer, globals);
writer.invokeVirtual(Definition.MATCHER_TYPE.type, WriterConstants.MATCHER_MATCHES);
} else {
throw new IllegalStateException("Illegal tree structure.");
}
} else {
left.write(writer, globals);
right.write(writer, globals);
@ -663,13 +680,5 @@ public final class EBinary extends AExpression {
writer.writeBinaryInstruction(location, actual, operation);
}
}
writer.writeBranch(tru, fals);
}
private void writeBuildMatcher(MethodWriter writer, Globals globals) {
right.write(writer, globals);
left.write(writer, globals);
writer.invokeVirtual(Definition.PATTERN_TYPE.type, WriterConstants.PATTERN_MATCHER);
}
}

View File

@ -30,15 +30,16 @@ import java.util.Objects;
import java.util.Set;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
/**
* Represents a boolean expression.
*/
public final class EBool extends AExpression {
final Operation operation;
AExpression left;
AExpression right;
private final Operation operation;
private AExpression left;
private AExpression right;
public EBool(Location location, Operation operation, AExpression left, AExpression right) {
super(location);
@ -79,72 +80,38 @@ public final class EBool extends AExpression {
@Override
void write(MethodWriter writer, Globals globals) {
if (tru != null || fals != null) {
if (operation == Operation.AND) {
Label localfals = fals == null ? new Label() : fals;
left.fals = localfals;
right.tru = tru;
right.fals = fals;
left.write(writer, globals);
right.write(writer, globals);
if (fals == null) {
writer.mark(localfals);
}
} else if (operation == Operation.OR) {
Label localtru = tru == null ? new Label() : tru;
left.tru = localtru;
right.tru = tru;
right.fals = fals;
left.write(writer, globals);
right.write(writer, globals);
if (tru == null) {
writer.mark(localtru);
}
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
} else {
if (operation == Operation.AND) {
Label localfals = new Label();
Label fals = new Label();
Label end = new Label();
left.fals = localfals;
right.fals = localfals;
left.write(writer, globals);
writer.ifZCmp(Opcodes.IFEQ, fals);
right.write(writer, globals);
writer.ifZCmp(Opcodes.IFEQ, fals);
writer.push(true);
writer.goTo(end);
writer.mark(localfals);
writer.mark(fals);
writer.push(false);
writer.mark(end);
} else if (operation == Operation.OR) {
Label localtru = new Label();
Label localfals = new Label();
Label tru = new Label();
Label fals = new Label();
Label end = new Label();
left.tru = localtru;
right.fals = localfals;
left.write(writer, globals);
writer.ifZCmp(Opcodes.IFNE, tru);
right.write(writer, globals);
writer.ifZCmp(Opcodes.IFEQ, fals);
writer.mark(localtru);
writer.mark(tru);
writer.push(true);
writer.goTo(end);
writer.mark(localfals);
writer.mark(fals);
writer.push(false);
writer.mark(end);
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}
}
}

View File

@ -39,10 +39,16 @@ public final class EBoolean extends AExpression {
}
@Override
void extractVariables(Set<String> variables) {}
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from constant [" + constant + "]."));
}
actual = Definition.BOOLEAN_TYPE;
}

View File

@ -35,15 +35,15 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE;
/**
* Represents a user-defined call.
*/
public class LCallLocal extends ALink {
public final class ECallLocal extends AExpression {
final String name;
final List<AExpression> arguments;
private final String name;
private final List<AExpression> arguments;
Method method = null;
private Method method = null;
public LCallLocal(Location location, String name, List<AExpression> arguments) {
super(location, -1);
public ECallLocal(Location location, String name, List<AExpression> arguments) {
super(location);
this.name = Objects.requireNonNull(name);
this.arguments = Objects.requireNonNull(arguments);
@ -57,17 +57,14 @@ public class LCallLocal extends ALink {
}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal call [" + name + "] against an existing target."));
} else if (store) {
throw createError(new IllegalArgumentException("Cannot assign a value to a call [" + name + "]."));
}
void analyze(Locals locals) {
MethodKey methodKey = new MethodKey(name, arguments.size());
method = locals.getMethod(methodKey);
if (method != null) {
if (method == null) {
throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
}
for (int argument = 0; argument < arguments.size(); ++argument) {
AExpression expression = arguments.get(argument);
@ -78,21 +75,11 @@ public class LCallLocal extends ALink {
}
statement = true;
after = method.rtn;
return this;
}
throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
actual = method.rtn;
}
@Override
void write(MethodWriter writer, Globals globals) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
for (AExpression argument : arguments) {
@ -101,9 +88,4 @@ public class LCallLocal extends ALink {
writer.invokeStatic(CLASS_TYPE, method.method);
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}

View File

@ -23,29 +23,29 @@ import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
import java.lang.invoke.LambdaMetafactory;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
/**
* Represents a capturing function reference.
*/
public class ECapturingFunctionRef extends AExpression implements ILambda {
public final String variable;
public final String call;
public final class ECapturingFunctionRef extends AExpression implements ILambda {
private final String variable;
private final String call;
private FunctionRef ref;
Variable captured;
String defPointer;
private Variable captured;
private String defPointer;
public ECapturingFunctionRef(Location location, String variable, String call) {
super(location);

View File

@ -30,27 +30,23 @@ import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
/**
* Represents an implicit cast in most cases, though it will replace
* explicit casts in the tree for simplicity. (Internal only.)
* Represents a cast that is inserted into the tree replacing other casts. (Internal only.)
*/
final class ECast extends AExpression {
final String type;
AExpression child;
Cast cast = null;
private AExpression child;
private final Cast cast;
ECast(Location location, AExpression child, Cast cast) {
super(location);
this.type = null;
this.child = Objects.requireNonNull(child);
this.cast = Objects.requireNonNull(cast);
}
@Override
void extractVariables(Set<String> variables) {
child.extractVariables(variables);
throw new IllegalStateException("Illegal tree structure.");
}
@Override
@ -63,6 +59,5 @@ final class ECast extends AExpression {
child.write(writer, globals);
writer.writeDebugInfo(location);
writer.writeCast(cast);
writer.writeBranch(tru, fals);
}
}

View File

@ -1,405 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* Represents the entirety of a variable/method chain for read/write operations.
*/
public final class EChain extends AExpression {

    // The chain of links (variables, fields, array/map accesses, method calls)
    // making up this read/write expression, evaluated left to right.
    final List<ALink> links;
    // True when this chain is a pre-increment/decrement (++x / --x).
    final boolean pre;
    // True when this chain is a post-increment/decrement (x++ / x--).
    final boolean post;
    // The compound-assignment operator (e.g. ADD for +=), or null for a plain
    // assignment or a pure read.  INCR/DECR are rewritten to ADD/SUB with a
    // synthesized constant-1 rhs during analysis (see analyzeIncrDecr).
    Operation operation;
    // The right-hand side being assigned, or null for a pure read.
    AExpression expression;

    // True when a compound ADD assignment is a String concatenation.
    boolean cat = false;
    // Promoted type shared by the lhs and rhs of a compound assignment.
    Type promote = null;
    Type shiftDistance; // for shifts, the RHS is promoted independently
    // Cast from the final link's type to the promoted type applied before the
    // compound operation ("there") ...
    Cast there = null;
    // ... and the cast from the promoted type back to the link's type applied
    // before storing the result ("back").
    Cast back = null;

    /** Creates a new RHS-only EChain */
    public EChain(Location location, ALink link) {
        this(location, Arrays.asList(link), false, false, null, null);
    }

    /**
     * Creates a chain.
     *
     * @param location   source location for error reporting
     * @param links      non-null list of links forming the chain
     * @param pre        true for pre-increment/decrement
     * @param post       true for post-increment/decrement
     * @param operation  compound-assignment operator, or null
     * @param expression rhs expression, or null for a pure read
     */
    public EChain(Location location, List<ALink> links,
                  boolean pre, boolean post, Operation operation, AExpression expression) {
        super(location);

        this.links = Objects.requireNonNull(links);
        this.pre = pre;
        this.post = post;
        this.operation = operation;
        this.expression = expression;
    }

    @Override
    void extractVariables(Set<String> variables) {
        // Collect variable names from every link and, when present, the rhs.
        for (ALink link : links) {
            link.extractVariables(variables);
        }

        if (expression != null) {
            expression.extractVariables(variables);
        }
    }

    @Override
    void analyze(Locals locals) {
        // Resolve the links first, then rewrite ++/-- into a compound +=/-=,
        // then analyze as compound assignment, plain write, or read.
        analyzeLinks(locals);
        analyzeIncrDecr();

        if (operation != null) {
            analyzeCompound(locals);
        } else if (expression != null) {
            analyzeWrite(locals);
        } else {
            analyzeRead();
        }
    }

    /**
     * Analyzes each link in order, threading each link's output type
     * ({@code after}) into the next link's input type ({@code before}).
     * A link may replace itself with a different node or fold itself away
     * entirely (by returning null from analyze), in which case it is removed.
     */
    private void analyzeLinks(Locals variables) {
        ALink previous = null;
        int index = 0;

        while (index < links.size()) {
            ALink current = links.get(index);

            if (previous != null) {
                current.before = previous.after;

                // Only the second link can inherit a static-type qualifier
                // from the first (e.g. a type name used as a namespace).
                if (index == 1) {
                    current.statik = previous.statik;
                }
            }

            // The final link is the one actually loaded from and/or stored to.
            if (index == links.size() - 1) {
                current.load = read;
                current.store = expression != null || pre || post;
            }

            ALink analyzed = current.analyze(variables);

            if (analyzed == null) {
                // The link folded itself away; do not advance the index.
                links.remove(index);
            } else {
                if (analyzed != current) {
                    links.set(index, analyzed);
                }

                previous = analyzed;
                ++index;
            }
        }

        // A leading static-type link carries no runtime value; drop it.
        if (links.get(0).statik) {
            links.remove(0);
        }
    }

    /**
     * Rewrites pre/post increments and decrements into compound assignments:
     * operation becomes ADD or SUB and expression becomes a constant 1 of the
     * appropriate numeric type (defaults to int for byte/short/char/int/def).
     */
    private void analyzeIncrDecr() {
        ALink last = links.get(links.size() - 1);

        if (pre && post) {
            throw createError(new IllegalStateException("Illegal tree structure."));
        } else if (pre || post) {
            // An increment/decrement cannot also carry an explicit rhs.
            if (expression != null) {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }

            Sort sort = last.after.sort;

            if (operation == Operation.INCR) {
                if (sort == Sort.DOUBLE) {
                    expression = new EConstant(location, 1D);
                } else if (sort == Sort.FLOAT) {
                    expression = new EConstant(location, 1F);
                } else if (sort == Sort.LONG) {
                    expression = new EConstant(location, 1L);
                } else {
                    expression = new EConstant(location, 1);
                }

                operation = Operation.ADD;
            } else if (operation == Operation.DECR) {
                if (sort == Sort.DOUBLE) {
                    expression = new EConstant(location, 1D);
                } else if (sort == Sort.FLOAT) {
                    expression = new EConstant(location, 1F);
                } else if (sort == Sort.LONG) {
                    expression = new EConstant(location, 1L);
                } else {
                    expression = new EConstant(location, 1);
                }

                operation = Operation.SUB;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }
    }

    /**
     * Analyzes a compound assignment (lhs op= rhs): promotes the lhs/rhs types
     * per the operator, detects String concatenation, and computes the
     * "there" and "back" casts applied around the operation.
     */
    private void analyzeCompound(Locals variables) {
        ALink last = links.get(links.size() - 1);

        expression.analyze(variables);

        boolean shift = false;

        if (operation == Operation.MUL) {
            promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true);
        } else if (operation == Operation.DIV) {
            promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true);
        } else if (operation == Operation.REM) {
            promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true);
        } else if (operation == Operation.ADD) {
            promote = AnalyzerCaster.promoteAdd(last.after, expression.actual);
        } else if (operation == Operation.SUB) {
            promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true);
        } else if (operation == Operation.LSH) {
            promote = AnalyzerCaster.promoteNumeric(last.after, false);
            shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false);
            shift = true;
        } else if (operation == Operation.RSH) {
            promote = AnalyzerCaster.promoteNumeric(last.after, false);
            shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false);
            shift = true;
        } else if (operation == Operation.USH) {
            promote = AnalyzerCaster.promoteNumeric(last.after, false);
            shiftDistance = AnalyzerCaster.promoteNumeric(expression.actual, false);
            shift = true;
        } else if (operation == Operation.BWAND) {
            // NOTE(review): BWAND, XOR, and BWOR all use promoteXor --
            // presumably the bitwise operators share one promotion rule;
            // confirm against AnalyzerCaster.
            promote = AnalyzerCaster.promoteXor(last.after, expression.actual);
        } else if (operation == Operation.XOR) {
            promote = AnalyzerCaster.promoteXor(last.after, expression.actual);
        } else if (operation == Operation.BWOR) {
            promote = AnalyzerCaster.promoteXor(last.after, expression.actual);
        } else {
            throw createError(new IllegalStateException("Illegal tree structure."));
        }

        if (promote == null || (shift && shiftDistance == null)) {
            throw createError(new ClassCastException("Cannot apply compound assignment " +
                "[" + operation.symbol + "=] to types [" + last.after + "] and [" + expression.actual + "]."));
        }

        cat = operation == Operation.ADD && promote.sort == Sort.STRING;

        if (cat) {
            // Mark a nested String-ADD rhs as part of the same concatenation so
            // it appends directly instead of building an intermediate String.
            if (expression instanceof EBinary && ((EBinary)expression).operation == Operation.ADD &&
                expression.actual.sort == Sort.STRING) {
                ((EBinary)expression).cat = true;
            }

            expression.expected = expression.actual;
        } else if (shift) {
            if (promote.sort == Sort.DEF) {
                // shifts are promoted independently, but for the def type, we need object.
                expression.expected = promote;
            } else if (shiftDistance.sort == Sort.LONG) {
                // A long shift distance is narrowed to int with an explicit cast.
                expression.expected = Definition.INT_TYPE;
                expression.explicit = true;
            } else {
                expression.expected = shiftDistance;
            }
        } else {
            expression.expected = promote;
        }

        expression = expression.cast(variables);

        // Cast lhs -> promoted type before the op, and promoted type -> lhs after.
        there = AnalyzerCaster.getLegalCast(location, last.after, promote, false, false);
        back = AnalyzerCaster.getLegalCast(location, promote, last.after, true, false);

        this.statement = true;
        this.actual = read ? last.after : Definition.VOID_TYPE;
    }

    /**
     * Analyzes a plain assignment (lhs = rhs), adapting either the store type
     * to the rhs (for def stores) or the rhs to the store type.
     */
    private void analyzeWrite(Locals variables) {
        ALink last = links.get(links.size() - 1);

        // If the store node is a def node, we remove the cast to def from the expression
        // and promote the real type to it:
        if (last instanceof IDefLink) {
            expression.analyze(variables);
            last.after = expression.expected = expression.actual;
        } else {
            // otherwise we adapt the type of the expression to the store type
            expression.expected = last.after;
            expression.analyze(variables);
        }

        expression = expression.cast(variables);

        this.statement = true;
        this.actual = read ? last.after : Definition.VOID_TYPE;
    }

    /**
     * Analyzes a pure read, taking the chain's constant/statement/actual
     * properties from the final link.
     */
    private void analyzeRead() {
        ALink last = links.get(links.size() - 1);

        // If the load node is a def node, we adapt its after type to use _this_ expected output type:
        if (last instanceof IDefLink && this.expected != null) {
            last.after = this.expected;
        }

        constant = last.string;
        statement = last.statement;
        actual = last.after;
    }

    /**
     * Handles writing byte code for variable/method chains for all given possibilities
     * including String concatenation, compound assignment, regular assignment, and simple
     * reads. Includes proper duplication for chained assignments and assignments that are
     * also read from.
     *
     * Example given 'x[0] += 5;' where x is an array of shorts and x[0] is 1.
     * Note this example has two links -- x (LVariable) and [0] (LBrace).
     * The following steps occur:
     * 1. call link{x}.write(...) -- no op [...]
     * 2. call link{x}.load(...) -- loads the address of the x array onto the stack [..., address(x)]
     * 3. call writer.dup(...) -- dup's the address of the x array onto the stack for later use with store [..., address(x), address(x)]
     * 4. call link{[0]}.write(...) -- load the array index value of the constant int 0 onto the stack [..., address(x), address(x), int(0)]
     * 5. call link{[0]}.load(...) -- load the short value from x[0] onto the stack [..., address(x), short(1)]
     * 6. call writer.writeCast(there) -- casts the short on the stack to an int so it can be added with the rhs [..., address(x), int(1)]
     * 7. call expression.write(...) -- puts the expression's value of the constant int 5 onto the stack [..., address(x), int(1), int(5)]
     * 8. call writer.writeBinaryInstruction(operation) -- writes the int addition instruction [..., address(x), int(6)]
     * 9. call writer.writeCast(back) -- convert the value on the stack back into a short [..., address(x), short(6)]
     * 10. call link{[0]}.store(...) -- store the value on the stack into the 0th index of the array x [...]
     */
    @Override
    void write(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        // For the case where the chain represents a String concatenation
        // we must, depending on the Java version, write a StringBuilder or
        // track types going onto the stack.  This must be done before the
        // links in the chain are read because we need the StringBuilder to
        // be placed on the stack ahead of any potential concatenation arguments.
        int catElementStackSize = 0;

        if (cat) {
            catElementStackSize = writer.writeNewStrings();
        }

        ALink last = links.get(links.size() - 1);

        // Go through all the links in the chain first calling write
        // and then load, except for the final link which may be a store.
        // See individual links for more information on what each of the
        // write, load, and store methods do.
        for (ALink link : links) {
            link.write(writer, globals); // call the write method on the link to prepare for a load/store operation

            if (link == last && link.store) {
                if (cat) {
                    // Handle the case where we are doing a compound assignment
                    // representing a String concatenation.

                    writer.writeDup(link.size, catElementStackSize); // dup the top element and insert it
                                                                     // before concat helper on stack
                    link.load(writer, globals); // read the current link's value
                    writer.writeAppendStrings(link.after); // append the link's value using the StringBuilder

                    expression.write(writer, globals); // write the bytecode for the rhs expression

                    if (!(expression instanceof EBinary) ||
                        ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) {
                        writer.writeAppendStrings(expression.actual); // append the expression's value unless it's also a concatenation
                    }

                    writer.writeToStrings(); // put the value for string concat onto the stack
                    writer.writeCast(back); // if necessary, cast the String to the lhs actual type

                    if (link.load) {
                        writer.writeDup(link.after.sort.size, link.size); // if this link is also read from dup the value onto the stack
                    }

                    link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array
                } else if (operation != null) {
                    // Handle the case where we are doing a compound assignment that
                    // does not represent a String concatenation.

                    writer.writeDup(link.size, 0); // if necessary, dup the previous link's value to be both loaded from and stored to
                    link.load(writer, globals); // load the current link's value

                    if (link.load && post) {
                        writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also
                                                                          // read from and is a post increment
                    }

                    writer.writeCast(there); // if necessary cast the current link's value
                                             // to the promotion type between the lhs and rhs types
                    expression.write(writer, globals); // write the bytecode for the rhs expression
                    // XXX: fix these types, but first we need def compound assignment tests.
                    // its tricky here as there are possibly explicit casts, too.
                    // write the operation instruction for compound assignment
                    if (promote.sort == Sort.DEF) {
                        writer.writeDynamicBinaryInstruction(location, promote,
                            Definition.DEF_TYPE, Definition.DEF_TYPE, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT);
                    } else {
                        writer.writeBinaryInstruction(location, promote, operation);
                    }

                    writer.writeCast(back); // if necessary cast the promotion type value back to the link's type

                    if (link.load && !post) {
                        writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also
                                                                          // read from and is not a post increment
                    }

                    link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array
                } else {
                    // Handle the case for a simple write.

                    expression.write(writer, globals); // write the bytecode for the rhs expression

                    if (link.load) {
                        writer.writeDup(link.after.sort.size, link.size); // dup the value if the link is also read from
                    }

                    link.store(writer, globals); // store the link's value from the stack in its respective variable/field/array
                }
            } else {
                // Handle the case for a simple read.

                link.load(writer, globals); // read the link's value onto the stack
            }
        }

        writer.writeBranch(tru, fals); // if this is a branch node, write the bytecode to make an appropriate jump
    }
}

View File

@ -43,10 +43,11 @@ import static org.elasticsearch.painless.WriterConstants.EQUALS;
*/
public final class EComp extends AExpression {
final Operation operation;
AExpression left;
AExpression right;
Type promotedType;
private final Operation operation;
private AExpression left;
private AExpression right;
private Type promotedType;
public EComp(Location location, Operation operation, AExpression left, AExpression right) {
super(location);
@ -449,25 +450,21 @@ public final class EComp extends AExpression {
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
boolean branch = tru != null || fals != null;
left.write(writer, globals);
if (!right.isNull) {
right.write(writer, globals);
}
Label jump = tru != null ? tru : fals != null ? fals : new Label();
Label jump = new Label();
Label end = new Label();
boolean eq = (operation == Operation.EQ || operation == Operation.EQR) && (tru != null || fals == null) ||
(operation == Operation.NE || operation == Operation.NER) && fals != null;
boolean ne = (operation == Operation.NE || operation == Operation.NER) && (tru != null || fals == null) ||
(operation == Operation.EQ || operation == Operation.EQR) && fals != null;
boolean lt = operation == Operation.LT && (tru != null || fals == null) || operation == Operation.GTE && fals != null;
boolean lte = operation == Operation.LTE && (tru != null || fals == null) || operation == Operation.GT && fals != null;
boolean gt = operation == Operation.GT && (tru != null || fals == null) || operation == Operation.LTE && fals != null;
boolean gte = operation == Operation.GTE && (tru != null || fals == null) || operation == Operation.LT && fals != null;
boolean eq = (operation == Operation.EQ || operation == Operation.EQR);
boolean ne = (operation == Operation.NE || operation == Operation.NER);
boolean lt = operation == Operation.LT;
boolean lte = operation == Operation.LTE;
boolean gt = operation == Operation.GT;
boolean gte = operation == Operation.GTE;
boolean writejump = true;
@ -478,8 +475,8 @@ public final class EComp extends AExpression {
case CHAR:
throw createError(new IllegalStateException("Illegal tree structure."));
case BOOL:
if (eq) writer.ifZCmp(MethodWriter.EQ, jump);
else if (ne) writer.ifZCmp(MethodWriter.NE, jump);
if (eq) writer.ifCmp(promotedType.type, MethodWriter.EQ, jump);
else if (ne) writer.ifCmp(promotedType.type, MethodWriter.NE, jump);
else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
@ -503,10 +500,11 @@ public final class EComp extends AExpression {
case DEF:
org.objectweb.asm.Type booleanType = org.objectweb.asm.Type.getType(boolean.class);
org.objectweb.asm.Type descriptor = org.objectweb.asm.Type.getMethodType(booleanType, left.actual.type, right.actual.type);
if (eq) {
if (right.isNull) {
writer.ifNull(jump);
} else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) {
} else if (!left.isNull && operation == Operation.EQ) {
writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL);
writejump = false;
} else {
@ -515,7 +513,7 @@ public final class EComp extends AExpression {
} else if (ne) {
if (right.isNull) {
writer.ifNonNull(jump);
} else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) {
} else if (!left.isNull && operation == Operation.NE) {
writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL);
writer.ifZCmp(MethodWriter.EQ, jump);
} else {
@ -537,22 +535,13 @@ public final class EComp extends AExpression {
throw createError(new IllegalStateException("Illegal tree structure."));
}
if (branch && !writejump) {
writer.ifZCmp(MethodWriter.NE, jump);
}
break;
default:
if (eq) {
if (right.isNull) {
writer.ifNull(jump);
} else if (operation == Operation.EQ || operation == Operation.NE) {
} else if (operation == Operation.EQ) {
writer.invokeStatic(OBJECTS_TYPE, EQUALS);
if (branch) {
writer.ifZCmp(MethodWriter.NE, jump);
}
writejump = false;
} else {
writer.ifCmp(promotedType.type, MethodWriter.EQ, jump);
@ -560,7 +549,7 @@ public final class EComp extends AExpression {
} else if (ne) {
if (right.isNull) {
writer.ifNonNull(jump);
} else if (operation == Operation.EQ || operation == Operation.NE) {
} else if (operation == Operation.NE) {
writer.invokeStatic(OBJECTS_TYPE, EQUALS);
writer.ifZCmp(MethodWriter.EQ, jump);
} else {
@ -571,7 +560,7 @@ public final class EComp extends AExpression {
}
}
if (!branch && writejump) {
if (writejump) {
writer.push(false);
writer.goTo(end);
writer.mark(jump);

View File

@ -31,15 +31,16 @@ import java.util.Objects;
import java.util.Set;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
/**
* Represents a conditional expression.
*/
public final class EConditional extends AExpression {
AExpression condition;
AExpression left;
AExpression right;
private AExpression condition;
private AExpression left;
private AExpression right;
public EConditional(Location location, AExpression condition, AExpression left, AExpression right) {
super(location);
@ -93,17 +94,15 @@ public final class EConditional extends AExpression {
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
Label localfals = new Label();
Label fals = new Label();
Label end = new Label();
condition.fals = localfals;
left.tru = right.tru = tru;
left.fals = right.fals = fals;
condition.write(writer, globals);
writer.ifZCmp(Opcodes.IFEQ, fals);
left.write(writer, globals);
writer.goTo(end);
writer.mark(localfals);
writer.mark(fals);
right.write(writer, globals);
writer.mark(end);
}

View File

@ -30,8 +30,8 @@ import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
/**
* Represents a constant. Note this replaces any other expression
* node with a constant value set during a cast. (Internal only.)
* Represents a constant inserted into the tree replacing
* other constants during constant folding. (Internal only.)
*/
final class EConstant extends AExpression {
@ -42,7 +42,9 @@ final class EConstant extends AExpression {
}
@Override
void extractVariables(Set<String> variables) {}
void extractVariables(Set<String> variables) {
throw new IllegalStateException("Illegal tree structure.");
}
@Override
void analyze(Locals locals) {
@ -82,22 +84,9 @@ final class EConstant extends AExpression {
case CHAR: writer.push((char)constant); break;
case SHORT: writer.push((short)constant); break;
case BYTE: writer.push((byte)constant); break;
case BOOL:
if (tru != null && (boolean)constant) {
writer.goTo(tru);
} else if (fals != null && !(boolean)constant) {
writer.goTo(fals);
} else if (tru == null && fals == null) {
writer.push((boolean)constant);
}
break;
case BOOL: writer.push((boolean)constant); break;
default:
throw createError(new IllegalStateException("Illegal tree structure."));
}
if (sort != Sort.BOOL) {
writer.writeBranch(tru, fals);
}
}
}

View File

@ -33,7 +33,7 @@ import java.util.Set;
*/
public final class EDecimal extends AExpression {
final String value;
private final String value;
public EDecimal(Location location, String value) {
super(location);
@ -46,6 +46,10 @@ public final class EDecimal extends AExpression {
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from constant [" + value + "]."));
}
if (value.endsWith("f") || value.endsWith("F")) {
try {
constant = Float.parseFloat(value.substring(0, value.length() - 1));

View File

@ -33,8 +33,8 @@ import java.util.Set;
*/
public final class EExplicit extends AExpression {
final String type;
AExpression child;
private final String type;
private AExpression child;
public EExplicit(Location location, String type, AExpression child) {
super(location);

View File

@ -20,30 +20,30 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.MethodKey;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Type;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
import java.lang.invoke.LambdaMetafactory;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE;
/**
* Represents a function reference.
*/
public class EFunctionRef extends AExpression implements ILambda {
public final String type;
public final String call;
public final class EFunctionRef extends AExpression implements ILambda {
private final String type;
private final String call;
private FunctionRef ref;
String defPointer;
private String defPointer;
public EFunctionRef(Location location, String type, String call) {
super(location);

View File

@ -20,12 +20,12 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import java.lang.invoke.MethodType;
import java.util.Objects;
import java.util.Set;
@ -34,12 +34,13 @@ import java.util.Set;
* <p>
* Unlike java's, this works for primitive types too.
*/
public class EInstanceof extends AExpression {
AExpression expression;
final String type;
Class<?> resolvedType;
Class<?> expressionType;
boolean primitiveExpression;
public final class EInstanceof extends AExpression {
private AExpression expression;
private final String type;
private Class<?> resolvedType;
private Class<?> expressionType;
private boolean primitiveExpression;
public EInstanceof(Location location, AExpression expression, String type) {
super(location);
@ -54,20 +55,29 @@ public class EInstanceof extends AExpression {
@Override
void analyze(Locals locals) {
Definition.Type raw = Definition.getType(type);
// map to wrapped type for primitive types
resolvedType = MethodType.methodType(raw.clazz).wrap().returnType();
expression.analyze(locals);
actual = Definition.BOOLEAN_TYPE;
final Type type;
Definition.Type expressionRaw = expression.actual;
if (expressionRaw == null) {
expressionRaw = Definition.DEF_TYPE;
// ensure the specified type is part of the definition
try {
type = Definition.getType(this.type);
} catch (IllegalArgumentException exception) {
throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
}
// record if the expression returns a primitive
primitiveExpression = expressionRaw.clazz.isPrimitive();
// map to wrapped type for primitive types
expressionType = MethodType.methodType(expressionRaw.clazz).wrap().returnType();
resolvedType = type.sort.primitive ? type.sort.boxed : type.clazz;
// analyze and cast the expression
expression.analyze(locals);
expression.expected = expression.actual;
expression = expression.cast(locals);
// record if the expression returns a primitive
primitiveExpression = expression.actual.sort.primitive;
// map to wrapped type for primitive types
expressionType = expression.actual.sort.primitive ? expression.actual.sort.boxed : type.clazz;
actual = Definition.BOOLEAN_TYPE;
}
@Override

View File

@ -19,6 +19,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
@ -29,7 +30,6 @@ import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.lang.invoke.LambdaMetafactory;
import java.util.ArrayList;
@ -64,21 +64,22 @@ import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE
* <br>
* {@code sort(list, lambda$0(capture))}
*/
public class ELambda extends AExpression implements ILambda {
final String name;
final FunctionReserved reserved;
final List<String> paramTypeStrs;
final List<String> paramNameStrs;
final List<AStatement> statements;
public final class ELambda extends AExpression implements ILambda {
private final String name;
private final FunctionReserved reserved;
private final List<String> paramTypeStrs;
private final List<String> paramNameStrs;
private final List<AStatement> statements;
// desugared synthetic method (lambda body)
SFunction desugared;
private SFunction desugared;
// captured variables
List<Variable> captures;
private List<Variable> captures;
// static parent, static lambda
FunctionRef ref;
private FunctionRef ref;
// dynamic parent, deferred until link time
String defPointer;
private String defPointer;
public ELambda(String name, FunctionReserved reserved,
Location location, List<String> paramTypes, List<String> paramNames,
@ -100,7 +101,7 @@ public class ELambda extends AExpression implements ILambda {
@Override
void analyze(Locals locals) {
final Definition.Type returnType;
final Type returnType;
final List<String> actualParamTypeStrs;
Method interfaceMethod;
// inspect the target first, set interface method if we know it.
@ -163,7 +164,7 @@ public class ELambda extends AExpression implements ILambda {
// desugar lambda body into a synthetic method
desugared = new SFunction(reserved, location, returnType.name, name,
paramTypes, paramNames, statements, true);
desugared.generate();
desugared.generateSignature();
desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters,
captures.size(), reserved.getMaxLoopCounter()));
@ -195,8 +196,10 @@ public class ELambda extends AExpression implements ILambda {
}
// convert MethodTypes to asm Type for the constant pool.
String invokedType = ref.invokedType.toMethodDescriptorString();
Type samMethodType = Type.getMethodType(ref.samMethodType.toMethodDescriptorString());
Type interfaceType = Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString());
org.objectweb.asm.Type samMethodType =
org.objectweb.asm.Type.getMethodType(ref.samMethodType.toMethodDescriptorString());
org.objectweb.asm.Type interfaceType =
org.objectweb.asm.Type.getMethodType(ref.interfaceMethodType.toMethodDescriptorString());
if (ref.needsBridges()) {
writer.invokeDynamic(ref.invokedName,
invokedType,
@ -235,8 +238,8 @@ public class ELambda extends AExpression implements ILambda {
}
@Override
public Type[] getCaptures() {
Type[] types = new Type[captures.size()];
public org.objectweb.asm.Type[] getCaptures() {
org.objectweb.asm.Type[] types = new org.objectweb.asm.Type[captures.size()];
for (int i = 0; i < types.length; i++) {
types[i] = captures.get(i).type.type;
}

View File

@ -33,11 +33,11 @@ import java.util.Set;
/**
* Represents a list initialization shortcut.
*/
public class EListInit extends AExpression {
final List<AExpression> values;
public final class EListInit extends AExpression {
private final List<AExpression> values;
Method constructor = null;
Method method = null;
private Method constructor = null;
private Method method = null;
public EListInit(Location location, List<AExpression> values) {
super(location);
@ -54,6 +54,10 @@ public class EListInit extends AExpression {
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from list initializer."));
}
try {
actual = Definition.getType("ArrayList");
} catch (IllegalArgumentException exception) {

View File

@ -33,12 +33,12 @@ import java.util.Set;
/**
* Represents a map initialization shortcut.
*/
public class EMapInit extends AExpression {
final List<AExpression> keys;
final List<AExpression> values;
public final class EMapInit extends AExpression {
private final List<AExpression> keys;
private final List<AExpression> values;
Method constructor = null;
Method method = null;
private Method constructor = null;
private Method method = null;
public EMapInit(Location location, List<AExpression> keys, List<AExpression> values) {
super(location);
@ -60,6 +60,10 @@ public class EMapInit extends AExpression {
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from map initializer."));
}
try {
actual = Definition.getType("HashMap");
} catch (IllegalArgumentException exception) {

View File

@ -33,14 +33,14 @@ import java.util.Set;
/**
* Represents an array instantiation.
*/
public final class LNewArray extends ALink {
public final class ENewArray extends AExpression {
final String type;
final List<AExpression> arguments;
final boolean initialize;
private final String type;
private final List<AExpression> arguments;
private final boolean initialize;
public LNewArray(Location location, String type, List<AExpression> arguments, boolean initialize) {
super(location, -1);
public ENewArray(Location location, String type, List<AExpression> arguments, boolean initialize) {
super(location);
this.type = Objects.requireNonNull(type);
this.arguments = Objects.requireNonNull(arguments);
@ -55,13 +55,9 @@ public final class LNewArray extends ALink {
}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Cannot create a new array with a target already defined."));
} else if (store) {
throw createError(new IllegalArgumentException("Cannot assign a value to a new array."));
} else if (!load) {
throw createError(new IllegalArgumentException("A newly created array must be read."));
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("A newly created array must be read from."));
}
final Type type;
@ -81,23 +77,16 @@ public final class LNewArray extends ALink {
arguments.set(argument, expression.cast(locals));
}
after = Definition.getType(type.struct, initialize ? 1 : arguments.size());
return this;
actual = Definition.getType(type.struct, initialize ? 1 : arguments.size());
}
@Override
void write(MethodWriter writer, Globals globals) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (initialize) {
writer.push(arguments.size());
writer.newArray(Definition.getType(after.struct, 0).type);
writer.newArray(Definition.getType(actual.struct, 0).type);
for (int index = 0; index < arguments.size(); ++index) {
AExpression argument = arguments.get(index);
@ -105,7 +94,7 @@ public final class LNewArray extends ALink {
writer.dup();
writer.push(index);
argument.write(writer, globals);
writer.arrayStore(Definition.getType(after.struct, 0).type);
writer.arrayStore(Definition.getType(actual.struct, 0).type);
}
} else {
for (AExpression argument : arguments) {
@ -113,15 +102,10 @@ public final class LNewArray extends ALink {
}
if (arguments.size() > 1) {
writer.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions());
writer.visitMultiANewArrayInsn(actual.type.getDescriptor(), actual.type.getDimensions());
} else {
writer.newArray(Definition.getType(after.struct, 0).type);
writer.newArray(Definition.getType(actual.struct, 0).type);
}
}
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}

View File

@ -35,15 +35,15 @@ import java.util.Set;
/**
* Represents and object instantiation.
*/
public final class LNewObj extends ALink {
public final class ENewObj extends AExpression {
final String type;
final List<AExpression> arguments;
private final String type;
private final List<AExpression> arguments;
Method constructor;
private Method constructor;
public LNewObj(Location location, String type, List<AExpression> arguments) {
super(location, -1);
public ENewObj(Location location, String type, List<AExpression> arguments) {
super(location);
this.type = Objects.requireNonNull(type);
this.arguments = Objects.requireNonNull(arguments);
@ -57,13 +57,7 @@ public final class LNewObj extends ALink {
}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal new call with a target already defined."));
} else if (store) {
throw createError(new IllegalArgumentException("Cannot assign a value to a new call."));
}
void analyze(Locals locals) {
final Type type;
try {
@ -94,25 +88,19 @@ public final class LNewObj extends ALink {
}
statement = true;
after = type;
actual = type;
} else {
throw createError(new IllegalArgumentException("Unknown new call on type [" + struct.name + "]."));
}
return this;
}
@Override
void write(MethodWriter writer, Globals globals) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.newInstance(after.type);
if (load) {
writer.newInstance(actual.type);
if (read) {
writer.dup();
}
@ -122,9 +110,4 @@ public final class LNewObj extends ALink {
writer.invokeConstructor(constructor.owner.type, constructor.method);
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}

View File

@ -39,10 +39,16 @@ public final class ENull extends AExpression {
}
@Override
void extractVariables(Set<String> variables) {}
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from null constant."));
}
isNull = true;
if (expected != null) {

View File

@ -35,8 +35,8 @@ import org.elasticsearch.painless.MethodWriter;
*/
public final class ENumeric extends AExpression {
final String value;
int radix;
private final String value;
private int radix;
public ENumeric(Location location, String value, int radix) {
super(location);
@ -46,10 +46,16 @@ public final class ENumeric extends AExpression {
}
@Override
void extractVariables(Set<String> variables) {}
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from constant [" + value + "]."));
}
if (value.endsWith("d") || value.endsWith("D")) {
if (radix != 10) {
throw createError(new IllegalStateException("Illegal tree structure."));

View File

@ -35,61 +35,53 @@ import org.elasticsearch.painless.WriterConstants;
/**
* Represents a regex constant. All regexes are constants.
*/
public final class LRegex extends ALink {
public final class ERegex extends AExpression {
private final String pattern;
private final int flags;
private Constant constant;
public LRegex(Location location, String pattern, String flagsString) {
super(location, 1);
public ERegex(Location location, String pattern, String flagsString) {
super(location);
this.pattern = pattern;
int flags = 0;
for (int c = 0; c < flagsString.length(); c++) {
flags |= flagForChar(flagsString.charAt(c));
}
this.flags = flags;
try {
// Compile the pattern early after parsing so we can throw an error to the user with the location
Pattern.compile(pattern, flags);
} catch (PatternSyntaxException e) {
throw createError(e);
}
}
@Override
void extractVariables(Set<String> variables) {}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal Regex constant [" + pattern + "]."));
} else if (store) {
throw createError(new IllegalArgumentException("Cannot write to Regex constant [" + pattern + "]."));
} else if (!load) {
throw createError(new IllegalArgumentException("Regex constant may only be read [" + pattern + "]."));
}
constant = new Constant(location, Definition.PATTERN_TYPE.type, "regexAt$" + location.getOffset(), this::initializeConstant);
after = Definition.PATTERN_TYPE;
return this;
}
@Override
void write(MethodWriter writer, Globals globals) {
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.getStatic(WriterConstants.CLASS_TYPE, constant.name, Definition.PATTERN_TYPE.type);
globals.addConstantInitializer(constant);
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Regex constant may only be read [" + pattern + "]."));
}
try {
Pattern.compile(pattern, flags);
} catch (PatternSyntaxException exception) {
throw createError(exception);
}
constant = new Constant(location, Definition.PATTERN_TYPE.type, "regexAt$" + location.getOffset(), this::initializeConstant);
actual = Definition.PATTERN_TYPE;
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.getStatic(WriterConstants.CLASS_TYPE, constant.name, Definition.PATTERN_TYPE.type);
globals.addConstantInitializer(constant);
}
private void initializeConstant(MethodWriter writer) {
@ -108,7 +100,8 @@ public final class LRegex extends ALink {
case 'U': return Pattern.UNICODE_CHARACTER_CLASS;
case 'u': return Pattern.UNICODE_CASE;
case 'x': return Pattern.COMMENTS;
default: throw new IllegalArgumentException("Unknown flag [" + c + "]");
default:
throw new IllegalArgumentException("Unknown flag [" + c + "]");
}
}
}

View File

@ -32,47 +32,32 @@ import org.elasticsearch.painless.Locals;
/**
* Represents a static type target.
*/
public final class LStatic extends ALink {
public final class EStatic extends AExpression {
final String type;
private final String type;
public LStatic(Location location, String type) {
super(location, 0);
public EStatic(Location location, String type) {
super(location);
this.type = Objects.requireNonNull(type);
}
@Override
void extractVariables(Set<String> variables) {}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal static type [" + type + "] after target already defined."));
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void analyze(Locals locals) {
try {
after = Definition.getType(type);
statik = true;
actual = Definition.getType(type);
} catch (IllegalArgumentException exception) {
throw createError(new IllegalArgumentException("Not a type [" + type + "]."));
}
return this;
}
@Override
void write(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
@Override
void load(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
// Do nothing.
}
}

View File

@ -31,44 +31,30 @@ import java.util.Set;
/**
* Represents a string constant.
*/
public final class LString extends ALink {
public final class EString extends AExpression {
public LString(Location location, String string) {
super(location, -1);
public EString(Location location, String string) {
super(location);
this.string = Objects.requireNonNull(string);
this.constant = Objects.requireNonNull(string);
}
@Override
void extractVariables(Set<String> variables) {}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal String constant [" + string + "]."));
} else if (store) {
throw createError(new IllegalArgumentException("Cannot write to read-only String constant [" + string + "]."));
} else if (!load) {
throw createError(new IllegalArgumentException("Must read String constant [" + string + "]."));
}
after = Definition.STRING_TYPE;
return this;
}
@Override
void write(MethodWriter writer, Globals globals) {
void extractVariables(Set<String> variables) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.push(string);
void analyze(Locals locals) {
if (!read) {
throw createError(new IllegalArgumentException("Must read from constant [" + constant + "]."));
}
actual = Definition.STRING_TYPE;
}
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
void write(MethodWriter writer, Globals globals) {
throw new IllegalStateException("Illegal tree structure.");
}
}

View File

@ -34,16 +34,18 @@ import java.util.Objects;
import java.util.Set;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
/**
* Represents a unary math expression.
*/
public final class EUnary extends AExpression {
final Operation operation;
AExpression child;
Type promote;
boolean originallyExplicit = false; // record whether there was originally an explicit cast
private final Operation operation;
private AExpression child;
private Type promote;
private boolean originallyExplicit = false; // record whether there was originally an explicit cast
public EUnary(Location location, Operation operation, AExpression child) {
super(location);
@ -60,6 +62,7 @@ public final class EUnary extends AExpression {
@Override
void analyze(Locals locals) {
originallyExplicit = explicit;
if (operation == Operation.NOT) {
analyzeNot(locals);
} else if (operation == Operation.BWNOT) {
@ -191,33 +194,29 @@ public final class EUnary extends AExpression {
writer.writeDebugInfo(location);
if (operation == Operation.NOT) {
if (tru == null && fals == null) {
Label localfals = new Label();
Label fals = new Label();
Label end = new Label();
child.fals = localfals;
child.write(writer, globals);
writer.ifZCmp(Opcodes.IFEQ, fals);
writer.push(false);
writer.goTo(end);
writer.mark(localfals);
writer.mark(fals);
writer.push(true);
writer.mark(end);
} else {
child.tru = fals;
child.fals = tru;
child.write(writer, globals);
}
} else {
Sort sort = promote.sort;
child.write(writer, globals);
// def calls adopt the wanted return value. if there was a narrowing cast,
// we need to flag that so that its done at runtime.
// Def calls adopt the wanted return value. If there was a narrowing cast,
// we need to flag that so that it's done at runtime.
int defFlags = 0;
if (originallyExplicit) {
defFlags |= DefBootstrap.OPERATOR_EXPLICIT_CAST;
}
if (operation == Operation.BWNOT) {
if (sort == Sort.DEF) {
org.objectweb.asm.Type descriptor = org.objectweb.asm.Type.getMethodType(actual.type, child.actual.type);
@ -248,8 +247,6 @@ public final class EUnary extends AExpression {
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
}
writer.writeBranch(tru, fals);
}
}
}

View File

@ -19,11 +19,12 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
import java.util.Objects;
@ -32,14 +33,14 @@ import java.util.Set;
/**
* Represents a variable load/store.
*/
public final class LVariable extends ALink {
public final class EVariable extends AStoreable {
final String name;
private final String name;
Variable variable;
private Variable variable = null;
public LVariable(Location location, String name) {
super(location, 0);
public EVariable(Location location, String name) {
super(location);
this.name = Objects.requireNonNull(name);
}
@ -50,34 +51,48 @@ public final class LVariable extends ALink {
}
@Override
ALink analyze(Locals locals) {
if (before != null) {
throw createError(new IllegalArgumentException("Illegal variable [" + name + "] access with target already defined."));
}
void analyze(Locals locals) {
variable = locals.getVariable(location, name);
if (store && variable.readonly) {
if (write && variable.readonly) {
throw createError(new IllegalArgumentException("Variable [" + variable.name + "] is read-only."));
}
after = variable.type;
return this;
actual = variable.type;
}
@Override
void write(MethodWriter writer, Globals globals) {
writer.visitVarInsn(actual.type.getOpcode(Opcodes.ILOAD), variable.getSlot());
}
@Override
int accessElementCount() {
return 0;
}
@Override
boolean isDefOptimized() {
return false;
}
@Override
void updateActual(Type actual) {
throw new IllegalArgumentException("Illegal tree structure.");
}
@Override
void setup(MethodWriter writer, Globals globals) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), variable.getSlot());
writer.visitVarInsn(actual.type.getOpcode(Opcodes.ILOAD), variable.getSlot());
}
@Override
void store(MethodWriter writer, Globals globals) {
writer.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), variable.getSlot());
writer.visitVarInsn(actual.type.getOpcode(Opcodes.ISTORE), variable.getSlot());
}
}

View File

@ -1,95 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Represents an array load/store or defers to possible shortcuts.
*/
public final class LBrace extends ALink {
AExpression index;
public LBrace(Location location, AExpression index) {
super(location, 2);
this.index = Objects.requireNonNull(index);
}
@Override
void extractVariables(Set<String> variables) {
index.extractVariables(variables);
}
@Override
ALink analyze(Locals locals) {
if (before == null) {
throw createError(new IllegalArgumentException("Illegal array access made without target."));
}
Sort sort = before.sort;
if (sort == Sort.ARRAY) {
index.expected = Definition.INT_TYPE;
index.analyze(locals);
index = index.cast(locals);
after = Definition.getType(before.struct, before.dimensions - 1);
return this;
} else if (sort == Sort.DEF) {
return new LDefArray(location, index).copy(this).analyze(locals);
} else if (Map.class.isAssignableFrom(before.clazz)) {
return new LMapShortcut(location, index).copy(this).analyze(locals);
} else if (List.class.isAssignableFrom(before.clazz)) {
return new LListShortcut(location, index).copy(this).analyze(locals);
}
throw createError(new IllegalArgumentException("Illegal array access on type [" + before.name + "]."));
}
@Override
void write(MethodWriter writer, Globals globals) {
index.write(writer, globals);
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.arrayLoad(after.type);
}
@Override
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.arrayStore(after.type);
}
}

View File

@ -1,133 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition.MethodKey;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
import java.lang.invoke.MethodType;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* Represents a method call or defers to a def call.
*/
public final class LCallInvoke extends ALink {
// Method name as written in the Painless source.
final String name;
// Argument expressions; entries are replaced in place with their casted forms during analyze.
final List<AExpression> arguments;
// Resolved compile-time method; remains null until analyze succeeds (or the call is deferred to def).
Method method = null;
boolean box = false; // true for primitive types
/**
 * Builds a method-call link.
 *
 * @param location  source location used for error reporting
 * @param name      method name (must be non-null)
 * @param arguments argument expressions (must be non-null)
 */
public LCallInvoke(Location location, String name, List<AExpression> arguments) {
super(location, -1);
this.name = Objects.requireNonNull(name);
this.arguments = Objects.requireNonNull(arguments);
}
@Override
void extractVariables(Set<String> variables) {
// A call itself binds no variables; only its arguments can reference them.
for (AExpression argument : arguments) {
argument.extractVariables(variables);
}
}
/**
 * Resolves the target method and type-checks the arguments.
 * Rejects calls without a target, calls on arrays, and calls used as
 * assignment targets. For a primitive receiver the lookup is done on the
 * wrapper type's struct and the receiver is boxed at load time. If the
 * receiver is dynamic (def), resolution is deferred to {@code LDefCall}.
 *
 * @throws IllegalArgumentException (via createError) when the call is illegal or unresolvable
 */
@Override
ALink analyze(Locals locals) {
if (before == null) {
throw createError(new IllegalArgumentException("Illegal call [" + name + "] made without target."));
} else if (before.sort == Sort.ARRAY) {
throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type."));
} else if (store) {
// A method call can never be the left-hand side of an assignment.
throw createError(new IllegalArgumentException("Cannot assign a value to a call [" + name + "]."));
}
// Methods are keyed by (name, arity).
MethodKey methodKey = new MethodKey(name, arguments.size());
Struct struct = before.struct;
if (before.clazz.isPrimitive()) {
// Look the method up on the boxed wrapper (e.g. int -> Integer) and
// remember to box the receiver when emitting bytecode.
Class<?> wrapper = MethodType.methodType(before.clazz).wrap().returnType();
Type boxed = Definition.getType(wrapper.getSimpleName());
struct = boxed.struct;
box = true;
}
// NOTE(review): statik appears to be set by the preceding link in the chain
// (static vs. instance access) -- confirm against ALink.
method = statik ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey);
if (method != null) {
// Type-check each argument against the method signature and replace it
// with a cast-inserted expression.
for (int argument = 0; argument < arguments.size(); ++argument) {
AExpression expression = arguments.get(argument);
expression.expected = method.arguments.get(argument);
expression.internal = true;
expression.analyze(locals);
arguments.set(argument, expression.cast(locals));
}
statement = true;
after = method.rtn;
return this;
} else if (before.sort == Sort.DEF) {
// Dynamic receiver: defer resolution to runtime via a def call link.
ALink link = new LDefCall(location, name, arguments);
link.copy(this);
return link.analyze(locals);
}
throw createError(new IllegalArgumentException(
"Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "]."));
}
@Override
void write(MethodWriter writer, Globals globals) {
// Do nothing.
}
/** Emits the call: boxes a primitive receiver if needed, writes the arguments, then the invocation. */
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (box) {
writer.box(before.type);
}
for (AExpression argument : arguments) {
argument.write(writer, globals);
}
method.write(writer);
}
/** Calls are never assignment targets; reaching this indicates a malformed tree. */
@Override
void store(MethodWriter writer, Globals globals) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
}

View File

@ -1,134 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Definition.Field;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.MethodWriter;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Represents a field load/store or defers to a possible shortcuts.
*/
public final class LField extends ALink {
final String value;
Field field;
public LField(Location location, String value) {
super(location, 1);
this.value = Objects.requireNonNull(value);
}
@Override
void extractVariables(Set<String> variables) {}
@Override
ALink analyze(Locals locals) {
if (before == null) {
throw createError(new IllegalArgumentException("Illegal field [" + value + "] access made without target."));
}
Sort sort = before.sort;
if (sort == Sort.ARRAY) {
return new LArrayLength(location, value).copy(this).analyze(locals);
} else if (sort == Sort.DEF) {
return new LDefField(location, value).copy(this).analyze(locals);
}
Struct struct = before.struct;
field = statik ? struct.staticMembers.get(value) : struct.members.get(value);
if (field != null) {
if (store && java.lang.reflect.Modifier.isFinal(field.modifiers)) {
throw createError(new IllegalArgumentException(
"Cannot write to read-only field [" + value + "] for type [" + struct.name + "]."));
}
after = field.type;
return this;
} else {
boolean shortcut =
struct.methods.containsKey(new Definition.MethodKey("get" +
Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) ||
struct.methods.containsKey(new Definition.MethodKey("is" +
Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) ||
struct.methods.containsKey(new Definition.MethodKey("set" +
Character.toUpperCase(value.charAt(0)) + value.substring(1), 1));
if (shortcut) {
return new LShortcut(location, value).copy(this).analyze(locals);
} else {
EConstant index = new EConstant(location, value);
index.analyze(locals);
if (Map.class.isAssignableFrom(before.clazz)) {
return new LMapShortcut(location, index).copy(this).analyze(locals);
}
if (List.class.isAssignableFrom(before.clazz)) {
return new LListShortcut(location, index).copy(this).analyze(locals);
}
}
}
throw createError(new IllegalArgumentException("Unknown field [" + value + "] for type [" + struct.name + "]."));
}
@Override
void write(MethodWriter writer, Globals globals) {
// Do nothing.
}
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isStatic(field.modifiers)) {
writer.getStatic(field.owner.type, field.javaName, field.type.type);
} else {
writer.getField(field.owner.type, field.javaName, field.type.type);
}
}
@Override
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isStatic(field.modifiers)) {
writer.putStatic(field.owner.type, field.javaName, field.type.type);
} else {
writer.putField(field.owner.type, field.javaName, field.type.type);
}
}
}

Some files were not shown because too many files have changed in this diff Show More