diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 5eea0b8c163..af46c2e567b 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -201,7 +201,7 @@ gradle test -Dtests.timeoutSuite=5000! ... Change the logging level of ES (not gradle) -------------------------------- -gradle test -Des.logger.level=DEBUG +gradle test -Dtests.logger.level=DEBUG -------------------------------- Print all the logging output from the test runs to the commandline diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 8e1afd7d1a2..455f16d123a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -456,7 +456,7 @@ class BuildPlugin implements Plugin { // default test sysprop values systemProperty 'tests.ifNoTests', 'fail' // TODO: remove setting logging level via system property - systemProperty 'es.logger.level', 'WARN' + systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('tests.') || property.getKey().startsWith('es.')) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 2ff5e333139..cd0475d2aea 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -129,7 +129,11 @@ class NodeInfo { } env = [ 'JAVA_HOME' : project.javaHome ] - args.addAll("-E", "es.node.portsfile=true") + args.addAll("-E", "node.portsfile=true") + String loggerLevel = System.getProperty("tests.logger.level") + if (loggerLevel != null) { + args.addAll("-E", "logger.level=${loggerLevel}") + } String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs env.put('ES_JAVA_OPTS', esJavaOpts) @@ -140,7 +144,7 @@ class NodeInfo { } } env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options')) - args.addAll("-E", "es.path.conf=${confDir}") + args.addAll("-E", "path.conf=${confDir}") if (Os.isFamily(Os.FAMILY_WINDOWS)) { args.add('"') // end the entire command, quoted } diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 48f07b1a2d5..5c776083279 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -1335,7 +1335,6 @@ - diff --git a/buildSrc/version.properties b/buildSrc/version.properties index fee8404080a..b6e64a3c263 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -13,9 +13,7 @@ jna = 4.1.0 # test dependencies randomizedrunner = 2.3.2 junit = 4.11 -# TODO: Upgrade httpclient to a version > 4.5.1 once released. 
Then remove o.e.test.rest.client.StrictHostnameVerifier* and use -# DefaultHostnameVerifier instead since we no longer need to workaround https://issues.apache.org/jira/browse/HTTPCLIENT-1698 -httpclient = 4.3.6 -httpcore = 4.3.3 +httpclient = 4.5.2 +httpcore = 4.4.4 commonslogging = 1.1.3 commonscodec = 1.10 diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java index 28b62083d42..f986a5679a9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java @@ -250,7 +250,7 @@ public class TransportClusterAllocationExplainAction final ActionListener listener) { final RoutingNodes routingNodes = state.getRoutingNodes(); final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, state, - clusterInfoService.getClusterInfo(), System.nanoTime()); + clusterInfoService.getClusterInfo(), System.nanoTime(), false); ShardRouting foundShard = null; if (request.useAnyUnassignedShard()) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java index a241f01ea28..dcc45ab21b9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java @@ -38,9 +38,10 @@ import java.io.IOException; * Request to submit cluster reroute allocation commands */ public class ClusterRerouteRequest extends AcknowledgedRequest { - AllocationCommands commands = new AllocationCommands(); - boolean dryRun; - boolean explain; + private AllocationCommands commands = new AllocationCommands(); + private boolean dryRun; + private boolean explain; + private boolean retryFailed; public ClusterRerouteRequest() { } @@ -81,6 +82,15 @@ public class ClusterRerouteRequest extends AcknowledgedRequestfalse). If true, the + * request will retry allocating shards that can't currently be allocated due to too many allocation failures. + */ + public ClusterRerouteRequest setRetryFailed(boolean retryFailed) { + this.retryFailed = retryFailed; + return this; + } + /** * Returns the current explain flag */ @@ -88,6 +98,14 @@ public class ClusterRerouteRequest extends AcknowledgedRequestfalse). If true, the + * request will retry allocating shards that can't currently be allocated due to too many allocation failures. + */ + public ClusterRerouteRequestBuilder setRetryFailed(boolean retryFailed) { + request.setRetryFailed(retryFailed); + return this; + } + /** * Sets the commands for the request to execute. 
*/ diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index e6116dbfbc4..b0b676f6e2e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -68,38 +69,55 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction listener) { - clusterService.submitStateUpdateTask("cluster_reroute (api)", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) { - - private volatile ClusterState clusterStateToSend; - private volatile RoutingExplanations explanations; - - @Override - protected ClusterRerouteResponse newResponse(boolean acknowledged) { - return new ClusterRerouteResponse(acknowledged, clusterStateToSend, explanations); - } - - @Override - public void onAckTimeout() { - listener.onResponse(new ClusterRerouteResponse(false, clusterStateToSend, new RoutingExplanations())); - } - - @Override - public void onFailure(String source, Throwable t) { - logger.debug("failed to perform [{}]", t, source); - super.onFailure(source, t); - } - - @Override - public ClusterState execute(ClusterState currentState) { - RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.commands, request.explain()); - ClusterState newState = ClusterState.builder(currentState).routingResult(routingResult).build(); - clusterStateToSend = newState; - explanations = routingResult.explanations(); - if (request.dryRun) { - return currentState; - } - return newState; - } - }); + clusterService.submitStateUpdateTask("cluster_reroute (api)", new ClusterRerouteResponseAckedClusterStateUpdateTask(logger, + allocationService, request, listener)); } -} \ No newline at end of file + + static class ClusterRerouteResponseAckedClusterStateUpdateTask extends AckedClusterStateUpdateTask { + + private final ClusterRerouteRequest request; + private final ActionListener listener; + private final ESLogger logger; + private final AllocationService allocationService; + private volatile ClusterState clusterStateToSend; + private volatile RoutingExplanations explanations; + + ClusterRerouteResponseAckedClusterStateUpdateTask(ESLogger logger, AllocationService allocationService, ClusterRerouteRequest request, + ActionListener listener) { + super(Priority.IMMEDIATE, request, listener); + this.request = request; + this.listener = listener; + this.logger = logger; + this.allocationService = allocationService; + } + + @Override + protected ClusterRerouteResponse newResponse(boolean acknowledged) { + return new ClusterRerouteResponse(acknowledged, clusterStateToSend, explanations); + } + + @Override + public void onAckTimeout() { + listener.onResponse(new ClusterRerouteResponse(false, clusterStateToSend, new RoutingExplanations())); + } + + @Override + public void onFailure(String source, Throwable t) { + logger.debug("failed to 
perform [{}]", t, source); + super.onFailure(source, t); + } + + @Override + public ClusterState execute(ClusterState currentState) { + RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.getCommands(), request.explain(), + request.isRetryFailed()); + ClusterState newState = ClusterState.builder(currentState).routingResult(routingResult).build(); + clusterStateToSend = newState; + explanations = routingResult.explanations(); + if (request.dryRun()) { + return currentState; + } + return newState; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index 0a7a8a9ce80..d49be2d294e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -81,18 +81,13 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent return snapshotInfo.status(); } - static final class Fields { - static final String SNAPSHOT = "snapshot"; - static final String ACCEPTED = "accepted"; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (snapshotInfo != null) { - builder.field(Fields.SNAPSHOT); + builder.field("snapshot"); snapshotInfo.toExternalXContent(builder, params); } else { - builder.field(Fields.ACCEPTED, true); + builder.field("accepted", true); } return builder; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index a5db19684b2..7c4b255ff7c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -74,13 +74,9 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent { } } - static final class Fields { - static final String SNAPSHOTS = "snapshots"; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startArray(Fields.SNAPSHOTS); + builder.startArray("snapshots"); for (SnapshotInfo snapshotInfo : snapshots) { snapshotInfo.toExternalXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java index a54c01ed15a..70f4f2aa4f2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java @@ -73,18 +73,13 @@ public class RestoreSnapshotResponse extends ActionResponse implements ToXConten return restoreInfo.status(); } - static final class Fields { - static final String SNAPSHOT = "snapshot"; - static final String ACCEPTED = "accepted"; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { if (restoreInfo != null) { - builder.field(Fields.SNAPSHOT); + builder.field("snapshot"); restoreInfo.toXContent(builder, 
params); } else { - builder.field(Fields.ACCEPTED, true); + builder.field("accepted", true); } return builder; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java index 34e503224ce..b9800a2d9ed 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java @@ -73,13 +73,9 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten } } - static final class Fields { - static final String SNAPSHOTS = "snapshots"; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(Fields.SNAPSHOTS); + builder.startArray("snapshots"); for (SnapshotStatus snapshot : snapshots) { snapshot.toXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index 8c0c427beea..9a7bb5c8f3d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; -import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.search.suggest.completion.CompletionStats; @@ -45,7 +44,6 @@ public class ClusterStatsIndices implements ToXContent { private QueryCacheStats queryCache; private CompletionStats completion; private SegmentsStats segments; - private PercolatorQueryCacheStats percolatorCache; public ClusterStatsIndices(List nodeResponses) { ObjectObjectHashMap countsPerIndex = new ObjectObjectHashMap<>(); @@ -56,7 +54,6 @@ public class ClusterStatsIndices implements ToXContent { this.queryCache = new QueryCacheStats(); this.completion = new CompletionStats(); this.segments = new SegmentsStats(); - this.percolatorCache = new PercolatorQueryCacheStats(); for (ClusterStatsNodeResponse r : nodeResponses) { for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) { @@ -79,7 +76,6 @@ public class ClusterStatsIndices implements ToXContent { queryCache.add(shardCommonStats.queryCache); completion.add(shardCommonStats.completion); segments.add(shardCommonStats.segments); - percolatorCache.add(shardCommonStats.percolatorCache); } } @@ -122,10 +118,6 @@ public class ClusterStatsIndices implements ToXContent { return segments; } - public PercolatorQueryCacheStats getPercolatorCache() { - return percolatorCache; - } - static final class Fields { static final String COUNT = "count"; } @@ -140,7 +132,6 @@ public class ClusterStatsIndices implements ToXContent { queryCache.toXContent(builder, params); completion.toXContent(builder, params); segments.toXContent(builder, params); - percolatorCache.toXContent(builder, params); return builder; } diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index bae7b20694d..3a0b1455209 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -55,8 +55,7 @@ public class TransportClusterStatsAction extends TransportNodesAction { private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, - CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments, - CommonStatsFlags.Flag.PercolatorCache); + CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments); private final NodeService nodeService; private final IndicesService indicesService; @@ -100,7 +99,7 @@ public class TransportClusterStatsAction extends TransportNodesAction { return flags.isSet(Flag.FieldData); } - public IndicesStatsRequest percolate(boolean percolate) { - flags.set(Flag.PercolatorCache, percolate); - return this; - } - - public boolean percolate() { - return flags.isSet(Flag.PercolatorCache); - } - public IndicesStatsRequest segments(boolean segments) { flags.set(Flag.Segments, segments); return this; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java index cad919cbd18..8e7afe3e7e3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java @@ -127,11 +127,6 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder return this; } - public IndicesStatsRequestBuilder setPercolate(boolean percolate) { - request.percolate(percolate); - return this; - } - public IndicesStatsRequestBuilder setSegments(boolean segments) { request.segments(segments); return this; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 8c12dfa9fda..7e8ccd30a8a 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -139,9 +139,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< flags.set(CommonStatsFlags.Flag.FieldData); flags.fieldDataFields(request.fieldDataFields()); } - if (request.percolate()) { - flags.set(CommonStatsFlags.Flag.PercolatorCache); - } if (request.segments()) { flags.set(CommonStatsFlags.Flag.Segments); flags.includeSegmentFileSizes(request.includeSegmentFileSizes()); @@ -163,6 +160,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< flags.set(CommonStatsFlags.Flag.Recovery); } - return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()); + return 
new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats()); } } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java index 9286601da69..9490abd0b68 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -165,9 +165,9 @@ public class PercolateRequestBuilder extends ActionRequestBuilder aggregationBuilder) { + public PercolateRequestBuilder addAggregation(AggregationBuilder aggregationBuilder) { sourceBuilder().addAggregation(aggregationBuilder); return this; } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index a6ee99a476c..5c69d3be50b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -53,7 +53,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { private List> sorts; private Boolean trackScores; private HighlightBuilder highlightBuilder; - private List> aggregationBuilders; + private List> aggregationBuilders; private List> pipelineAggregationBuilders; /** @@ -126,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { /** * Add an aggregation definition. 
*/ - public PercolateSourceBuilder addAggregation(AggregatorBuilder aggregationBuilder) { + public PercolateSourceBuilder addAggregation(AggregationBuilder aggregationBuilder) { if (aggregationBuilders == null) { aggregationBuilders = new ArrayList<>(); } @@ -175,7 +175,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { builder.field("aggregations"); builder.startObject(); if (aggregationBuilders != null) { - for (AggregatorBuilder aggregation : aggregationBuilders) { + for (AggregationBuilder aggregation : aggregationBuilders) { aggregation.toXContent(builder, params); } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 9830f7be203..5732d43b4c7 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -373,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder aggregation) { + public SearchRequestBuilder addAggregation(AggregationBuilder aggregation) { sourceBuilder().aggregation(aggregation); return this; } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 6d35cafd088..305a4fd30ae 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -177,15 +177,7 @@ final class Bootstrap { // install SM after natives, shutdown hooks, etc. Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings)); - // We do not need to reload system properties here as we have already applied them in building the settings and - // reloading could cause multiple prompts to the user for values if a system property was specified with a prompt - // placeholder - Settings nodeSettings = Settings.builder() - .put(settings) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) - .build(); - - node = new Node(nodeSettings) { + node = new Node(settings) { @Override protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) { BootstrapCheck.check(settings, boundTransportAddress); @@ -193,13 +185,13 @@ final class Bootstrap { }; } - private static Environment initialSettings(boolean foreground, String pidFile) { + private static Environment initialSettings(boolean foreground, String pidFile, Map esSettings) { Terminal terminal = foreground ? 
Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); if (Strings.hasLength(pidFile)) { builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile); } - return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal); + return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings); } private void start() { @@ -233,11 +225,13 @@ final class Bootstrap { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); - elasticsearchSettings(esSettings); + // force the class initializer for BootstrapInfo to run before + // the security manager is installed + BootstrapInfo.init(); INSTANCE = new Bootstrap(); - Environment environment = initialSettings(foreground, pidFile); + Environment environment = initialSettings(foreground, pidFile, esSettings); Settings settings = environment.settings(); LogConfigurator.configure(settings, true); checkForCustomConfFile(); @@ -295,13 +289,6 @@ final class Bootstrap { } } - @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") - private static void elasticsearchSettings(Map esSettings) { - for (Map.Entry esSetting : esSettings.entrySet()) { - System.setProperty(esSetting.getKey(), esSetting.getValue()); - } - } - @SuppressForbidden(reason = "System#out") private static void closeSystOut() { System.out.close(); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java index bd693951eb2..791836bf8a4 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java @@ -120,4 +120,8 @@ public final class BootstrapInfo { } return SYSTEM_PROPERTIES; } + + public static void init() { + } + } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index bb1f6cc87d5..b3259129473 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -21,28 +21,25 @@ package org.elasticsearch.bootstrap; import joptsimple.OptionSet; import joptsimple.OptionSpec; -import joptsimple.util.KeyValuePair; import org.elasticsearch.Build; -import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.monitor.jvm.JvmInfo; import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.Map; /** * This class starts elasticsearch. 
*/ -class Elasticsearch extends Command { +class Elasticsearch extends SettingCommand { private final OptionSpec versionOption; private final OptionSpec daemonizeOption; private final OptionSpec pidfileOption; - private final OptionSpec propertyOption; // visible for testing Elasticsearch() { @@ -56,7 +53,6 @@ class Elasticsearch extends Command { pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), "Creates a pid file in the specified path on start") .withRequiredArg(); - propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class); } /** @@ -75,7 +71,7 @@ class Elasticsearch extends Command { } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { if (options.nonOptionArguments().isEmpty() == false) { throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments()); } @@ -84,26 +80,15 @@ class Elasticsearch extends Command { throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option"); } terminal.println("Version: " + org.elasticsearch.Version.CURRENT - + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() - + ", JVM: " + JvmInfo.jvmInfo().version()); + + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + + ", JVM: " + JvmInfo.jvmInfo().version()); return; } final boolean daemonize = options.has(daemonizeOption); final String pidFile = pidfileOption.value(options); - final Map esSettings = new HashMap<>(); - for (final KeyValuePair kvp : propertyOption.values(options)) { - if (!kvp.key.startsWith("es.")) { - throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]"); - } - if (kvp.value.isEmpty()) { - throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty"); - } - esSettings.put(kvp.key, kvp.value); - } - - init(daemonize, pidFile, esSettings); + init(daemonize, pidFile, settings); } void init(final boolean daemonize, final String pidFile, final Map esSettings) { diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java index 1fc7c9fe74f..3e2faf13657 100644 --- a/core/src/main/java/org/elasticsearch/cli/Command.java +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -19,15 +19,15 @@ package org.elasticsearch.cli; -import java.io.IOException; -import java.util.Arrays; - import joptsimple.OptionException; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.elasticsearch.common.SuppressForbidden; +import java.io.IOException; +import java.util.Arrays; + /** * An action to execute within a cli. */ @@ -112,4 +112,5 @@ public abstract class Command { * * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */ protected abstract void execute(Terminal terminal, OptionSet options) throws Exception; + } diff --git a/core/src/main/java/org/elasticsearch/cli/SettingCommand.java b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java new file mode 100644 index 00000000000..868975ac6ff --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cli;
+
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+import joptsimple.util.KeyValuePair;
+
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+public abstract class SettingCommand extends Command {
+
+    private final OptionSpec<KeyValuePair> settingOption;
+
+    public SettingCommand(String description) {
+        super(description);
+        this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class);
+    }
+
+    @Override
+    protected void execute(Terminal terminal, OptionSet options) throws Exception {
+        final Map<String, String> settings = new HashMap<>();
+        for (final KeyValuePair kvp : settingOption.values(options)) {
+            if (kvp.value.isEmpty()) {
+                throw new UserError(ExitCodes.USAGE, "Setting [" + kvp.key + "] must not be empty");
+            }
+            settings.put(kvp.key, kvp.value);
+        }
+
+        putSystemPropertyIfSettingIsMissing(settings, "path.conf", "es.path.conf");
+        putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data");
+        putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home");
+        putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs");
+
+        execute(terminal, options, settings);
+    }
+
+    protected static void putSystemPropertyIfSettingIsMissing(final Map<String, String> settings, final String setting, final String key) {
+        final String value = System.getProperty(key);
+        if (value != null) {
+            if (settings.containsKey(setting)) {
+                final String message =
+                        String.format(
+                                Locale.ROOT,
+                                "duplicate setting [%s] found via command-line [%s] and system property [%s]",
+                                setting,
+                                settings.get(setting),
+                                value);
+                throw new IllegalArgumentException(message);
+            } else {
+                settings.put(setting, value);
+            }
+        }
+    }
+
+    protected abstract void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception;
+
+}
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 47dd2ce9ae6..a02e399ac0c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -49,6 +49,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDeci
 import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
+import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; @@ -79,6 +80,7 @@ public class ClusterModule extends AbstractModule { new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), Property.NodeScope); public static final List> DEFAULT_ALLOCATION_DECIDERS = Collections.unmodifiableList(Arrays.asList( + MaxRetryAllocationDecider.class, SameShardAllocationDecider.class, FilterAllocationDecider.class, ReplicaAfterPrimaryActiveAllocationDecider.class, diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 311ffae71fd..d3b5e7ecbad 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -281,8 +281,11 @@ public class MetaDataMappingService extends AbstractComponent { // Also the order of the mappings may be backwards. if (newMapper.parentFieldMapper().active()) { for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { - throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); + String parentType = newMapper.parentFieldMapper().type(); + if (parentType.equals(mapping.value.type()) && + indexService.mapperService().getParentTypes().contains(parentType) == false) { + throw new IllegalArgumentException("can't add a _parent field that points to an " + + "already existing type, that isn't already a parent"); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index 2670363364d..bc44cd1701c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -48,7 +48,6 @@ public final class UnassignedInfo implements ToXContent, Writeable { public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, Property.Dynamic, Property.IndexScope); - /** * Reason why the shard is in unassigned state. *

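Taken together, the reroute changes above add a retry_failed flag to the reroute API: ClusterRerouteRequest and its builder carry the flag, TransportClusterRerouteAction passes it into AllocationService.reroute(), and the MaxRetryAllocationDecider registered in ClusterModule honors it. A minimal sketch of how a caller might trigger a manual retry from the Java client, assuming an already-connected Client instance (the helper name is hypothetical, not part of this change):

--------------------------------
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.client.Client;

// Hypothetical helper: ask the master to retry shards whose allocation has
// already failed index.allocation.max_retries times (see the decider below).
static ClusterRerouteResponse retryFailedAllocations(Client client) {
    return client.admin().cluster()
            .prepareReroute()
            .setRetryFailed(true) // flag introduced in this change
            .get();
}
--------------------------------

This is the programmatic equivalent of the /_cluster/reroute?retry_failed=true REST call referenced in the MaxRetryAllocationDecider javadoc further down.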
@@ -103,7 +102,11 @@ public final class UnassignedInfo implements ToXContent, Writeable {
     /**
      * A better replica location is identified and causes the existing replica allocation to be cancelled.
      */
-    REALLOCATED_REPLICA;
+    REALLOCATED_REPLICA,
+    /**
+     * Unassigned as a result of a failed primary while the replica was initializing.
+     */
+    PRIMARY_FAILED;
 }
 private final Reason reason;
@@ -112,6 +115,7 @@ public final class UnassignedInfo implements ToXContent, Writeable {
     private final long lastComputedLeftDelayNanos; // how long to delay shard allocation, not serialized (always positive, 0 means no delay)
     private final String message;
     private final Throwable failure;
+    private final int failedAllocations;
 
     /**
      * creates an UnassignedInfo object based on **current** time
      *
      * @param message more information about cause.
      **/
     public UnassignedInfo(Reason reason, String message) {
-        this(reason, message, null, System.nanoTime(), System.currentTimeMillis());
+        this(reason, message, null, reason == Reason.ALLOCATION_FAILED ? 1 : 0, System.nanoTime(), System.currentTimeMillis());
     }
 
     /**
@@ -130,13 +134,16 @@ public final class UnassignedInfo implements ToXContent, Writeable {
      * @param unassignedTimeNanos the time to use as the base for any delayed re-assignment calculation
      * @param unassignedTimeMillis the time of unassignment, used for display in our reporting.
      */
-    public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, long unassignedTimeNanos, long unassignedTimeMillis) {
+    public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, int failedAllocations, long unassignedTimeNanos, long unassignedTimeMillis) {
         this.reason = reason;
         this.unassignedTimeMillis = unassignedTimeMillis;
         this.unassignedTimeNanos = unassignedTimeNanos;
         this.lastComputedLeftDelayNanos = 0L;
         this.message = message;
         this.failure = failure;
+        this.failedAllocations = failedAllocations;
+        assert (failedAllocations > 0) == (reason == Reason.ALLOCATION_FAILED):
+            "failedAllocations: " + failedAllocations + " for reason " + reason;
         assert !(message == null && failure != null) : "provide a message if a failure exception is provided";
     }
 
@@ -147,17 +154,19 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         this.lastComputedLeftDelayNanos = newComputedLeftDelayNanos;
         this.message = unassignedInfo.message;
         this.failure = unassignedInfo.failure;
+        this.failedAllocations = unassignedInfo.failedAllocations;
     }
 
     public UnassignedInfo(StreamInput in) throws IOException {
         this.reason = Reason.values()[(int) in.readByte()];
         this.unassignedTimeMillis = in.readLong();
         // As System.nanoTime() cannot be compared across different JVMs, reset it to now.
-        // This means that in master failover situations, elapsed delay time is forgotten.
+        // This means that in master fail-over situations, elapsed delay time is forgotten.
this.unassignedTimeNanos = System.nanoTime(); this.lastComputedLeftDelayNanos = 0L; this.message = in.readOptionalString(); this.failure = in.readThrowable(); + this.failedAllocations = in.readVInt(); } public void writeTo(StreamOutput out) throws IOException { @@ -166,12 +175,18 @@ public final class UnassignedInfo implements ToXContent, Writeable { // Do not serialize unassignedTimeNanos as System.nanoTime() cannot be compared across different JVMs out.writeOptionalString(message); out.writeThrowable(failure); + out.writeVInt(failedAllocations); } public UnassignedInfo readFrom(StreamInput in) throws IOException { return new UnassignedInfo(in); } + /** + * Returns the number of previously failed allocations of this shard. + */ + public int getNumFailedAllocations() { return failedAllocations; } + /** * The reason why the shard is unassigned. */ @@ -325,7 +340,11 @@ public final class UnassignedInfo implements ToXContent, Writeable { StringBuilder sb = new StringBuilder(); sb.append("[reason=").append(reason).append("]"); sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)).append("]"); + if (failedAllocations > 0) { + sb.append(", failed_attempts[").append(failedAllocations).append("]"); + } String details = getDetails(); + if (details != null) { sb.append(", details[").append(details).append("]"); } @@ -342,6 +361,9 @@ public final class UnassignedInfo implements ToXContent, Writeable { builder.startObject("unassigned_info"); builder.field("reason", reason); builder.field("at", DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)); + if (failedAllocations > 0) { + builder.field("failed_attempts", failedAllocations); + } String details = getDetails(); if (details != null) { builder.field("details", details); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index e1bbbb7f4ab..d59113675d8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -222,8 +222,10 @@ public class AllocationService extends AbstractComponent { List orderedFailedShards = new ArrayList<>(failedShards); orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary())); for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) { + UnassignedInfo unassignedInfo = failedShard.shard.unassignedInfo(); + final int failedAllocations = unassignedInfo != null ? 
unassignedInfo.getNumFailedAllocations() : 0; changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, - System.nanoTime(), System.currentTimeMillis())); + failedAllocations + 1, System.nanoTime(), System.currentTimeMillis())); } if (!changed) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); @@ -257,16 +259,13 @@ public class AllocationService extends AbstractComponent { .collect(Collectors.joining(", ")); } - public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands) { - return reroute(clusterState, commands, false); - } - - public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain) { + public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain, boolean retryFailed) { RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); // we don't shuffle the unassigned shards here, to try and get as close as possible to // a consistent result of the effect the commands have on the routing // this allows systems to dry run the commands, see the resulting cluster state, and act on it - RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); + RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, + clusterInfoService.getClusterInfo(), currentNanoTime(), retryFailed); // don't short circuit deciders, we want a full explanation allocation.debugDecision(true); // we ignore disable allocation, because commands are explicit @@ -305,7 +304,8 @@ public class AllocationService extends AbstractComponent { RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); // shuffle the unassigned nodes, just so we won't have things like poison failed shards routingNodes.unassigned().shuffle(); - RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); + RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, + clusterInfoService.getClusterInfo(), currentNanoTime(), false); allocation.debugDecision(debug); if (!reroute(allocation)) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); @@ -437,7 +437,7 @@ public class AllocationService extends AbstractComponent { // now, go over all the shards routing on the node, and fail them for (ShardRouting shardRouting : node.copyShards()) { UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "node_left[" + node.nodeId() + "]", null, - allocation.getCurrentNanoTime(), System.currentTimeMillis()); + 0, allocation.getCurrentNanoTime(), System.currentTimeMillis()); applyFailedShard(allocation, shardRouting, false, unassignedInfo); } // its a dead node, remove it, note, its important to remove it *after* we apply failed shard @@ -457,8 +457,8 @@ public class AllocationService extends AbstractComponent { boolean changed = false; for (ShardRouting routing : replicas) { changed |= applyFailedShard(allocation, routing, false, - new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + new 
UnassignedInfo(UnassignedInfo.Reason.PRIMARY_FAILED, "primary failed while replica initializing", + null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis())); } return changed; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java index a13862fed26..ef2e42eed76 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java @@ -58,7 +58,7 @@ public class FailedRerouteAllocation extends RoutingAllocation { private final List failedShards; public FailedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List failedShards, ClusterInfo clusterInfo) { - super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); + super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false); this.failedShards = failedShards; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java index 60ca3a8d5fd..0df8074e14c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java @@ -134,6 +134,8 @@ public class RoutingAllocation { private boolean ignoreDisable = false; + private final boolean retryFailed; + private boolean debugDecision = false; private boolean hasPendingAsyncFetch = false; @@ -148,7 +150,7 @@ public class RoutingAllocation { * @param clusterState cluster state before rerouting * @param currentNanoTime the nano time to use for all delay allocation calculation (typically {@link System#nanoTime()}) */ - public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime) { + public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime, boolean retryFailed) { this.deciders = deciders; this.routingNodes = routingNodes; this.metaData = clusterState.metaData(); @@ -156,6 +158,7 @@ public class RoutingAllocation { this.customs = clusterState.customs(); this.clusterInfo = clusterInfo; this.currentNanoTime = currentNanoTime; + this.retryFailed = retryFailed; } /** returns the nano time captured at the beginning of the allocation. 
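The RoutingAllocation constructor above now takes an explicit retryFailed argument, so every allocation pass states whether it may bypass the failure counter, and deciders read it back through isRetryFailed(). A compilable sketch under that assumption, with all dependencies passed in (this helper is illustrative, not taken from the diff):

--------------------------------
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;

// Sketch: build an allocation pass that is allowed to retry shards which have
// exhausted their failed-allocation budget; only the manual reroute path
// (retryFailed=true) should do this.
static RoutingAllocation manualRetryPass(AllocationDeciders deciders, RoutingNodes nodes,
                                         ClusterState state, ClusterInfo info) {
    return new RoutingAllocation(deciders, nodes, state, info, System.nanoTime(), true);
}
--------------------------------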
used to make sure all time based decisions are aligned */ @@ -297,4 +300,8 @@ public class RoutingAllocation { public void setHasPendingAsyncFetch() { this.hasPendingAsyncFetch = true; } + + public boolean isRetryFailed() { + return retryFailed; + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java index e9570edd9c3..0f55ab4fda1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java @@ -36,7 +36,7 @@ public class StartedRerouteAllocation extends RoutingAllocation { private final List startedShards; public StartedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List startedShards, ClusterInfo clusterInfo) { - super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); + super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false); this.startedShards = startedShards; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index d4191292cfc..c80afde3086 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -125,7 +125,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation // we need to move the unassigned info back to treat it as if it was index creation unassignedInfoToUpdate = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + shardRouting.unassignedInfo().getMessage(), - shardRouting.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis()); + shardRouting.unassignedInfo().getFailure(), 0, System.nanoTime(), System.currentTimeMillis()); } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java new file mode 100644 index 00000000000..6a8a0ccc5fa --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cluster.routing.allocation.decider;
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+/**
+ * An allocation decider that prevents shards from being allocated on any node if the shard's allocation has been retried N times without
+ * success. This means if a shard has been INITIALIZING N times in a row without being moved to STARTED, the shard will be ignored until
+ * the setting for index.allocation.max_retries is raised. The default value is 5.
+ * Note: This allocation decider also allows allocation of repeatedly failing shards when the /_cluster/reroute?retry_failed=true
+ * API is manually invoked. This allows single retries without raising the limits.
+ *
+ * @see RoutingAllocation#isRetryFailed()
+ */
+public class MaxRetryAllocationDecider extends AllocationDecider {
+
+    public static final Setting<Integer> SETTING_ALLOCATION_MAX_RETRY = Setting.intSetting("index.allocation.max_retries", 5, 0,
+        Setting.Property.Dynamic, Setting.Property.IndexScope);
+
+    public static final String NAME = "max_retry";
+
+    /**
+     * Initializes a new {@link MaxRetryAllocationDecider}
+     *
+     * @param settings {@link Settings} used by this {@link AllocationDecider}
+     */
+    @Inject
+    public MaxRetryAllocationDecider(Settings settings) {
+        super(settings);
+    }
+
+    @Override
+    public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) {
+        UnassignedInfo unassignedInfo = shardRouting.unassignedInfo();
+        if (unassignedInfo != null && unassignedInfo.getNumFailedAllocations() > 0) {
+            final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index());
+            final int maxRetry = SETTING_ALLOCATION_MAX_RETRY.get(indexMetaData.getSettings());
+            if (allocation.isRetryFailed()) { // manual allocation - retry
+                // if we are called via the _reroute API we ignore the failure counter and try to allocate
+                // this improves the usability since people don't need to raise the limits to issue retries since a simple _reroute call is
+                // enough to manually retry.
+                return allocation.decision(Decision.YES, NAME, "shard has already failed allocating ["
+                    + unassignedInfo.getNumFailedAllocations() + "] times vs. [" + maxRetry + "] retries allowed "
+                    + unassignedInfo.toString() + " - retrying once on manual allocation");
+            } else if (unassignedInfo.getNumFailedAllocations() >= maxRetry) {
+                return allocation.decision(Decision.NO, NAME, "shard has already failed allocating ["
+                    + unassignedInfo.getNumFailedAllocations() + "] times vs.
[" + maxRetry + "] retries allowed " + + unassignedInfo.toString() + " - manually call [/_cluster/reroute?retry_failed=true] to retry"); + } + } + return allocation.decision(Decision.YES, NAME, "shard has no previous failures"); + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return canAllocate(shardRouting, allocation); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index e903c963023..c49b0364e28 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -23,7 +23,6 @@ import org.apache.log4j.Java9Hack; import org.apache.log4j.PropertyConfigurator; import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -99,8 +98,7 @@ public class LogConfigurator { /** * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders. - * - * @param settings custom settings that should be applied + * @param settings custom settings that should be applied * @param resolveConfig controls whether the logging conf file should be read too or not. */ public static void configure(Settings settings, boolean resolveConfig) { @@ -115,7 +113,7 @@ public class LogConfigurator { if (resolveConfig) { resolveConfig(environment, settingsBuilder); } - settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties()); + // add custom settings after config was added so that they are not overwritten by config settingsBuilder.put(settings); settingsBuilder.replacePropertyPlaceholders(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e66534a4feb..36ee01484e6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -87,6 +87,7 @@ import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -374,7 +375,6 @@ public final class ClusterSettings extends AbstractScopedSettings { BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX, ClusterName.CLUSTER_NAME_SETTING, Client.CLIENT_TYPE_SETTING_S, - InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING, ThreadContext.DEFAULT_HEADERS_SETTING, @@ -420,6 +420,7 @@ public final class ClusterSettings extends AbstractScopedSettings { ResourceWatcherService.ENABLED, ResourceWatcherService.RELOAD_INTERVAL_HIGH, ResourceWatcherService.RELOAD_INTERVAL_MEDIUM, - ResourceWatcherService.RELOAD_INTERVAL_LOW + ResourceWatcherService.RELOAD_INTERVAL_LOW, + SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING ))); } diff --git 
a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
index 1b795239457..203d1db76b3 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
@@ -21,6 +21,7 @@ package org.elasticsearch.common.settings;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
+import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.gateway.PrimaryShardAllocator;
@@ -35,12 +36,11 @@ import org.elasticsearch.index.engine.EngineConfig;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.percolator.PercolatorQueryCache;
+import org.elasticsearch.index.percolator.PercolatorFieldMapper;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.index.store.FsDirectoryService;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.Store;
-import org.elasticsearch.index.IndexWarmer;
 import org.elasticsearch.indices.IndicesRequestCache;
 
 import java.util.Arrays;
@@ -59,6 +59,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
     public static final Predicate<String> INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX);
 
     public static final Set<Setting<?>> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+        MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY,
         IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING,
         IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING,
         IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
@@ -126,7 +127,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
         FieldMapper.IGNORE_MALFORMED_SETTING,
         FieldMapper.COERCE_SETTING,
         Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING,
-        PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING,
+        PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING,
         MapperService.INDEX_MAPPER_DYNAMIC_SETTING,
         MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING,
         MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING,
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index 1efb65c18b1..1be1fa8f3f4 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -537,6 +537,10 @@ public class Setting<T> extends ToXContentToBytes {
         return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties);
     }
 
+    public static Setting<Boolean> boolSetting(String key, Function<Settings, String> defaultValueFn, Property... properties) {
+        return new Setting<>(key, defaultValueFn, Booleans::parseBooleanExact, properties);
+    }
+
     public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, Property... 
properties) { return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 8488ca75c73..15554e5ccaa 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -58,9 +58,11 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.common.unit.SizeValue.parseSizeValue; @@ -942,89 +944,54 @@ public final class Settings implements ToXContent { return this; } - /** - * Puts all the properties with keys starting with the provided prefix. - * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary properties) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - map.put(key.substring(prefix.length()), value); + public Builder putProperties(Map esSettings, Predicate keyPredicate, Function keyFunction) { + for (final Map.Entry esSetting : esSettings.entrySet()) { + final String key = esSetting.getKey(); + if (keyPredicate.test(key)) { + map.put(keyFunction.apply(key), esSetting.getValue()); } } return this; } /** - * Puts all the properties with keys starting with the provided prefix. - * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary properties, String ignorePrefix) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - if (!key.startsWith(ignorePrefix)) { - map.put(key.substring(prefix.length()), value); - } - } - } - return this; - } - - /** - * Runs across all the settings set on this builder and replaces ${...} elements in the - * each setting value according to the following logic: - *

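As an aside on the new `putProperties(Map, Predicate, Function)` signature shown above: the predicate/key-function pair replaces the old prefix-and-ignore overloads being deleted here. A minimal sketch of how a caller might filter keys and strip a prefix while copying; the map contents are hypothetical, and the `Settings.builder()` factory is assumed from the 5.x API:

--------------------------------
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.settings.Settings;

public class PutPropertiesSketch {
    public static void main(String[] args) {
        // hypothetical raw properties, e.g. harvested from the process environment
        Map<String, String> esSettings = new HashMap<>();
        esSettings.put("es.cluster.name", "dev-cluster");
        esSettings.put("unrelated.key", "skipped by the predicate");

        Settings settings = Settings.builder()
                // keep only "es."-prefixed keys, stripping the prefix while copying
                .putProperties(esSettings,
                        key -> key.startsWith("es."),
                        key -> key.substring("es.".length()))
                .build();

        assert "dev-cluster".equals(settings.get("cluster.name"));
    }
}
--------------------------------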
- * First, tries to resolve it against a System property ({@link System#getProperty(String)}), next,
- * tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries
- * and replace it with another setting already set on this builder.
+ * Runs across all the settings set on this builder and replaces ${...} elements in each
+ * setting, first with a matching environment variable and, failing that, with another
+ * setting already set on this builder.
  */
 public Builder replacePropertyPlaceholders() {
+    return replacePropertyPlaceholders(System::getenv);
+}
+
+// visible for testing
+Builder replacePropertyPlaceholders(Function<String, String> getenv) {
     PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false);
     PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() {
-        @Override
-        public String resolvePlaceholder(String placeholderName) {
-            if (placeholderName.startsWith("env.")) {
-                // explicit env var prefix
-                return System.getenv(placeholderName.substring("env.".length()));
-            }
-            String value = System.getProperty(placeholderName);
-            if (value != null) {
-                return value;
-            }
-            value = System.getenv(placeholderName);
-            if (value != null) {
-                return value;
-            }
-            return map.get(placeholderName);
+        @Override
+        public String resolvePlaceholder(String placeholderName) {
+            final String value = getenv.apply(placeholderName);
+            if (value != null) {
+                return value;
             }
+            return map.get(placeholderName);
         }

-        @Override
-        public boolean shouldIgnoreMissing(String placeholderName) {
-            // if its an explicit env var, we are ok with not having a value for it and treat it as optional
-            if (placeholderName.startsWith("env.") || placeholderName.startsWith("prompt.")) {
-                return true;
-            }
-            return false;
-        }
-
-        @Override
-        public boolean shouldRemoveMissingPlaceholder(String placeholderName) {
-            if (placeholderName.startsWith("prompt.")) {
-                return false;
-            }
+        @Override
+        public boolean shouldIgnoreMissing(String placeholderName) {
+            if (placeholderName.startsWith("prompt.")) {
                 return true;
             }
-        };
+            return false;
+        }
+
+        @Override
+        public boolean shouldRemoveMissingPlaceholder(String placeholderName) {
+            if (placeholderName.startsWith("prompt.")) {
+                return false;
+            }
+            return true;
+        }
+    };
     for (Map.Entry<String, String> entry : new HashMap<>(map).entrySet()) {
         String value = propertyPlaceholder.replacePlaceholders(entry.getKey(), entry.getValue(), placeholderResolver);
         // if the values exists and has length, we should maintain it in the map
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
index 2e7acd6ae8c..2ed5ffd86cd 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
@@ -65,7 +65,12 @@ public class SettingsModule extends AbstractModule {
     protected void configure() {
         final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
         final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
-        Settings indexSettings = settings.filter((s) -> s.startsWith("index.") && clusterSettings.get(s) == null);
+        Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") &&
+            // special case: we want the "did you mean" suggestion to propose indices.query.bool.max_clause_count
+            // for this setting, so we bypass the cluster-settings check here
+            // TODO remove in 6.0!!
+ "index.query.bool.max_clause_count".equals(s) == false) + && clusterSettings.get(s) == null); if (indexSettings.isEmpty() == false) { try { String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim(); diff --git a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java deleted file mode 100644 index 6c471cddb55..00000000000 --- a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java +++ /dev/null @@ -1,629 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.util; - -import org.apache.lucene.store.DataInput; -import org.apache.lucene.store.DataOutput; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.hash.MurmurHash3; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.SizeValue; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; - -/** - * A bloom filter. Inspired by Guava bloom filter implementation though with some optimizations. - */ -public class BloomFilter { - - /** - * A factory that can use different fpp based on size. - */ - public static class Factory { - - public static final Factory DEFAULT = buildDefault(); - - private static Factory buildDefault() { - // Some numbers: - // 10k =0.001: 140.4kb , 10 Hashes - // 10k =0.01 : 93.6kb , 6 Hashes - // 100k=0.01 : 936.0kb , 6 Hashes - // 100k=0.03 : 712.7kb , 5 Hashes - // 500k=0.01 : 4.5mb , 6 Hashes - // 500k=0.03 : 3.4mb , 5 Hashes - // 500k=0.05 : 2.9mb , 4 Hashes - // 1m=0.01 : 9.1mb , 6 Hashes - // 1m=0.03 : 6.9mb , 5 Hashes - // 1m=0.05 : 5.9mb , 4 Hashes - // 5m=0.01 : 45.7mb , 6 Hashes - // 5m=0.03 : 34.8mb , 5 Hashes - // 5m=0.05 : 29.7mb , 4 Hashes - // 50m=0.01 : 457.0mb , 6 Hashes - // 50m=0.03 : 297.3mb , 4 Hashes - // 50m=0.10 : 228.5mb , 3 Hashes - return buildFromString("10k=0.01,1m=0.03"); - } - - /** - * Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If - * its null, returns {@link #buildDefault()}. 
- */ - public static Factory buildFromString(@Nullable String config) { - if (config == null) { - return buildDefault(); - } - String[] sEntries = config.split(","); - if (sEntries.length == 0) { - if (config.length() > 0) { - return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))}); - } - return buildDefault(); - } - Entry[] entries = new Entry[sEntries.length]; - for (int i = 0; i < sEntries.length; i++) { - int index = sEntries[i].indexOf('='); - entries[i] = new Entry( - (int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(), - Double.parseDouble(sEntries[i].substring(index + 1).trim()) - ); - } - return new Factory(entries); - } - - private final Entry[] entries; - - public Factory(Entry[] entries) { - this.entries = entries; - // the order is from the upper most expected insertions to the lowest - Arrays.sort(this.entries, new Comparator() { - @Override - public int compare(Entry o1, Entry o2) { - return o2.expectedInsertions - o1.expectedInsertions; - } - }); - } - - public BloomFilter createFilter(int expectedInsertions) { - for (Entry entry : entries) { - if (expectedInsertions > entry.expectedInsertions) { - return BloomFilter.create(expectedInsertions, entry.fpp); - } - } - return BloomFilter.create(expectedInsertions, 0.03); - } - - public static class Entry { - public final int expectedInsertions; - public final double fpp; - - Entry(int expectedInsertions, double fpp) { - this.expectedInsertions = expectedInsertions; - this.fpp = fpp; - } - } - } - - /** - * Creates a bloom filter based on the with the expected number - * of insertions and expected false positive probability. - * - * @param expectedInsertions the number of expected insertions to the constructed - * @param fpp the desired false positive probability (must be positive and less than 1.0) - */ - public static BloomFilter create(int expectedInsertions, double fpp) { - return create(expectedInsertions, fpp, -1); - } - - /** - * Creates a bloom filter based on the expected number of insertions, expected false positive probability, - * and number of hash functions. - * - * @param expectedInsertions the number of expected insertions to the constructed - * @param fpp the desired false positive probability (must be positive and less than 1.0) - * @param numHashFunctions the number of hash functions to use (must be less than or equal to 255) - */ - public static BloomFilter create(int expectedInsertions, double fpp, int numHashFunctions) { - if (expectedInsertions == 0) { - expectedInsertions = 1; - } - /* - * TODO(user): Put a warning in the javadoc about tiny fpp values, - * since the resulting size is proportional to -log(p), but there is not - * much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680 - * which is less that 10kb. Who cares! 
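For readers tracing the deleted factory logic above: entries are sorted by expected insertions, descending, and `createFilter` picks the fpp of the first entry whose threshold the requested size exceeds, falling back to 0.03. A small sketch against the API being removed in this commit (it compiles only on trees that still ship `BloomFilter`):

--------------------------------
import org.elasticsearch.common.util.BloomFilter;

public class BloomFactorySketch {
    public static void main(String[] args) {
        // "10k=0.01,1m=0.03": at or below 10k insertions nothing matches (default 0.03),
        // above 10k use fpp 0.01, above 1m fall back to the cheaper fpp 0.03
        BloomFilter.Factory factory = BloomFilter.Factory.buildFromString("10k=0.01,1m=0.03");

        BloomFilter small = factory.createFilter(5_000);      // no entry exceeded -> default fpp 0.03
        BloomFilter medium = factory.createFilter(50_000);    // 50k > 10k        -> fpp 0.01
        BloomFilter large = factory.createFilter(5_000_000);  // 5m > 1m          -> fpp 0.03
    }
}
--------------------------------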
- */ - long numBits = optimalNumOfBits(expectedInsertions, fpp); - - // calculate the optimal number of hash functions - if (numHashFunctions == -1) { - numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits); - } - - try { - return new BloomFilter(new BitArray(numBits), numHashFunctions, Hashing.DEFAULT); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e); - } - } - - public static void skipBloom(IndexInput in) throws IOException { - int version = in.readInt(); // we do nothing with this now..., defaults to 0 - final int numLongs = in.readInt(); - in.seek(in.getFilePointer() + (numLongs * 8) + 4 + 4); // filter + numberOfHashFunctions + hashType - } - - public static BloomFilter deserialize(DataInput in) throws IOException { - int version = in.readInt(); // we do nothing with this now..., defaults to 0 - int numLongs = in.readInt(); - long[] data = new long[numLongs]; - for (int i = 0; i < numLongs; i++) { - data[i] = in.readLong(); - } - int numberOfHashFunctions = in.readInt(); - int hashType = in.readInt(); - return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType)); - } - - public static void serilaize(BloomFilter filter, DataOutput out) throws IOException { - out.writeInt(0); // version - BitArray bits = filter.bits; - out.writeInt(bits.data.length); - for (long l : bits.data) { - out.writeLong(l); - } - out.writeInt(filter.numHashFunctions); - out.writeInt(filter.hashing.type()); // hashType - } - - public static BloomFilter readFrom(StreamInput in) throws IOException { - int version = in.readVInt(); // we do nothing with this now..., defaults to 0 - int numLongs = in.readVInt(); - long[] data = new long[numLongs]; - for (int i = 0; i < numLongs; i++) { - data[i] = in.readLong(); - } - int numberOfHashFunctions = in.readVInt(); - int hashType = in.readVInt(); // again, nothing to do now... - return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType)); - } - - public static void writeTo(BloomFilter filter, StreamOutput out) throws IOException { - out.writeVInt(0); // version - BitArray bits = filter.bits; - out.writeVInt(bits.data.length); - for (long l : bits.data) { - out.writeLong(l); - } - out.writeVInt(filter.numHashFunctions); - out.writeVInt(filter.hashing.type()); // hashType - } - - /** - * The bit set of the BloomFilter (not necessarily power of 2!) - */ - final BitArray bits; - /** - * Number of hashes per element - */ - final int numHashFunctions; - - final Hashing hashing; - - BloomFilter(BitArray bits, int numHashFunctions, Hashing hashing) { - this.bits = bits; - this.numHashFunctions = numHashFunctions; - this.hashing = hashing; - /* - * This only exists to forbid BFs that cannot use the compact persistent representation. 
- * If it ever throws, at a user who was not intending to use that representation, we should - * reconsider - */ - if (numHashFunctions > 255) { - throw new IllegalArgumentException("Currently we don't allow BloomFilters that would use more than 255 hash functions"); - } - } - - public boolean put(BytesRef value) { - return hashing.put(value, numHashFunctions, bits); - } - - public boolean mightContain(BytesRef value) { - return hashing.mightContain(value, numHashFunctions, bits); - } - - public int getNumHashFunctions() { - return this.numHashFunctions; - } - - public long getSizeInBytes() { - return bits.ramBytesUsed(); - } - - @Override - public int hashCode() { - return bits.hashCode() + numHashFunctions; - } - - /* - * Cheat sheet: - * - * m: total bits - * n: expected insertions - * b: m/n, bits per insertion - - * p: expected false positive probability - * - * 1) Optimal k = b * ln2 - * 2) p = (1 - e ^ (-kn/m))^k - * 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b - * 4) For optimal k: m = -nlnp / ((ln2) ^ 2) - */ - - /** - * Computes the optimal k (number of hashes per element inserted in Bloom filter), given the - * expected insertions and total number of bits in the Bloom filter. - *

- * See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula. - * - * @param n expected insertions (must be positive) - * @param m total number of bits in Bloom filter (must be positive) - */ - static int optimalNumOfHashFunctions(long n, long m) { - return Math.max(1, (int) Math.round(m / n * Math.log(2))); - } - - /** - * Computes m (total bits of Bloom filter) which is expected to achieve, for the specified - * expected insertions, the required false positive probability. - *

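The two estimators here are the standard Bloom filter formulas from the cheat sheet above; note that the kb/mb figures in the sizing table near the top of this class count bits in 1024-based units, not bytes. A worked example for the "1m=0.03 : 6.9mb , 5 Hashes" row:

--------------------------------
public class BloomMathSketch {
    public static void main(String[] args) {
        long n = 1_000_000;   // expected insertions
        double p = 0.03;      // target false positive probability

        // m = -n * ln(p) / (ln 2)^2 ~= 7,298,000 bits;
        // 7,298,000 / (1024 * 1024) ~= 6.9 "mb" in the table's units
        long m = (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));

        // k = round(m/n * ln 2) = round(5.06) = 5 hash functions
        int k = Math.max(1, (int) Math.round((double) m / n * Math.log(2)));

        System.out.println("m=" + m + " bits, k=" + k);
    }
}
--------------------------------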
- * See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula. - * - * @param n expected insertions (must be positive) - * @param p false positive rate (must be 0 < p < 1) - */ - static long optimalNumOfBits(long n, double p) { - if (p == 0) { - p = Double.MIN_VALUE; - } - return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2))); - } - - // Note: We use this instead of java.util.BitSet because we need access to the long[] data field - static final class BitArray { - final long[] data; - final long bitSize; - long bitCount; - - BitArray(long bits) { - this(new long[size(bits)]); - } - - private static int size(long bits) { - long quotient = bits / 64; - long remainder = bits - quotient * 64; - return Math.toIntExact(remainder == 0 ? quotient : 1 + quotient); - } - - // Used by serialization - BitArray(long[] data) { - this.data = data; - long bitCount = 0; - for (long value : data) { - bitCount += Long.bitCount(value); - } - this.bitCount = bitCount; - this.bitSize = data.length * Long.SIZE; - } - - /** Returns true if the bit changed value. */ - boolean set(long index) { - if (!get(index)) { - data[(int) (index >>> 6)] |= (1L << index); - bitCount++; - return true; - } - return false; - } - - boolean get(long index) { - return (data[(int) (index >>> 6)] & (1L << index)) != 0; - } - - /** Number of bits */ - long bitSize() { - return bitSize; - } - - /** Number of set bits (1s) */ - long bitCount() { - return bitCount; - } - - BitArray copy() { - return new BitArray(data.clone()); - } - - /** Combines the two BitArrays using bitwise OR. */ - void putAll(BitArray array) { - bitCount = 0; - for (int i = 0; i < data.length; i++) { - data[i] |= array.data[i]; - bitCount += Long.bitCount(data[i]); - } - } - - @Override public boolean equals(Object o) { - if (o instanceof BitArray) { - BitArray bitArray = (BitArray) o; - return Arrays.equals(data, bitArray.data); - } - return false; - } - - @Override public int hashCode() { - return Arrays.hashCode(data); - } - - public long ramBytesUsed() { - return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16; - } - } - - static enum Hashing { - - V0() { - @Override - protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0); - int hash1 = (int) hash64; - int hash2 = (int) (hash64 >>> 32); - boolean bitsChanged = false; - for (int i = 1; i <= numHashFunctions; i++) { - int nextHash = hash1 + i * hash2; - if (nextHash < 0) { - nextHash = ~nextHash; - } - bitsChanged |= bits.set(nextHash % bitSize); - } - return bitsChanged; - } - - @Override - protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0); - int hash1 = (int) hash64; - int hash2 = (int) (hash64 >>> 32); - for (int i = 1; i <= numHashFunctions; i++) { - int nextHash = hash1 + i * hash2; - if (nextHash < 0) { - nextHash = ~nextHash; - } - if (!bits.get(nextHash % bitSize)) { - return false; - } - } - return true; - } - - @Override - protected int type() { - return 0; - } - }, - V1() { - @Override - protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128()); - - boolean bitsChanged = false; - long 
combinedHash = hash128.h1; - for (int i = 0; i < numHashFunctions; i++) { - // Make the combined hash positive and indexable - bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize); - combinedHash += hash128.h2; - } - return bitsChanged; - } - - @Override - protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128()); - - long combinedHash = hash128.h1; - for (int i = 0; i < numHashFunctions; i++) { - // Make the combined hash positive and indexable - if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) { - return false; - } - combinedHash += hash128.h2; - } - return true; - } - - @Override - protected int type() { - return 1; - } - } - ; - - protected abstract boolean put(BytesRef value, int numHashFunctions, BitArray bits); - - protected abstract boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits); - - protected abstract int type(); - - public static final Hashing DEFAULT = Hashing.V1; - - public static Hashing fromType(int type) { - if (type == 0) { - return Hashing.V0; - } if (type == 1) { - return Hashing.V1; - } else { - throw new IllegalArgumentException("no hashing type matching " + type); - } - } - } - - // START : MURMUR 3_128 USED FOR Hashing.V0 - // NOTE: don't replace this code with the o.e.common.hashing.MurmurHash3 method which returns a different hash - - protected static long getblock(byte[] key, int offset, int index) { - int i_8 = index << 3; - int blockOffset = offset + i_8; - return ((long) key[blockOffset + 0] & 0xff) + (((long) key[blockOffset + 1] & 0xff) << 8) + - (((long) key[blockOffset + 2] & 0xff) << 16) + (((long) key[blockOffset + 3] & 0xff) << 24) + - (((long) key[blockOffset + 4] & 0xff) << 32) + (((long) key[blockOffset + 5] & 0xff) << 40) + - (((long) key[blockOffset + 6] & 0xff) << 48) + (((long) key[blockOffset + 7] & 0xff) << 56); - } - - protected static long rotl64(long v, int n) { - return ((v << n) | (v >>> (64 - n))); - } - - protected static long fmix(long k) { - k ^= k >>> 33; - k *= 0xff51afd7ed558ccdL; - k ^= k >>> 33; - k *= 0xc4ceb9fe1a85ec53L; - k ^= k >>> 33; - - return k; - } - - @SuppressWarnings("fallthrough") // Uses fallthrough to implement a well know hashing algorithm - public static long hash3_x64_128(byte[] key, int offset, int length, long seed) { - final int nblocks = length >> 4; // Process as 128-bit blocks. - - long h1 = seed; - long h2 = seed; - - long c1 = 0x87c37b91114253d5L; - long c2 = 0x4cf5ad432745937fL; - - //---------- - // body - - for (int i = 0; i < nblocks; i++) { - long k1 = getblock(key, offset, i * 2 + 0); - long k2 = getblock(key, offset, i * 2 + 1); - - k1 *= c1; - k1 = rotl64(k1, 31); - k1 *= c2; - h1 ^= k1; - - h1 = rotl64(h1, 27); - h1 += h2; - h1 = h1 * 5 + 0x52dce729; - - k2 *= c2; - k2 = rotl64(k2, 33); - k2 *= c1; - h2 ^= k2; - - h2 = rotl64(h2, 31); - h2 += h1; - h2 = h2 * 5 + 0x38495ab5; - } - - //---------- - // tail - - // Advance offset to the unprocessed tail of the data. 
- offset += nblocks * 16; - - long k1 = 0; - long k2 = 0; - - switch (length & 15) { - case 15: - k2 ^= ((long) key[offset + 14]) << 48; - case 14: - k2 ^= ((long) key[offset + 13]) << 40; - case 13: - k2 ^= ((long) key[offset + 12]) << 32; - case 12: - k2 ^= ((long) key[offset + 11]) << 24; - case 11: - k2 ^= ((long) key[offset + 10]) << 16; - case 10: - k2 ^= ((long) key[offset + 9]) << 8; - case 9: - k2 ^= ((long) key[offset + 8]) << 0; - k2 *= c2; - k2 = rotl64(k2, 33); - k2 *= c1; - h2 ^= k2; - - case 8: - k1 ^= ((long) key[offset + 7]) << 56; - case 7: - k1 ^= ((long) key[offset + 6]) << 48; - case 6: - k1 ^= ((long) key[offset + 5]) << 40; - case 5: - k1 ^= ((long) key[offset + 4]) << 32; - case 4: - k1 ^= ((long) key[offset + 3]) << 24; - case 3: - k1 ^= ((long) key[offset + 2]) << 16; - case 2: - k1 ^= ((long) key[offset + 1]) << 8; - case 1: - k1 ^= (key[offset]); - k1 *= c1; - k1 = rotl64(k1, 31); - k1 *= c2; - h1 ^= k1; - } - - //---------- - // finalization - - h1 ^= length; - h2 ^= length; - - h1 += h2; - h2 += h1; - - h1 = fmix(h1); - h2 = fmix(h2); - - h1 += h2; - h2 += h1; - - //return (new long[]{h1, h2}); - // SAME AS GUAVA, they take the first long out of the 128bit - return h1; - } - - // END: MURMUR 3_128 -} diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index d2e3d7f42cf..8b6e425c26a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -108,7 +108,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { currentNode, nodeWithHighestMatch); it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA, "existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis())); changed = true; } } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 60e3250e49d..f5e5ce91d80 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -50,7 +50,6 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexEventListener; @@ -151,11 +150,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.indexStore = indexStore; indexFieldData.setListener(new FieldDataCacheListener(this)); this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); - PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(indexSettings, IndexService.this::newQueryShardContext); this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool, - bitsetFilterCache.createListener(threadPool), - percolatorQueryCache.createListener(threadPool)); - this.indexCache = new IndexCache(indexSettings, queryCache, 
bitsetFilterCache, percolatorQueryCache); + bitsetFilterCache.createListener(threadPool)); + this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache); this.engineFactory = engineFactory; // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE this.searcherWrapper = wrapperFactory.newWrapper(this); @@ -239,8 +236,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } } } finally { - IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask, - cache().getPercolatorQueryCache()); + IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask); } } } @@ -443,7 +439,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return new QueryShardContext( indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(), - nodeServicesProvider.getClient(), indexCache.getPercolatorQueryCache(), indexReader, + nodeServicesProvider.getClient(), indexReader, nodeServicesProvider.getClusterService().state() ); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index c33eef4ac61..2d73df76f07 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -321,7 +321,7 @@ public final class AnalysisRegistry implements Closeable { if (currentSettings.get("tokenizer") != null) { factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); } else { - throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); + throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); } } else if (typeName.equals("custom")) { factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); @@ -335,7 +335,7 @@ public final class AnalysisRegistry implements Closeable { factories.put(name, factory); } else { if (typeName == null) { - throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); + throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); } AnalysisModule.AnalysisProvider type = providerMap.get(typeName); if (type == null) { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java index f7bf44256cc..985a081ccc8 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java @@ -33,13 +33,11 @@ import org.apache.lucene.analysis.util.CharArraySet; public final class FingerprintAnalyzer extends Analyzer { private final char separator; private final int maxOutputSize; - private final boolean preserveOriginal; private final CharArraySet stopWords; - public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize, boolean preserveOriginal) { + public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) { this.separator = separator; this.maxOutputSize = maxOutputSize; - 
this.preserveOriginal = preserveOriginal; this.stopWords = stopWords; } @@ -48,7 +46,7 @@ public final class FingerprintAnalyzer extends Analyzer { final Tokenizer tokenizer = new StandardTokenizer(); TokenStream stream = tokenizer; stream = new LowerCaseFilter(stream); - stream = new ASCIIFoldingFilter(stream, preserveOriginal); + stream = new ASCIIFoldingFilter(stream, false); stream = new StopFilter(stream, stopWords); stream = new FingerprintFilter(stream, maxOutputSize, separator); return new TokenStreamComponents(tokenizer, stream); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java index 897068cbf8b..bb8a51e0969 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java @@ -34,10 +34,8 @@ import org.elasticsearch.index.IndexSettings; public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider { public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE; - public static ParseField PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.PRESERVE_ORIGINAL; public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE; - public static boolean DEFAULT_PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.DEFAULT_PRESERVE_ORIGINAL; public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; private final FingerprintAnalyzer analyzer; @@ -47,10 +45,9 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); @Deprecated public static final String LEGACY_TYPE_NAME = ".percolator"; public static final String CONTENT_TYPE = "percolator"; private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); - private static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; - private static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; - static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; + public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; + public static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; + public static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; public static class Builder extends FieldMapper.Builder { @@ -172,7 +178,7 @@ public class PercolatorFieldMapper extends FieldMapper { this.queryTermsField = queryTermsField; this.unknownQueryField = unknownQueryField; this.queryBuilderField = queryBuilderField; - this.mapUnmappedFieldAsString = PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); + this.mapUnmappedFieldAsString = INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } @Override @@ -196,7 +202,7 @@ public class PercolatorFieldMapper extends FieldMapper { // Fetching of terms, shapes and indexed scripts happen during this rewrite: queryBuilder = queryBuilder.rewrite(queryShardContext); - try (XContentBuilder builder = XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE)) { + try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); builder.flush(); byte[] queryBuilderAsBytes 
= builder.bytes().toBytes(); diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java index 9fb6934cca4..2c231593860 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java @@ -40,6 +40,7 @@ import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SubSearchContext; +import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -69,8 +70,8 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { } List ctxs = context.searcher().getIndexReader().leaves(); - PercolatorQueryCache queriesRegistry = context.percolatorQueryCache(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); + PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore(); LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); @@ -78,9 +79,14 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { createSubSearchContext(context, percolatorLeafReaderContext, percolateQuery.getDocumentSource()); for (InternalSearchHit hit : hits) { - LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); - int segmentDocId = hit.docId() - ctx.docBase; - Query query = queriesRegistry.getQueries(ctx).getQuery(segmentDocId); + final Query query; + try { + LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); + int segmentDocId = hit.docId() - ctx.docBase; + query = queryStore.getQueries(ctx).getQuery(segmentDocId); + } catch (IOException e) { + throw new RuntimeException(e); + } if (query != null) { subSearchContext.parsedQuery(new ParsedQuery(query)); hitContext.reset( diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java deleted file mode 100644 index e7ca6c3d427..00000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.percolator; - -import com.carrotsearch.hppc.IntObjectHashMap; -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.cache.Cache; -import org.elasticsearch.common.cache.CacheBuilder; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.AbstractIndexComponent; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexWarmer; -import org.elasticsearch.index.IndexWarmer.TerminationHandle; -import org.elasticsearch.index.engine.Engine.Searcher; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.query.PercolateQuery; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardUtils; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.Closeable; -import java.io.IOException; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; -import java.util.function.Supplier; - -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.LEGACY_TYPE_NAME; -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.PercolatorFieldType; -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery; - -public final class PercolatorQueryCache extends AbstractIndexComponent - implements Closeable, LeafReader.CoreClosedListener, PercolateQuery.QueryRegistry { - - public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = - Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); - - public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; - - private final Supplier queryShardContextSupplier; - private final Cache cache; - private final boolean mapUnmappedFieldsAsString; - - public PercolatorQueryCache(IndexSettings indexSettings, Supplier queryShardContextSupplier) { - super(indexSettings); - this.queryShardContextSupplier = queryShardContextSupplier; - cache = CacheBuilder.builder().build(); - this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); - } - - @Override - public Leaf 
getQueries(LeafReaderContext ctx) { - QueriesLeaf percolatorQueries = cache.get(ctx.reader().getCoreCacheKey()); - if (percolatorQueries == null) { - throw new IllegalStateException("queries not loaded, queries should be have been preloaded during index warming..."); - } - return percolatorQueries; - } - - public IndexWarmer.Listener createListener(ThreadPool threadPool) { - return new IndexWarmer.Listener() { - - final Executor executor = threadPool.executor(ThreadPool.Names.WARMER); - - @Override - public TerminationHandle warmReader(IndexShard indexShard, Searcher searcher) { - final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size()); - for (final LeafReaderContext ctx : searcher.reader().leaves()) { - if (cache.get(ctx.reader().getCoreCacheKey()) != null) { - latch.countDown(); - continue; - } - executor.execute(() -> { - try { - final long start = System.nanoTime(); - QueriesLeaf queries = loadQueries(ctx, indexShard); - cache.put(ctx.reader().getCoreCacheKey(), queries); - if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace( - "loading percolator queries took [{}]", - TimeValue.timeValueNanos(System.nanoTime() - start) - ); - } - } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to load percolator queries", t); - } finally { - latch.countDown(); - } - }); - } - return () -> latch.await(); - } - }; - } - - QueriesLeaf loadQueries(LeafReaderContext context, IndexShard indexShard) throws IOException { - Version indexVersionCreated = indexShard.indexSettings().getIndexVersionCreated(); - MapperService mapperService = indexShard.mapperService(); - LeafReader leafReader = context.reader(); - ShardId shardId = ShardUtils.extractShardId(leafReader); - if (shardId == null) { - throw new IllegalStateException("can't resolve shard id"); - } - if (indexSettings.getIndex().equals(shardId.getIndex()) == false) { - // percolator cache insanity - String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " + - indexSettings.getIndex(); - throw new IllegalStateException(message); - } - - IntObjectHashMap queries = new IntObjectHashMap<>(); - boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1); - if (legacyLoading) { - PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, LEGACY_TYPE_NAME), PostingsEnum.NONE); - if (postings != null) { - LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); - for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { - leafReader.document(docId, visitor); - queries.put(docId, parseLegacyPercolatorDocument(docId, visitor.source)); - visitor.source = null; // reset - } - } - } else { - // Each type can have one percolator field mapper, - // So for each type we check if there is a percolator field mapper - // and parse all the queries for the documents of that type. 
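The eager, warmer-driven loading being deleted here is what the new `PercolateQuery.QueryStore` makes unnecessary: queries are materialized per segment, on demand, from the query-builder doc values. A minimal sketch of that shape, assuming the Lucene 5.x random-access `BinaryDocValues.get`; the field name and `parseQuery` helper are stand-ins for the real wiring in `PercolateQueryBuilder.createStore` further down:

--------------------------------
import java.io.IOException;

import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.PercolateQuery;

public class QueryStoreSketch {
    static PercolateQuery.QueryStore store() {
        return leafContext -> {
            // one doc-values lookup per segment...
            BinaryDocValues values = leafContext.reader().getBinaryDocValues("query_builder_field");
            if (values == null) {
                return docId -> null; // segment holds no percolator queries
            }
            // ...and one deserialization per candidate document
            return docId -> {
                BytesRef qbSource = values.get(docId);
                return qbSource.length > 0 ? parseQuery(qbSource) : null;
            };
        };
    }

    // stand-in for the SMILE parsing done by the real store
    private static Query parseQuery(BytesRef qbSource) throws IOException {
        throw new UnsupportedOperationException("sketch only");
    }
}
--------------------------------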
- IndexSearcher indexSearcher = new IndexSearcher(leafReader); - for (DocumentMapper documentMapper : mapperService.docMappers(false)) { - Weight queryWeight = indexSearcher.createNormalizedWeight(documentMapper.typeFilter(), false); - for (FieldMapper fieldMapper : documentMapper.mappers()) { - if (fieldMapper instanceof PercolatorFieldMapper) { - PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType(); - BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); - if (binaryDocValues != null) { - // use the same leaf reader context the indexSearcher is using too: - Scorer scorer = queryWeight.scorer(leafReader.getContext()); - if (scorer != null) { - DocIdSetIterator iterator = scorer.iterator(); - for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { - BytesRef qbSource = binaryDocValues.get(docId); - if (qbSource.length > 0) { - queries.put(docId, parseQueryBuilder(docId, qbSource)); - } - } - } - } - break; - } - } - } - } - leafReader.addCoreClosedListener(this); - return new QueriesLeaf(shardId, queries); - } - - private Query parseQueryBuilder(int docId, BytesRef qbSource) { - XContent xContent = QUERY_BUILDER_CONTENT_TYPE.xContent(); - try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { - QueryShardContext context = queryShardContextSupplier.get(); - return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); - } catch (IOException e) { - throw new PercolatorException(index(), "failed to parse query builder for document [" + docId + "]", e); - } - } - - private Query parseLegacyPercolatorDocument(int docId, BytesReference source) { - try (XContentParser sourceParser = XContentHelper.createParser(source)) { - String currentFieldName = null; - XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT"); - } - while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = sourceParser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("query".equals(currentFieldName)) { - QueryShardContext context = queryShardContextSupplier.get(); - return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); - } else { - sourceParser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - sourceParser.skipChildren(); - } - } - } catch (Exception e) { - throw new PercolatorException(index(), "failed to parse query [" + docId + "]", e); - } - return null; - } - - public PercolatorQueryCacheStats getStats(ShardId shardId) { - int numberOfQueries = 0; - for (QueriesLeaf queries : cache.values()) { - if (shardId.equals(queries.shardId)) { - numberOfQueries += queries.queries.size(); - } - } - return new PercolatorQueryCacheStats(numberOfQueries); - } - - @Override - public void onClose(Object cacheKey) throws IOException { - cache.invalidate(cacheKey); - } - - @Override - public void close() throws IOException { - cache.invalidateAll(); - } - - final static class LegacyQueryFieldVisitor extends StoredFieldVisitor { - - private BytesArray source; - - @Override - public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException { - source = new BytesArray(bytes); - } - - @Override - public 
Status needsField(FieldInfo fieldInfo) throws IOException { - if (source != null) { - return Status.STOP; - } - if (SourceFieldMapper.NAME.equals(fieldInfo.name)) { - return Status.YES; - } else { - return Status.NO; - } - } - - } - - final static class QueriesLeaf implements Leaf { - - final ShardId shardId; - final IntObjectHashMap queries; - - QueriesLeaf(ShardId shardId, IntObjectHashMap queries) { - this.shardId = shardId; - this.queries = queries; - } - - @Override - public Query getQuery(int docId) { - return queries.get(docId); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java deleted file mode 100644 index cbc21286e20..00000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.percolator; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * Exposes percolator query cache statistics. - */ -public class PercolatorQueryCacheStats implements Streamable, ToXContent { - - private long numQueries; - - /** - * Noop constructor for serialization purposes. - */ - public PercolatorQueryCacheStats() { - } - - PercolatorQueryCacheStats(long numQueries) { - this.numQueries = numQueries; - } - - /** - * @return The total number of loaded percolate queries. 
- */ - public long getNumQueries() { - return numQueries; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(Fields.PERCOLATOR); - builder.field(Fields.QUERIES, getNumQueries()); - builder.endObject(); - return builder; - } - - public void add(PercolatorQueryCacheStats percolate) { - if (percolate == null) { - return; - } - - numQueries += percolate.getNumQueries(); - } - - static final class Fields { - static final String PERCOLATOR = "percolator"; - static final String QUERIES = "num_queries"; - } - - public static PercolatorQueryCacheStats readPercolateStats(StreamInput in) throws IOException { - PercolatorQueryCacheStats stats = new PercolatorQueryCacheStats(); - stats.readFrom(in); - return stats; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - numQueries = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(numQueries); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 58207f7ca54..6c867590653 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -267,7 +267,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { */ public MatchQueryBuilder prefixLength(int prefixLength) { if (prefixLength < 0 ) { - throw new IllegalArgumentException("No negative prefix length allowed."); + throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative."); } this.prefixLength = prefixLength; return this; @@ -284,8 +284,8 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { * When using fuzzy or prefix type query, the number of term expansions to use. */ public MatchQueryBuilder maxExpansions(int maxExpansions) { - if (maxExpansions < 0 ) { - throw new IllegalArgumentException("No negative maxExpansions allowed."); + if (maxExpansions <= 0 ) { + throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive."); } this.maxExpansions = maxExpansions; return this; diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java b/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java index e99687422db..41448e14ef7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java @@ -50,7 +50,7 @@ public final class PercolateQuery extends Query implements Accountable { public static class Builder { private final String docType; - private final QueryRegistry queryRegistry; + private final QueryStore queryStore; private final BytesReference documentSource; private final IndexSearcher percolatorIndexSearcher; @@ -59,15 +59,15 @@ public final class PercolateQuery extends Query implements Accountable { /** * @param docType The type of the document being percolated - * @param queryRegistry The registry holding all the percolator queries as Lucene queries. + * @param queryStore The lookup holding all the percolator queries as Lucene queries. 
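Since this Builder javadoc is the entry point for the renamed abstraction, a hedged construction sketch may help; the type name, JSON document, and `MemoryIndex` setup are illustrative only, and `store` is the kind of lambda sketched earlier:

--------------------------------
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.PercolateQuery;

public class PercolateBuilderSketch {
    static PercolateQuery.Builder sketch(PercolateQuery.QueryStore store) {
        // the document being percolated, searched via an in-memory index
        BytesReference source = new BytesArray("{\"message\":\"hello\"}");
        MemoryIndex memoryIndex = new MemoryIndex();
        IndexSearcher searcher = memoryIndex.createSearcher();
        // all four arguments are non-null by contract (Objects.requireNonNull)
        return new PercolateQuery.Builder("doc_type", store, source, searcher);
    }
}
--------------------------------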
* @param documentSource The source of the document being percolated * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated */ - public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { + public Builder(String docType, QueryStore queryStore, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { this.docType = Objects.requireNonNull(docType); this.documentSource = Objects.requireNonNull(documentSource); this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); - this.queryRegistry = Objects.requireNonNull(queryRegistry); + this.queryStore = Objects.requireNonNull(queryStore); } /** @@ -94,7 +94,6 @@ public final class PercolateQuery extends Query implements Accountable { if (percolateTypeQuery != null && queriesMetaDataQuery != null) { throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); } - // The query that selects which percolator queries will be evaluated by MemoryIndex: BooleanQuery.Builder builder = new BooleanQuery.Builder(); if (percolateTypeQuery != null) { @@ -103,24 +102,23 @@ public final class PercolateQuery extends Query implements Accountable { if (queriesMetaDataQuery != null) { builder.add(queriesMetaDataQuery, FILTER); } - - return new PercolateQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher); + return new PercolateQuery(docType, queryStore, documentSource, builder.build(), percolatorIndexSearcher); } } private final String documentType; - private final QueryRegistry queryRegistry; + private final QueryStore queryStore; private final BytesReference documentSource; private final Query percolatorQueriesQuery; private final IndexSearcher percolatorIndexSearcher; - private PercolateQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource, + private PercolateQuery(String documentType, QueryStore queryStore, BytesReference documentSource, Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) { this.documentType = documentType; this.documentSource = documentSource; this.percolatorQueriesQuery = percolatorQueriesQuery; - this.queryRegistry = queryRegistry; + this.queryStore = queryStore; this.percolatorIndexSearcher = percolatorIndexSearcher; } @@ -128,7 +126,7 @@ public final class PercolateQuery extends Query implements Accountable { public Query rewrite(IndexReader reader) throws IOException { Query rewritten = percolatorQueriesQuery.rewrite(reader); if (rewritten != percolatorQueriesQuery) { - return new PercolateQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher); + return new PercolateQuery(documentType, queryStore, documentSource, rewritten, percolatorIndexSearcher); } else { return this; } @@ -151,7 +149,7 @@ public final class PercolateQuery extends Query implements Accountable { if (result == docId) { if (twoPhaseIterator.matches()) { if (needsScores) { - QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); + QueryStore.Leaf percolatorQueries = queryStore.getQueries(leafReaderContext); Query query = percolatorQueries.getQuery(docId); Explanation detail = percolatorIndexSearcher.explain(query, 0); return Explanation.match(scorer.score(), "PercolateQuery", detail); @@ -181,9 +179,9 @@ public final class PercolateQuery extends Query implements Accountable { return null; } - final QueryRegistry.Leaf 
percolatorQueries = queryRegistry.getQueries(leafReaderContext); + final QueryStore.Leaf queries = queryStore.getQueries(leafReaderContext); if (needsScores) { - return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { + return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) { float score; @@ -209,7 +207,7 @@ public final class PercolateQuery extends Query implements Accountable { } }; } else { - return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { + return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) { @Override public float score() throws IOException { @@ -238,6 +236,10 @@ public final class PercolateQuery extends Query implements Accountable { return documentSource; } + public QueryStore getQueryStore() { + return queryStore; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -276,13 +278,15 @@ public final class PercolateQuery extends Query implements Accountable { return sizeInBytes; } - public interface QueryRegistry { + @FunctionalInterface + public interface QueryStore { - Leaf getQueries(LeafReaderContext ctx); + Leaf getQueries(LeafReaderContext ctx) throws IOException; + @FunctionalInterface interface Leaf { - Query getQuery(int docId); + Query getQuery(int docId) throws IOException; } @@ -291,10 +295,10 @@ public final class PercolateQuery extends Query implements Accountable { static abstract class BaseScorer extends Scorer { final Scorer approximation; - final QueryRegistry.Leaf percolatorQueries; + final QueryStore.Leaf percolatorQueries; final IndexSearcher percolatorIndexSearcher; - BaseScorer(Weight weight, Scorer approximation, QueryRegistry.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) { + BaseScorer(Weight weight, Scorer approximation, QueryStore.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) { super(weight); this.approximation = approximation; this.percolatorQueries = percolatorQueries; diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java index 8341459b155..59be8d7a2d0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java @@ -21,10 +21,13 @@ package org.elasticsearch.index.query; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.BooleanClause; @@ -33,23 +36,27 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; -import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperForType; @@ -57,15 +64,16 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import java.io.IOException; import java.util.List; import java.util.Objects; import static org.elasticsearch.index.mapper.SourceToParse.source; +import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery; public class PercolateQueryBuilder extends AbstractQueryBuilder { @@ -388,16 +396,14 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { + LeafReader leafReader = ctx.reader(); + BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); + if (binaryDocValues == null) { + return docId -> null; + } + + Bits bits = leafReader.getDocsWithField(fieldType.getQueryBuilderFieldName()); + return docId -> { + if (bits.get(docId)) { + BytesRef qbSource = binaryDocValues.get(docId); + if (qbSource.length > 0) { + XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(); + try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } + } else { + return null; + } + } else { + return null; + } + }; + }; + } + + private static PercolateQuery.QueryStore createLegacyStore(QueryShardContext context, boolean mapUnmappedFieldsAsString) { + return ctx -> { + LeafReader leafReader = ctx.reader(); + return docId -> { + LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); + leafReader.document(docId, visitor); + if (visitor.source == null) { + throw new IllegalStateException("No source found for document with docid [" + docId + "]"); + } + + try (XContentParser sourceParser = XContentHelper.createParser(visitor.source)) { + String currentFieldName = null; + XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT"); + } + while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { + if 
(token == XContentParser.Token.FIELD_NAME) { + currentFieldName = sourceParser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("query".equals(currentFieldName)) { + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } else { + sourceParser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + sourceParser.skipChildren(); + } + } + } + return null; + }; + }; + } + + private final static class LegacyQueryFieldVisitor extends StoredFieldVisitor { + + private BytesArray source; + + @Override + public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException { + source = new BytesArray(bytes); + } + + @Override + public Status needsField(FieldInfo fieldInfo) throws IOException { + if (source != null) { + return Status.STOP; + } + if (SourceFieldMapper.NAME.equals(fieldInfo.name)) { + return Status.YES; + } else { + return Status.NO; + } + } + + } + } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 4aa72728bc8..3cdabb74d7e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -51,7 +51,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -82,7 +81,6 @@ public class QueryShardContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private final MapperQueryParser queryParser = new MapperQueryParser(this); private final IndicesQueriesRegistry indicesQueriesRegistry; - private final PercolatorQueryCache percolatorQueryCache; private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; @@ -90,7 +88,7 @@ public class QueryShardContext extends QueryRewriteContext { public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService, - final IndicesQueriesRegistry indicesQueriesRegistry, Client client, PercolatorQueryCache percolatorQueryCache, + final IndicesQueriesRegistry indicesQueriesRegistry, Client client, IndexReader reader, ClusterState clusterState) { super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState); this.indexSettings = indexSettings; @@ -100,14 +98,13 @@ public class QueryShardContext extends QueryRewriteContext { this.indexFieldDataService = indexFieldDataService; this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); this.indicesQueriesRegistry = indicesQueriesRegistry; - this.percolatorQueryCache = percolatorQueryCache; this.nestedScope = new NestedScope(); } public QueryShardContext(QueryShardContext source) { this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client, - source.percolatorQueryCache, 
source.reader, source.clusterState); + source.reader, source.clusterState); this.types = source.getTypes(); } @@ -123,10 +120,6 @@ public class QueryShardContext extends QueryRewriteContext { return mapperService.analysisService(); } - public PercolatorQueryCache getPercolatorQueryCache() { - return percolatorQueryCache; - } - public Similarity getSearchSimilarity() { return similarityService != null ? similarityService.similarity(mapperService) : null; } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index a503b708633..cebc72c077b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Objects; @@ -64,7 +65,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses() { - return this.clauses; + return Collections.unmodifiableList(this.clauses); } /** @@ -198,7 +202,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { if (initialClause == null) { - throw new IllegalArgumentException("query must include at least one clause"); + throw new IllegalArgumentException("[" + NAME + "] must include at least one clause"); } clauses.add(initialClause); } @@ -68,9 +69,12 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder writeQueries(out, clauses); } - public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { + /** + * Add a span clause to the current list of clauses + */ + public SpanOrQueryBuilder addClause(SpanQueryBuilder clause) { if (clause == null) { - throw new IllegalArgumentException("inner bool query clause cannot be null"); + throw new IllegalArgumentException("[" + NAME + "] inner clause cannot be null"); } clauses.add(clause); return this; @@ -80,7 +84,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder * @return the {@link SpanQueryBuilder} clauses that were set for this query */ public List clauses() { - return this.clauses; + return Collections.unmodifiableList(this.clauses); } @Override @@ -137,7 +141,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(clauses.get(0)); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clause(clauses.get(i)); + queryBuilder.addClause(clauses.get(i)); } queryBuilder.boost(boost); queryBuilder.queryName(queryName); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 768db935308..2cadc1ad24b 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -576,16 +576,22 @@ public class IndexShard extends AbstractIndexShardComponent { long bytes = getEngine().getIndexBufferRAMBytesUsed(); writingBytes.addAndGet(bytes); try { - logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + if (logger.isTraceEnabled()) { + logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + } long time = System.nanoTime(); getEngine().refresh(source); 
refreshMetric.inc(System.nanoTime() - time);
             } finally {
-                logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
+                if (logger.isTraceEnabled()) {
+                    logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
+                }
                 writingBytes.addAndGet(-bytes);
             }
         } else {
-            logger.debug("refresh with source [{}]", source);
+            if (logger.isTraceEnabled()) {
+                logger.trace("refresh with source [{}]", source);
+            }
             long time = System.nanoTime();
             getEngine().refresh(source);
             refreshMetric.inc(System.nanoTime() - time);
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
index e215669761c..0109995f80f 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
@@ -60,7 +60,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     private volatile long totalOffset;
 
     protected final AtomicBoolean closed = new AtomicBoolean(false);
-
+    // lock order synchronized(syncLock) -> synchronized(this)
+    private final Object syncLock = new Object();
 
     public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
         super(generation, channel, path, channel.position());
@@ -146,29 +147,15 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
      * raising the exception.
      */
     public void sync() throws IOException {
-        if (syncNeeded()) {
-            synchronized (this) {
-                ensureOpen();
-                final long offsetToSync;
-                final int opsCounter;
-                try {
-                    outputStream.flush();
-                    offsetToSync = totalOffset;
-                    opsCounter = operationCounter;
-                    checkpoint(offsetToSync, opsCounter, generation, channel, path);
-                } catch (Throwable ex) {
-                    closeWithTragicEvent(ex);
-                    throw ex;
-                }
-                lastSyncedOffset = offsetToSync;
-            }
-        }
+        syncUpTo(Long.MAX_VALUE);
     }
 
     /**
      * returns true if there are buffered ops
      */
-    public boolean syncNeeded() { return totalOffset != lastSyncedOffset; }
+    public boolean syncNeeded() {
+        return totalOffset != lastSyncedOffset;
+    }
 
     @Override
     public int totalOperations() {
@@ -183,40 +170,55 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     /**
     * closes this writer and transfers it's underlying file channel to a new immutable reader
     */
-    public synchronized TranslogReader closeIntoReader() throws IOException {
-        try {
-            sync(); // sync before we close..
-        } catch (IOException e) {
-            closeWithTragicEvent(e);
-            throw e;
-        }
-        if (closed.compareAndSet(false, true)) {
-            boolean success = false;
-            try {
-                final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter);
-                success = true;
-                return reader;
-            } finally {
-                if (success == false) {
-                    // close the channel, as we are closed and failed to create a new reader
-                    IOUtils.closeWhileHandlingException(channel);
+    public TranslogReader closeIntoReader() throws IOException {
+        // make sure to acquire the sync lock first, to prevent deadlocks with threads calling
+        // syncUpTo(), where the sync lock is acquired first, followed by synchronized(this)
+        //
+        // Note: While this is not strictly needed as this method is called while blocking all ops on the translog,
+        // we do this for correctness and to prevent future issues.
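+        //
+        // (editor's note, illustrative and not part of the original change: the rule above avoids
+        // a classic lock-ordering deadlock. Without it, a thread here could hold the monitor on
+        // "this" while waiting for syncLock, while a thread inside syncUpTo() holds syncLock and
+        // waits for "this". Acquiring syncLock first on every path keeps the order consistent.)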
+        synchronized (syncLock) {
+            synchronized (this) {
+                try {
+                    sync(); // sync before we close..
+                } catch (IOException e) {
+                    closeWithTragicEvent(e);
+                    throw e;
+                }
+                if (closed.compareAndSet(false, true)) {
+                    boolean success = false;
+                    try {
+                        final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter);
+                        success = true;
+                        return reader;
+                    } finally {
+                        if (success == false) {
+                            // close the channel, as we are closed and failed to create a new reader
+                            IOUtils.closeWhileHandlingException(channel);
+                        }
+                    }
+                } else {
+                    throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy);
+                }
+            }
-        } else {
-            throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy);
         }
     }
 
     @Override
-    public synchronized Translog.Snapshot newSnapshot() {
-        ensureOpen();
-        try {
-            sync();
-        } catch (IOException e) {
-            throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e);
+    public Translog.Snapshot newSnapshot() {
+        // make sure to acquire the sync lock first, to prevent deadlocks with threads calling
+        // syncUpTo(), where the sync lock is acquired first, followed by synchronized(this)
+        synchronized (syncLock) {
+            synchronized (this) {
+                ensureOpen();
+                try {
+                    sync();
+                } catch (IOException e) {
+                    throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e);
+                }
+                return super.newSnapshot();
+            }
         }
-        return super.newSnapshot();
     }
 
     private long getWrittenOffset() throws IOException {
@@ -229,9 +231,38 @@
      * @return true if this call caused an actual sync operation
      */
     public boolean syncUpTo(long offset) throws IOException {
-        if (lastSyncedOffset < offset) {
-            sync();
-            return true;
+        if (lastSyncedOffset < offset && syncNeeded()) {
+            synchronized (syncLock) { // only one sync/checkpoint should happen concurrently but we wait
+                if (lastSyncedOffset < offset && syncNeeded()) {
+                    // double checked locking - we don't want to fsync unless we have to and now that we have
+                    // the lock we should check again since if this code is busy we might have fsynced enough already
+                    final long offsetToSync;
+                    final int opsCounter;
+                    synchronized (this) {
+                        ensureOpen();
+                        try {
+                            outputStream.flush();
+                            offsetToSync = totalOffset;
+                            opsCounter = operationCounter;
+                        } catch (Throwable ex) {
+                            closeWithTragicEvent(ex);
+                            throw ex;
+                        }
+                    }
+                    // now do the actual fsync outside of the synchronized block such that
+                    // we can continue writing to the buffer etc.
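+                    //
+                    // (editor's note, illustrative and not part of the original change: this is the
+                    // double-checked locking idiom. The cheap unsynchronized check above is repeated
+                    // under syncLock because a concurrent caller may have fsynced past the requested
+                    // offset while we waited; only one thread pays for channel.force() below, and
+                    // later callers fall through and return false without touching the disk.)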
+ try { + channel.force(false); + writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + assert lastSyncedOffset <= offsetToSync : "illegal state: " + lastSyncedOffset + " <= " + offsetToSync; + lastSyncedOffset = offsetToSync; // write protected by syncLock + return true; + } + } } return false; } @@ -254,11 +285,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException { - translogFileChannel.force(false); - writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE); - } - private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException { final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME); Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation); @@ -269,7 +295,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { static final ChannelFactory DEFAULT = new ChannelFactory(); - // only for testing until we have a disk-full FileSystemt + // only for testing until we have a disk-full FileSystem public FileChannel open(Path file) throws IOException { return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 3b9b186ddd8..6b1d24e47f1 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -280,7 +280,7 @@ public class IndicesService extends AbstractLifecycleComponent i if (indexShard.routingEntry() == null) { continue; } - IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()) }); + IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexShard, flags), indexShard.commitStats()) }); if (!statsByShard.containsKey(indexService.index())) { statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats)); } else { diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index f4736f0332e..f205cdd8562 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -37,7 +37,6 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.recovery.RecoveryStats; import 
org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; @@ -102,11 +101,6 @@ public class NodeIndicesStats implements Streamable, ToXContent { return stats.getSearch(); } - @Nullable - public PercolatorQueryCacheStats getPercolate() { - return stats.getPercolatorCache(); - } - @Nullable public MergeStats getMerge() { return stats.getMerge(); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 124c8b2dbdd..24a9cf589bd 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,14 +19,11 @@ package org.elasticsearch.node.internal; -import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; -import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -39,10 +36,13 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; import static org.elasticsearch.common.Strings.cleanPath; @@ -52,20 +52,18 @@ import static org.elasticsearch.common.Strings.cleanPath; public class InternalSettingsPreparer { private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"}; - static final String PROPERTY_PREFIX = "es."; - static final String PROPERTY_DEFAULTS_PREFIX = "es.default."; + static final String PROPERTY_DEFAULTS_PREFIX = "default."; + static final Predicate PROPERTY_DEFAULTS_PREDICATE = key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX); public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; - public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = - Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. */ public static Settings prepareSettings(Settings input) { Settings.Builder output = Settings.builder(); - initializeSettings(output, input, true); + initializeSettings(output, input, true, Collections.emptyMap()); finalizeSettings(output, null, null); return output.build(); } @@ -80,9 +78,23 @@ public class InternalSettingsPreparer { * @return the {@link Settings} and {@link Environment} as a {@link Tuple} */ public static Environment prepareEnvironment(Settings input, Terminal terminal) { + return prepareEnvironment(input, terminal, Collections.emptyMap()); + } + + /** + * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings, + * and then replacing all property placeholders. 
If a {@link Terminal} is provided and configuration settings are loaded,
+     * settings with a value of ${prompt.text} or ${prompt.secret} will result in the user being
+     * prompted for the setting's value.
+     * @param input The custom settings to use. These are not overwritten by settings in the configuration file.
+     * @param terminal the Terminal to use for input/output
+     * @param properties map of key/value property pairs (usually from the command line)
+     * @return the initialized {@link Environment}
+     */
+    public static Environment prepareEnvironment(Settings input, Terminal terminal, Map<String, String> properties) {
         // just create enough settings to build the environment, to get the config dir
         Settings.Builder output = Settings.builder();
-        initializeSettings(output, input, true);
+        initializeSettings(output, input, true, properties);
         Environment environment = new Environment(output.build());
 
         boolean settingsFileFound = false;
@@ -103,7 +115,7 @@
         // re-initialize settings now that the config file has been loaded
         // TODO: only re-initialize if a config file was actually loaded
-        initializeSettings(output, input, false);
+        initializeSettings(output, input, false, properties);
         finalizeSettings(output, terminal, environment.configFile());
 
         environment = new Environment(output.build());
@@ -113,22 +125,16 @@
         return new Environment(output.build());
     }
 
-    private static boolean useSystemProperties(Settings input) {
-        return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input);
-    }
-
     /**
      * Initializes the builder with the given input settings, and loads system properties settings if allowed.
      * If loadDefaults is true, system property default settings are loaded.
      */
-    private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults) {
+    private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults, Map<String, String> esSettings) {
         output.put(input);
-        if (useSystemProperties(input)) {
-            if (loadDefaults) {
-                output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties());
-            }
-            output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX);
+        if (loadDefaults) {
+            output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE, key -> key.substring(PROPERTY_DEFAULTS_PREFIX.length()));
         }
+        output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE.negate(), Function.identity());
         output.replacePropertyPlaceholders();
     }
 
diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
index 2e5faa6bfae..645d07bfb64 100644
--- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
+++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
@@ -27,11 +27,14 @@ import org.elasticsearch.Version;
 import org.elasticsearch.bootstrap.JarHell;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.SettingCommand;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.cli.UserError;
 import org.elasticsearch.common.hash.MessageDigests;
 import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.node.internal.InternalSettingsPreparer;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -56,6 +59,7 @@ import
java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.zip.ZipEntry; @@ -95,7 +99,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * elasticsearch config directory, using the name of the plugin. If any files to be installed * already exist, they will be skipped. */ -class InstallPluginCommand extends Command { +class InstallPluginCommand extends SettingCommand { private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging"; @@ -126,12 +130,12 @@ class InstallPluginCommand extends Command { "mapper-murmur3", "mapper-size", "repository-azure", + "repository-gcs", "repository-hdfs", "repository-s3", "store-smb", "x-pack"))); - private final Environment env; private final OptionSpec batchOption; private final OptionSpec arguments; @@ -159,9 +163,8 @@ class InstallPluginCommand extends Command { FILE_PERMS = Collections.unmodifiableSet(filePerms); } - InstallPluginCommand(Environment env) { + InstallPluginCommand() { super("Install a plugin"); - this.env = env; this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), "Enable batch mode explicitly, automatic confirmation of security permission"); this.arguments = parser.nonOptions("plugin id"); @@ -177,7 +180,7 @@ class InstallPluginCommand extends Command { } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { @@ -185,12 +188,12 @@ class InstallPluginCommand extends Command { } String pluginId = args.get(0); boolean isBatch = options.has(batchOption) || System.console() == null; - execute(terminal, pluginId, isBatch); + execute(terminal, pluginId, isBatch, settings); } // pkg private for testing - void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception { - + void execute(Terminal terminal, String pluginId, boolean isBatch, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); // TODO: remove this leniency!! is it needed anymore? if (Files.exists(env.pluginsFile()) == false) { terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating..."); @@ -199,7 +202,7 @@ class InstallPluginCommand extends Command { Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); - install(terminal, isBatch, extractedZip); + install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to. */ @@ -348,7 +351,7 @@ class InstallPluginCommand extends Command { } /** Load information about the plugin, and verify it can be installed with no errors. */ - private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception { + private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception { // read and validate the plugin descriptor PluginInfo info = PluginInfo.readFromProperties(pluginRoot); terminal.println(VERBOSE, info.toString()); @@ -397,12 +400,12 @@ class InstallPluginCommand extends Command { * Installs the plugin from {@code tmpRoot} into the plugins dir. 
* If the plugin has a bin dir and/or a config dir, those are copied. */ - private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception { + private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env) throws Exception { List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - PluginInfo info = verify(terminal, tmpRoot, isBatch); + PluginInfo info = verify(terminal, tmpRoot, isBatch, env); final Path destination = env.pluginsFile().resolve(info.getName()); if (Files.exists(destination)) { diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index c03e70ad4da..bd2f853bac0 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -19,6 +19,13 @@ package org.elasticsearch.plugins; +import joptsimple.OptionSet; +import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; + import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -26,26 +33,20 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.List; - -import joptsimple.OptionSet; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.env.Environment; +import java.util.Map; /** * A command for the plugin cli to list plugins installed in elasticsearch. */ -class ListPluginsCommand extends Command { +class ListPluginsCommand extends SettingCommand { - private final Environment env; - - ListPluginsCommand(Environment env) { + ListPluginsCommand() { super("Lists installed elasticsearch plugins"); - this.env = env; } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index be06ea7db1c..3a88c4d0083 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -26,21 +26,24 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import java.util.Collections; + /** * A cli tool for adding, removing and listing plugins for elasticsearch. 
*/ public class PluginCli extends MultiCommand { - public PluginCli(Environment env) { + public PluginCli() { super("A tool for managing installed elasticsearch plugins"); - subcommands.put("list", new ListPluginsCommand(env)); - subcommands.put("install", new InstallPluginCommand(env)); - subcommands.put("remove", new RemovePluginCommand(env)); + subcommands.put("list", new ListPluginsCommand()); + subcommands.put("install", new InstallPluginCommand()); + subcommands.put("remove", new RemovePluginCommand()); } public static void main(String[] args) throws Exception { // initialize default for es.logger.level because we will not read the logging.yml String loggerLevel = System.getProperty("es.logger.level", "INFO"); + String pathHome = System.getProperty("es.path.home"); // Set the appender for all potential log files to terminal so that other components that use the logger print out the // same terminal. // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is @@ -48,12 +51,14 @@ public class PluginCli extends MultiCommand { // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. // Therefore we print to Terminal. Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() + .put("path.home", pathHome) .put("appender.terminal.type", "terminal") - .put("rootLogger", "${es.logger.level}, terminal") - .put("es.logger.level", loggerLevel) + .put("rootLogger", "${logger.level}, terminal") + .put("logger.level", loggerLevel) .build(), Terminal.DEFAULT); LogConfigurator.configure(loggingEnvironment.settings(), false); - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT); - exit(new PluginCli(env).main(args, Terminal.DEFAULT)); + + exit(new PluginCli().main(args, Terminal.DEFAULT)); } + } diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index a3e6c375f83..af48c1d8207 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -24,45 +24,49 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; +import java.util.Map; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. 
*/ -class RemovePluginCommand extends Command { +class RemovePluginCommand extends SettingCommand { - private final Environment env; private final OptionSpec arguments; - RemovePluginCommand(Environment env) { + RemovePluginCommand() { super("Removes a plugin from elasticsearch"); - this.env = env; this.arguments = parser.nonOptions("plugin name"); } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); } - execute(terminal, args.get(0)); + execute(terminal, args.get(0), settings); } // pkg private for testing - void execute(Terminal terminal, String pluginName) throws Exception { + void execute(Terminal terminal, String pluginName, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); Path pluginDir = env.pluginsFile().resolve(pluginName); diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 2194732445d..8cdbca512e7 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -123,9 +123,9 @@ public class BytesRestResponse extends RestResponse { params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request()); } else { if (status.getStatus() < 500) { - SUPPRESSED_ERROR_LOGGER.debug("{} Params: {}", t, channel.request().path(), channel.request().params()); + SUPPRESSED_ERROR_LOGGER.debug("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params()); } else { - SUPPRESSED_ERROR_LOGGER.warn("{} Params: {}", t, channel.request().path(), channel.request().params()); + SUPPRESSED_ERROR_LOGGER.warn("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params()); } params = channel.request(); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java index 8a4afd89ac4..b34c6726c09 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java @@ -64,6 +64,7 @@ public class RestClusterRerouteAction extends BaseRestHandler { public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception { final ClusterRerouteRequest clusterRerouteRequest = Requests.clusterRerouteRequest(); clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun())); + clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed())); clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain())); clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout())); 
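+        // (editor's usage note, assuming the standard reroute endpoint: the new flag above is passed
+        // as a query parameter, e.g. "POST /_cluster/reroute?retry_failed=true", to retry shards that
+        // have exhausted their allocation attempts.)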
clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout())); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index dbda83709ba..55ed1d8dda4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -78,7 +78,6 @@ public class RestIndicesStatsAction extends BaseRestHandler { indicesStatsRequest.flush(metrics.contains("flush")); indicesStatsRequest.warmer(metrics.contains("warmer")); indicesStatsRequest.queryCache(metrics.contains("query_cache")); - indicesStatsRequest.percolate(metrics.contains("percolator_cache")); indicesStatsRequest.segments(metrics.contains("segments")); indicesStatsRequest.fieldData(metrics.contains("fielddata")); indicesStatsRequest.completion(metrics.contains("completion")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 958fa40b54b..e266efa4357 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -222,9 +222,6 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "sibling:pri;alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("pri.percolate.queries", "default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("pri.refresh.total", "default:false;text-align:right;desc:total refreshes"); @@ -424,9 +421,6 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalTime()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime()); - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolatorCache().getNumQueries()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolatorCache().getNumQueries()); - table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal()); table.addCell(indexStats == null ? 
null : indexStats.getPrimaries().getRefresh().getTotal()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 15656733baf..f23f0e0a029 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.shard.IndexingStats; @@ -185,8 +184,6 @@ public class RestNodesAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -338,9 +335,6 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(mergeStats == null ? null : mergeStats.getTotalSize()); table.addCell(mergeStats == null ? null : mergeStats.getTotalTime()); - PercolatorQueryCacheStats percolatorQueryCacheStats = indicesStats == null ? null : indicesStats.getPercolate(); - table.addCell(percolatorQueryCacheStats == null ? null : percolatorQueryCacheStats.getNumQueries()); - RefreshStats refreshStats = indicesStats == null ? null : indicesStats.getRefresh(); table.addCell(refreshStats == null ? null : refreshStats.getTotal()); table.addCell(refreshStats == null ? null : refreshStats.getTotalTime()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 9954bd3098b..6538b405fce 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -139,8 +139,6 @@ public class RestShardsAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -278,8 +276,6 @@ public class RestShardsAction extends AbstractCatAction { table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalSize()); table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalTime()); - table.addCell(commonStats == null ? 
null : commonStats.getPercolatorCache().getNumQueries()); - table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotal()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotalTime()); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java index 226fd7b6c34..31ff9ac0b61 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java @@ -30,13 +30,13 @@ public class ScriptEngineRegistry { private final Map, String> registeredScriptEngineServices; private final Map> registeredLanguages; - private final Map defaultInlineScriptModes; + private final Map defaultInlineScriptEnableds; public ScriptEngineRegistry(Iterable registrations) { Objects.requireNonNull(registrations); Map, String> registeredScriptEngineServices = new HashMap<>(); Map> registeredLanguages = new HashMap<>(); - Map inlineScriptModes = new HashMap<>(); + Map inlineScriptEnableds = new HashMap<>(); for (ScriptEngineRegistration registration : registrations) { String oldLanguage = registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(), registration.getScriptEngineLanguage()); @@ -51,12 +51,12 @@ public class ScriptEngineRegistry { throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" + scriptEngineServiceClazz.getCanonicalName() + "]"); } - inlineScriptModes.put(language, registration.getDefaultInlineScriptMode()); + inlineScriptEnableds.put(language, registration.getDefaultInlineScriptEnabled()); } this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices); this.registeredLanguages = Collections.unmodifiableMap(registeredLanguages); - this.defaultInlineScriptModes = Collections.unmodifiableMap(inlineScriptModes); + this.defaultInlineScriptEnableds = Collections.unmodifiableMap(inlineScriptEnableds); } Iterable> getRegisteredScriptEngineServices() { @@ -72,27 +72,27 @@ public class ScriptEngineRegistry { return registeredLanguages; } - Map getDefaultInlineScriptModes() { - return this.defaultInlineScriptModes; + Map getDefaultInlineScriptEnableds() { + return this.defaultInlineScriptEnableds; } public static class ScriptEngineRegistration { private final Class scriptEngineService; private final String scriptEngineLanguage; - private final ScriptMode defaultInlineScriptMode; + private final boolean defaultInlineScriptEnabled; /** * Register a script engine service with the default of inline scripts disabled */ public ScriptEngineRegistration(Class scriptEngineService, String scriptEngineLanguage) { - this(scriptEngineService, scriptEngineLanguage, ScriptMode.OFF); + this(scriptEngineService, scriptEngineLanguage, false); } /** * Register a script engine service with the given default mode for inline scripts */ public ScriptEngineRegistration(Class scriptEngineService, String scriptEngineLanguage, - ScriptMode defaultInlineScriptMode) { + boolean defaultInlineScriptEnabled) { Objects.requireNonNull(scriptEngineService); if (Strings.hasText(scriptEngineLanguage) == false) { throw new IllegalArgumentException("languages for script engine service [" + @@ -100,7 +100,7 @@ public class ScriptEngineRegistry { } this.scriptEngineService = scriptEngineService; this.scriptEngineLanguage = scriptEngineLanguage; - this.defaultInlineScriptMode = defaultInlineScriptMode; + 
this.defaultInlineScriptEnabled = defaultInlineScriptEnabled; } Class getScriptEngineService() { @@ -111,8 +111,8 @@ public class ScriptEngineRegistry { return scriptEngineLanguage; } - ScriptMode getDefaultInlineScriptMode() { - return defaultInlineScriptMode; + boolean getDefaultInlineScriptEnabled() { + return defaultInlineScriptEnabled; } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptMode.java b/core/src/main/java/org/elasticsearch/script/ScriptMode.java deleted file mode 100644 index 6508d2f1015..00000000000 --- a/core/src/main/java/org/elasticsearch/script/ScriptMode.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script; - -import java.util.HashMap; -import java.util.Map; - -/** - * Mode for a specific script, used for script settings. - * Defines whether a certain script or category of scripts can be executed or not. - */ -public enum ScriptMode { - ON("true"), - OFF("false"); - - private final String mode; - - ScriptMode(String mode) { - this.mode = mode; - } - - private static final Map SCRIPT_MODES; - - static { - SCRIPT_MODES = new HashMap<>(); - for (ScriptMode scriptMode : ScriptMode.values()) { - SCRIPT_MODES.put(scriptMode.mode, scriptMode); - } - } - - static ScriptMode parse(String input) { - ScriptMode scriptMode = SCRIPT_MODES.get(input); - if (scriptMode == null) { - throw new IllegalArgumentException("script mode [" + input + "] not supported"); - } - return scriptMode; - } - - public String getMode() { - return mode; - } - - @Override - public String toString() { - return mode; - } -} diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModes.java b/core/src/main/java/org/elasticsearch/script/ScriptModes.java index d07a4073ba8..46ab2a44d21 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptModes.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModes.java @@ -29,22 +29,22 @@ import java.util.Map; import java.util.TreeMap; /** - * Holds the {@link org.elasticsearch.script.ScriptMode}s for each of the different scripting languages available, - * each script source and each scripted operation. + * Holds the boolean indicating the enabled mode for each of the different scripting languages available, each script source and each + * scripted operation. 
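+ * (editor's example, inferred from the key format assembled below: a lookup key has the shape
+ * {@code script.engine.<lang>.<script type>.<context>}, e.g. {@code script.engine.groovy.inline.search},
+ * and maps to {@code true} or {@code false}.)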
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModes.java b/core/src/main/java/org/elasticsearch/script/ScriptModes.java
index d07a4073ba8..46ab2a44d21 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptModes.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptModes.java
@@ -29,22 +29,22 @@ import java.util.Map;
 import java.util.TreeMap;

 /**
- * Holds the {@link org.elasticsearch.script.ScriptMode}s for each of the different scripting languages available,
- * each script source and each scripted operation.
+ * Holds the boolean indicating the enabled mode for each of the different scripting languages available, each script source and each
+ * scripted operation.
  */
 public class ScriptModes {

     private static final String SCRIPT_SETTINGS_PREFIX = "script";
     private static final String ENGINE_SETTINGS_PREFIX = "script.engine";

-    final Map<String, ScriptMode> scriptModes;
+    final Map<String, Boolean> scriptEnabled;

     ScriptModes(ScriptSettings scriptSettings, Settings settings) {
-        HashMap<String, ScriptMode> scriptModes = new HashMap<>();
-        for (Setting<ScriptMode> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) {
+        HashMap<String, Boolean> scriptModes = new HashMap<>();
+        for (Setting<Boolean> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) {
             scriptModes.put(scriptModeSetting.getKey(), scriptModeSetting.get(settings));
         }
-        this.scriptModes = Collections.unmodifiableMap(scriptModes);
+        this.scriptEnabled = Collections.unmodifiableMap(scriptModes);
     }

     /**
@@ -54,14 +54,14 @@ public class ScriptModes {
      * @param lang the language that the script is written in
      * @param scriptType the type of the script
      * @param scriptContext the operation that requires the execution of the script
-     * @return whether scripts are on or off
+     * @return whether scripts are enabled (true) or disabled (false)
      */
-    public ScriptMode getScriptMode(String lang, ScriptType scriptType, ScriptContext scriptContext) {
-        //native scripts are always on as they are static by definition
+    public boolean getScriptEnabled(String lang, ScriptType scriptType, ScriptContext scriptContext) {
+        //native scripts are always enabled as they are static by definition
         if (NativeScriptEngineService.NAME.equals(lang)) {
-            return ScriptMode.ON;
+            return true;
         }
-        ScriptMode scriptMode = scriptModes.get(getKey(lang, scriptType, scriptContext));
+        Boolean scriptMode = scriptEnabled.get(getKey(lang, scriptType, scriptContext));
         if (scriptMode == null) {
             throw new IllegalArgumentException("script mode not found for lang [" + lang + "], script_type [" + scriptType +
                 "], operation [" + scriptContext.getKey() + "]");
         }
@@ -87,10 +87,10 @@ public class ScriptModes {
     @Override
     public String toString() {
         //order settings by key before printing them out, for readability
-        TreeMap<String, ScriptMode> scriptModesTreeMap = new TreeMap<>();
-        scriptModesTreeMap.putAll(scriptModes);
+        TreeMap<String, Boolean> scriptModesTreeMap = new TreeMap<>();
+        scriptModesTreeMap.putAll(scriptEnabled);
         StringBuilder stringBuilder = new StringBuilder();
-        for (Map.Entry<String, ScriptMode> stringScriptModeEntry : scriptModesTreeMap.entrySet()) {
+        for (Map.Entry<String, Boolean> stringScriptModeEntry : scriptModesTreeMap.entrySet()) {
             stringBuilder.append(stringScriptModeEntry.getKey()).append(": ").append(stringScriptModeEntry.getValue()).append("\n");
         }
         return stringBuilder.toString();
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
index 0fa4d9ea731..c08c31c9bf2 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -24,7 +24,6 @@ import org.elasticsearch.common.inject.multibindings.MapBinder;
 import org.elasticsearch.common.inject.multibindings.Multibinder;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.SettingsModule;
-import org.elasticsearch.script.ScriptMode;

 import java.util.ArrayList;
 import java.util.HashMap;
@@ -42,7 +41,7 @@ public class ScriptModule extends AbstractModule {
     {
         scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class,
-            NativeScriptEngineService.NAME, ScriptMode.ON));
+            NativeScriptEngineService.NAME, true));
     }

     private final Map<String, Class<? extends NativeScriptFactory>> scripts = new HashMap<>();
diff --git
a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index f77a0aae330..cf0ba1ca941 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -473,15 +473,7 @@ public class ScriptService extends AbstractComponent implements Closeable { if (scriptContextRegistry.isSupportedContext(scriptContext) == false) { throw new IllegalArgumentException("script context [" + scriptContext.getKey() + "] not supported"); } - ScriptMode mode = scriptModes.getScriptMode(lang, scriptType, scriptContext); - switch (mode) { - case ON: - return true; - case OFF: - return false; - default: - throw new IllegalArgumentException("script mode [" + mode + "] not supported"); - } + return scriptModes.getScriptEnabled(lang, scriptType, scriptContext); } public ScriptStats stats() { @@ -610,14 +602,14 @@ public class ScriptService extends AbstractComponent implements Closeable { */ public enum ScriptType { - INLINE(0, "inline", "inline", ScriptMode.OFF), - STORED(1, "id", "stored", ScriptMode.OFF), - FILE(2, "file", "file", ScriptMode.ON); + INLINE(0, "inline", "inline", false), + STORED(1, "id", "stored", false), + FILE(2, "file", "file", true); private final int val; private final ParseField parseField; private final String scriptType; - private final ScriptMode defaultScriptMode; + private final boolean defaultScriptEnabled; public static ScriptType readFrom(StreamInput in) throws IOException { int scriptTypeVal = in.readVInt(); @@ -638,19 +630,19 @@ public class ScriptService extends AbstractComponent implements Closeable { } } - ScriptType(int val, String name, String scriptType, ScriptMode defaultScriptMode) { + ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) { this.val = val; this.parseField = new ParseField(name); this.scriptType = scriptType; - this.defaultScriptMode = defaultScriptMode; + this.defaultScriptEnabled = defaultScriptEnabled; } public ParseField getParseField() { return parseField; } - public ScriptMode getDefaultScriptMode() { - return defaultScriptMode; + public boolean getDefaultScriptEnabled() { + return defaultScriptEnabled; } public String getScriptType() { diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index 433912e47e5..71013f28a06 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -37,29 +37,28 @@ public class ScriptSettings { public final static String DEFAULT_LANG = "groovy"; - private final static Map> SCRIPT_TYPE_SETTING_MAP; + private final static Map> SCRIPT_TYPE_SETTING_MAP; static { - Map> scriptTypeSettingMap = new HashMap<>(); + Map> scriptTypeSettingMap = new HashMap<>(); for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { - scriptTypeSettingMap.put(scriptType, new Setting<>( + scriptTypeSettingMap.put(scriptType, Setting.boolSetting( ScriptModes.sourceKey(scriptType), - scriptType.getDefaultScriptMode().getMode(), - ScriptMode::parse, + scriptType.getDefaultScriptEnabled(), Property.NodeScope)); } SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap); } - private final Map> scriptContextSettingMap; - private final List> scriptLanguageSettings; + private final Map> scriptContextSettingMap; + private final List> scriptLanguageSettings; 
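The rewritten ScriptType constants above carry their defaults as plain booleans. A small sketch of what callers now observe (accessor name from this diff, wrapper class hypothetical):

import org.elasticsearch.script.ScriptService;

class ScriptTypeDefaultsSketch {
    public static void main(String[] args) {
        // Defaults encoded in the enum constants: inline and stored scripts are
        // disabled by default, file scripts are enabled.
        System.out.println(ScriptService.ScriptType.INLINE.getDefaultScriptEnabled()); // false
        System.out.println(ScriptService.ScriptType.STORED.getDefaultScriptEnabled()); // false
        System.out.println(ScriptService.ScriptType.FILE.getDefaultScriptEnabled());   // true
    }
}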
private final Setting defaultScriptLanguageSetting; public ScriptSettings(ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) { - Map> scriptContextSettingMap = contextSettings(scriptContextRegistry); + Map> scriptContextSettingMap = contextSettings(scriptContextRegistry); this.scriptContextSettingMap = Collections.unmodifiableMap(scriptContextSettingMap); - List> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry); + List> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry); this.scriptLanguageSettings = Collections.unmodifiableList(scriptLanguageSettings); this.defaultScriptLanguageSetting = new Setting<>("script.default_lang", DEFAULT_LANG, setting -> { @@ -70,24 +69,20 @@ public class ScriptSettings { }, Property.NodeScope); } - private static Map> contextSettings(ScriptContextRegistry scriptContextRegistry) { - Map> scriptContextSettingMap = new HashMap<>(); + private static Map> contextSettings(ScriptContextRegistry scriptContextRegistry) { + Map> scriptContextSettingMap = new HashMap<>(); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { - scriptContextSettingMap.put(scriptContext, new Setting<>( - ScriptModes.operationKey(scriptContext), - ScriptMode.OFF.getMode(), - ScriptMode::parse, - Property.NodeScope - )); + scriptContextSettingMap.put(scriptContext, + Setting.boolSetting(ScriptModes.operationKey(scriptContext), false, Property.NodeScope)); } return scriptContextSettingMap; } - private static List> languageSettings(Map> scriptTypeSettingMap, - Map> scriptContextSettingMap, + private static List> languageSettings(Map> scriptTypeSettingMap, + Map> scriptContextSettingMap, ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) { - final List> scriptModeSettings = new ArrayList<>(); + final List> scriptModeSettings = new ArrayList<>(); for (final Class scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) { if (scriptEngineService == NativeScriptEngineService.class) { @@ -97,17 +92,17 @@ public class ScriptSettings { final String language = scriptEngineRegistry.getLanguage(scriptEngineService); for (final ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { // Top level, like "script.engine.groovy.inline" - final ScriptMode defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptModes().get(language); - ScriptMode defaultLangAndType = defaultNonFileScriptMode; + final boolean defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptEnableds().get(language); + boolean defaultLangAndType = defaultNonFileScriptMode; // Files are treated differently because they are never default-deny if (ScriptService.ScriptType.FILE == scriptType) { - defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptMode(); + defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptEnabled(); } - final ScriptMode defaultIfNothingSet = defaultLangAndType; + final boolean defaultIfNothingSet = defaultLangAndType; // Setting for something like "script.engine.groovy.inline" - final Setting langAndTypeSetting = new Setting<>(ScriptModes.getGlobalKey(language, scriptType), - defaultLangAndType.toString(), ScriptMode::parse, Property.NodeScope); + final Setting langAndTypeSetting = Setting.boolSetting(ScriptModes.getGlobalKey(language, scriptType), + 
defaultLangAndType, Property.NodeScope); scriptModeSettings.add(langAndTypeSetting); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { @@ -115,32 +110,31 @@ public class ScriptSettings { // A function that, given a setting, will return what the default should be. Since the fine-grained script settings // read from a bunch of different places this is implemented in this way. Function defaultSettingFn = settings -> { - final Setting globalOpSetting = scriptContextSettingMap.get(scriptContext); - final Setting globalTypeSetting = scriptTypeSettingMap.get(scriptType); - final Setting langAndTypeAndContextSetting = new Setting<>(langAndTypeAndContextName, - defaultIfNothingSet.toString(), ScriptMode::parse, Property.NodeScope); + final Setting globalOpSetting = scriptContextSettingMap.get(scriptContext); + final Setting globalTypeSetting = scriptTypeSettingMap.get(scriptType); + final Setting langAndTypeAndContextSetting = Setting.boolSetting(langAndTypeAndContextName, + defaultIfNothingSet, Property.NodeScope); // fallback logic for script mode settings if (langAndTypeAndContextSetting.exists(settings)) { // like: "script.engine.groovy.inline.aggs: true" - return langAndTypeAndContextSetting.get(settings).getMode(); + return langAndTypeAndContextSetting.get(settings).toString(); } else if (langAndTypeSetting.exists(settings)) { // like: "script.engine.groovy.inline: true" - return langAndTypeSetting.get(settings).getMode(); + return langAndTypeSetting.get(settings).toString(); } else if (globalOpSetting.exists(settings)) { // like: "script.aggs: true" - return globalOpSetting.get(settings).getMode(); + return globalOpSetting.get(settings).toString(); } else if (globalTypeSetting.exists(settings)) { // like: "script.inline: true" - return globalTypeSetting.get(settings).getMode(); + return globalTypeSetting.get(settings).toString(); } else { // Nothing is set! 
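// Recap of the resolution order implemented above, most specific setting wins:
//   1. script.engine.<lang>.<type>.<context>  (e.g. script.engine.groovy.inline.aggs)
//   2. script.engine.<lang>.<type>            (e.g. script.engine.groovy.inline)
//   3. script.<context>                       (e.g. script.aggs)
//   4. script.<type>                          (e.g. script.inline)
//   5. the engine's default for that script type (file scripts default to enabled)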
- return defaultIfNothingSet.getMode(); + return Boolean.toString(defaultIfNothingSet); } }; // The actual setting for finest grained script settings - Setting setting = new Setting<>(langAndTypeAndContextName, defaultSettingFn, - ScriptMode::parse, Property.NodeScope); + Setting setting = Setting.boolSetting(langAndTypeAndContextName, defaultSettingFn, Property.NodeScope); scriptModeSettings.add(setting); } } @@ -148,15 +142,15 @@ public class ScriptSettings { return scriptModeSettings; } - public Iterable> getScriptTypeSettings() { + public Iterable> getScriptTypeSettings() { return Collections.unmodifiableCollection(SCRIPT_TYPE_SETTING_MAP.values()); } - public Iterable> getScriptContextSettings() { + public Iterable> getScriptContextSettings() { return Collections.unmodifiableCollection(scriptContextSettingMap.values()); } - public Iterable> getScriptLanguageSettings() { + public Iterable> getScriptLanguageSettings() { return scriptLanguageSettings; } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 513a5c88a37..e76597c256c 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase; @@ -97,51 +98,51 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.action.SearchTransportService; import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.aggregations.Aggregator; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorParsers; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.children.InternalChildren; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import 
org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.missing.MissingParser; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.InternalRange; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.RangeParser; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser; import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser; import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange; import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser; import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler; import 
org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser; import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; @@ -155,50 +156,50 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsParser; import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.AvgParser; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser; import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser; import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser; import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid; import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxParser; import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.min.MinParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser; import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.InternalStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.StatsParser; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser; import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumParser; import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -290,6 +291,8 @@ public class SearchModule extends AbstractModule { private final Settings settings; private final NamedWriteableRegistry namedWriteableRegistry; + public static final Setting INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count", + 1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope); // pkg private so tests can mock Class searchServiceImpl = SearchService.class; @@ -421,10 +424,10 @@ public class SearchModule extends AbstractModule { * @param aggregationName names by which the aggregation may be parsed. The first name is special because it is the name that the reader * is registered under. 
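Because `indices.query.bool.max_clause_count` is now a registered setting with validation (default 1024, minimum 1), reading it goes through the Setting API instead of the old ad-hoc `settings.getAsInt` chain, and the `index.query.bool.max_clause_count` fallback is dropped. A sketch of the new read path (override value hypothetical):

import org.apache.lucene.search.BooleanQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchModule;

class MaxClauseCountSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
                .put("indices.query.bool.max_clause_count", 2048) // hypothetical override of the 1024 default
                .build();
        // Out-of-range values (< 1) are rejected by the registered setting itself.
        BooleanQuery.setMaxClauseCount(SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings));
    }
}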
*/ - public > void registerAggregation(Writeable.Reader reader, Aggregator.Parser aggregationParser, - ParseField aggregationName) { + public > void registerAggregation(Writeable.Reader reader, Aggregator.Parser aggregationParser, + ParseField aggregationName) { aggregationParserRegistry.register(aggregationParser, aggregationName); - namedWriteableRegistry.register(AggregatorBuilder.class, aggregationName.getPreferredName(), reader); + namedWriteableRegistry.register(AggregationBuilder.class, aggregationName.getPreferredName(), reader); } /** @@ -478,55 +481,57 @@ public class SearchModule extends AbstractModule { } protected void configureAggs() { - registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MaxAggregatorBuilder::new, new MaxParser(), MaxAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(StatsAggregatorBuilder::new, new StatsParser(), StatsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ExtendedStatsAggregatorBuilder::new, new ExtendedStatsParser(), - ExtendedStatsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ValueCountAggregatorBuilder::new, new ValueCountParser(), ValueCountAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(PercentilesAggregatorBuilder::new, new PercentilesParser(), - PercentilesAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(PercentileRanksAggregatorBuilder::new, new PercentileRanksParser(), - PercentileRanksAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(CardinalityAggregatorBuilder::new, new CardinalityParser(), - CardinalityAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GlobalAggregatorBuilder::new, GlobalAggregatorBuilder::parse, GlobalAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MissingAggregatorBuilder::new, new MissingParser(), MissingAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(FilterAggregatorBuilder::new, FilterAggregatorBuilder::parse, FilterAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(FiltersAggregatorBuilder::new, FiltersAggregatorBuilder::parse, - FiltersAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SamplerAggregatorBuilder::new, SamplerAggregatorBuilder::parse, - SamplerAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DiversifiedAggregatorBuilder::new, new DiversifiedSamplerParser(), - DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SignificantTermsAggregatorBuilder::new, + registerAggregation(AvgAggregationBuilder::new, new AvgParser(), AvgAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SumAggregationBuilder::new, new SumParser(), SumAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MinAggregationBuilder::new, new MinParser(), MinAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MaxAggregationBuilder::new, new MaxParser(), MaxAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(StatsAggregationBuilder::new, new StatsParser(), StatsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ExtendedStatsAggregationBuilder::new, 
new ExtendedStatsParser(), + ExtendedStatsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ValueCountAggregationBuilder::new, new ValueCountParser(), ValueCountAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(PercentilesAggregationBuilder::new, new PercentilesParser(), + PercentilesAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(PercentileRanksAggregationBuilder::new, new PercentileRanksParser(), + PercentileRanksAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(CardinalityAggregationBuilder::new, new CardinalityParser(), + CardinalityAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse, + GlobalAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MissingAggregationBuilder::new, new MissingParser(), MissingAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(FilterAggregationBuilder::new, FilterAggregationBuilder::parse, + FilterAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse, + FiltersAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse, + SamplerAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DiversifiedAggregationBuilder::new, new DiversifiedSamplerParser(), + DiversifiedAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(TermsAggregationBuilder::new, new TermsParser(), TermsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SignificantTermsAggregationBuilder::new, new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry), - SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(), - DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoDistanceAggregatorBuilder::new, new GeoDistanceParser(), - GeoDistanceAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoGridAggregatorBuilder::new, new GeoHashGridParser(), GeoGridAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(NestedAggregatorBuilder::new, NestedAggregatorBuilder::parse, NestedAggregatorBuilder.AGGREGATION_FIELD_NAME); - registerAggregation(ReverseNestedAggregatorBuilder::new, ReverseNestedAggregatorBuilder::parse, - ReverseNestedAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(TopHitsAggregatorBuilder::new, TopHitsAggregatorBuilder::parse, - TopHitsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoBoundsAggregatorBuilder::new, new GeoBoundsParser(), GeoBoundsAggregatorBuilder.AGGREGATION_NAME_FIED); - registerAggregation(GeoCentroidAggregatorBuilder::new, new GeoCentroidParser(), - GeoCentroidAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ScriptedMetricAggregatorBuilder::new, ScriptedMetricAggregatorBuilder::parse, - 
ScriptedMetricAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ChildrenAggregatorBuilder::new, ChildrenAggregatorBuilder::parse, - ChildrenAggregatorBuilder.AGGREGATION_NAME_FIELD); - + SignificantTermsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(RangeAggregationBuilder::new, new RangeParser(), RangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DateRangeAggregationBuilder::new, new DateRangeParser(), DateRangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(IpRangeAggregationBuilder::new, new IpRangeParser(), IpRangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(HistogramAggregationBuilder::new, new HistogramParser(), HistogramAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DateHistogramAggregationBuilder::new, new DateHistogramParser(), + DateHistogramAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoDistanceAggregationBuilder::new, new GeoDistanceParser(), + GeoDistanceAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoGridAggregationBuilder::new, new GeoHashGridParser(), GeoGridAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(NestedAggregationBuilder::new, NestedAggregationBuilder::parse, + NestedAggregationBuilder.AGGREGATION_FIELD_NAME); + registerAggregation(ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse, + ReverseNestedAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse, + TopHitsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoBoundsAggregationBuilder::new, new GeoBoundsParser(), GeoBoundsAggregationBuilder.AGGREGATION_NAME_FIED); + registerAggregation(GeoCentroidAggregationBuilder::new, new GeoCentroidParser(), + GeoCentroidAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse, + ScriptedMetricAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse, + ChildrenAggregationBuilder.AGGREGATION_NAME_FIELD); registerPipelineAggregation(DerivativePipelineAggregatorBuilder::new, DerivativePipelineAggregatorBuilder::parse, DerivativePipelineAggregatorBuilder.AGGREGATION_NAME_FIELD); registerPipelineAggregation(MaxBucketPipelineAggregatorBuilder::new, MaxBucketPipelineAggregatorBuilder.PARSER, @@ -650,8 +655,7 @@ public class SearchModule extends AbstractModule { registerQuery(MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent, MatchAllQueryBuilder.QUERY_NAME_FIELD); registerQuery(QueryStringQueryBuilder::new, QueryStringQueryBuilder::fromXContent, QueryStringQueryBuilder.QUERY_NAME_FIELD); registerQuery(BoostingQueryBuilder::new, BoostingQueryBuilder::fromXContent, BoostingQueryBuilder.QUERY_NAME_FIELD); - BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", - settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); + BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings)); registerQuery(BoolQueryBuilder::new, BoolQueryBuilder::fromXContent, BoolQueryBuilder.QUERY_NAME_FIELD); registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD); registerQuery(TermsQueryBuilder::new, TermsQueryBuilder::fromXContent, TermsQueryBuilder.QUERY_NAME_FIELD); diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java similarity index 93% rename from core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 159ab5a8a10..e0336247c75 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -36,7 +36,9 @@ import java.util.Objects; /** * A factory that knows how to create an {@link Aggregator} of a specific type. */ -public abstract class AggregatorBuilder> extends ToXContentToBytes implements NamedWriteable, ToXContent { +public abstract class AggregationBuilder> + extends ToXContentToBytes + implements NamedWriteable, ToXContent { protected String name; protected Type type; @@ -44,12 +46,12 @@ public abstract class AggregatorBuilder> extend protected Map metaData; /** - * Constructs a new aggregator factory. + * Constructs a new aggregation builder. * * @param name The aggregation name * @param type The aggregation type */ - public AggregatorBuilder(String name, Type type) { + public AggregationBuilder(String name, Type type) { if (name == null) { throw new IllegalArgumentException("[name] must not be null: [" + name + "]"); } @@ -63,7 +65,7 @@ public abstract class AggregatorBuilder> extend /** * Read from a stream. */ - protected AggregatorBuilder(StreamInput in, Type type) throws IOException { + protected AggregationBuilder(StreamInput in, Type type) throws IOException { name = in.readString(); this.type = type; factoriesBuilder = new AggregatorFactories.Builder(in); @@ -84,7 +86,7 @@ public abstract class AggregatorBuilder> extend * Add a sub aggregation to this aggregation. 
*/ @SuppressWarnings("unchecked") - public AB subAggregation(AggregatorBuilder aggregation) { + public AB subAggregation(AggregationBuilder aggregation) { if (aggregation == null) { throw new IllegalArgumentException("[aggregation] must not be null: [" + name + "]"); } @@ -178,7 +180,7 @@ public abstract class AggregatorBuilder> extend if (getClass() != obj.getClass()) return false; @SuppressWarnings("unchecked") - AggregatorBuilder other = (AggregatorBuilder) obj; + AggregationBuilder other = (AggregationBuilder) obj; if (!Objects.equals(name, other.name)) return false; if (!Objects.equals(type, other.type)) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index b76d5d17891..b1818971d6b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -22,65 +22,65 @@ import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.children.Children; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.Filters; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.Global; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.missing.Missing; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.Nested; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested; -import 
org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.Range; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.min.Min; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; /** * Utility class to create aggregations. @@ -93,234 +93,234 @@ public class AggregationBuilders { /** * Create a new {@link ValueCount} aggregation with the given name. */ - public static ValueCountAggregatorBuilder count(String name) { - return new ValueCountAggregatorBuilder(name, null); + public static ValueCountAggregationBuilder count(String name) { + return new ValueCountAggregationBuilder(name, null); } /** * Create a new {@link Avg} aggregation with the given name. */ - public static AvgAggregatorBuilder avg(String name) { - return new AvgAggregatorBuilder(name); + public static AvgAggregationBuilder avg(String name) { + return new AvgAggregationBuilder(name); } /** * Create a new {@link Max} aggregation with the given name. */ - public static MaxAggregatorBuilder max(String name) { - return new MaxAggregatorBuilder(name); + public static MaxAggregationBuilder max(String name) { + return new MaxAggregationBuilder(name); } /** * Create a new {@link Min} aggregation with the given name. */ - public static MinAggregatorBuilder min(String name) { - return new MinAggregatorBuilder(name); + public static MinAggregationBuilder min(String name) { + return new MinAggregationBuilder(name); } /** * Create a new {@link Sum} aggregation with the given name. */ - public static SumAggregatorBuilder sum(String name) { - return new SumAggregatorBuilder(name); + public static SumAggregationBuilder sum(String name) { + return new SumAggregationBuilder(name); } /** * Create a new {@link Stats} aggregation with the given name. 
*/ - public static StatsAggregatorBuilder stats(String name) { - return new StatsAggregatorBuilder(name); + public static StatsAggregationBuilder stats(String name) { + return new StatsAggregationBuilder(name); } /** * Create a new {@link ExtendedStats} aggregation with the given name. */ - public static ExtendedStatsAggregatorBuilder extendedStats(String name) { - return new ExtendedStatsAggregatorBuilder(name); + public static ExtendedStatsAggregationBuilder extendedStats(String name) { + return new ExtendedStatsAggregationBuilder(name); } /** * Create a new {@link Filter} aggregation with the given name. */ - public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) { - return new FilterAggregatorBuilder(name, filter); + public static FilterAggregationBuilder filter(String name, QueryBuilder filter) { + return new FilterAggregationBuilder(name, filter); } /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregatorBuilder filters(String name, KeyedFilter... filters) { - return new FiltersAggregatorBuilder(name, filters); + public static FiltersAggregationBuilder filters(String name, KeyedFilter... filters) { + return new FiltersAggregationBuilder(name, filters); } /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) { - return new FiltersAggregatorBuilder(name, filters); + public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) { + return new FiltersAggregationBuilder(name, filters); } /** * Create a new {@link Sampler} aggregation with the given name. */ - public static SamplerAggregatorBuilder sampler(String name) { - return new SamplerAggregatorBuilder(name); + public static SamplerAggregationBuilder sampler(String name) { + return new SamplerAggregationBuilder(name); } /** * Create a new {@link Sampler} aggregation with the given name. */ - public static DiversifiedAggregatorBuilder diversifiedSampler(String name) { - return new DiversifiedAggregatorBuilder(name); + public static DiversifiedAggregationBuilder diversifiedSampler(String name) { + return new DiversifiedAggregationBuilder(name); } /** * Create a new {@link Global} aggregation with the given name. */ - public static GlobalAggregatorBuilder global(String name) { - return new GlobalAggregatorBuilder(name); + public static GlobalAggregationBuilder global(String name) { + return new GlobalAggregationBuilder(name); } /** * Create a new {@link Missing} aggregation with the given name. */ - public static MissingAggregatorBuilder missing(String name) { - return new MissingAggregatorBuilder(name, null); + public static MissingAggregationBuilder missing(String name) { + return new MissingAggregationBuilder(name, null); } /** * Create a new {@link Nested} aggregation with the given name. */ - public static NestedAggregatorBuilder nested(String name, String path) { - return new NestedAggregatorBuilder(name, path); + public static NestedAggregationBuilder nested(String name, String path) { + return new NestedAggregationBuilder(name, path); } /** * Create a new {@link ReverseNested} aggregation with the given name. */ - public static ReverseNestedAggregatorBuilder reverseNested(String name) { - return new ReverseNestedAggregatorBuilder(name); + public static ReverseNestedAggregationBuilder reverseNested(String name) { + return new ReverseNestedAggregationBuilder(name); } /** * Create a new {@link Children} aggregation with the given name. 
*/ - public static ChildrenAggregatorBuilder children(String name, String childType) { - return new ChildrenAggregatorBuilder(name, childType); + public static ChildrenAggregationBuilder children(String name, String childType) { + return new ChildrenAggregationBuilder(name, childType); } /** * Create a new {@link GeoDistance} aggregation with the given name. */ - public static GeoDistanceAggregatorBuilder geoDistance(String name, GeoPoint origin) { - return new GeoDistanceAggregatorBuilder(name, origin); + public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) { + return new GeoDistanceAggregationBuilder(name, origin); } /** * Create a new {@link Histogram} aggregation with the given name. */ - public static HistogramAggregatorBuilder histogram(String name) { - return new HistogramAggregatorBuilder(name); + public static HistogramAggregationBuilder histogram(String name) { + return new HistogramAggregationBuilder(name); } /** * Create a new {@link GeoHashGrid} aggregation with the given name. */ - public static GeoGridAggregatorBuilder geohashGrid(String name) { - return new GeoGridAggregatorBuilder(name); + public static GeoGridAggregationBuilder geohashGrid(String name) { + return new GeoGridAggregationBuilder(name); } /** * Create a new {@link SignificantTerms} aggregation with the given name. */ - public static SignificantTermsAggregatorBuilder significantTerms(String name) { - return new SignificantTermsAggregatorBuilder(name, null); + public static SignificantTermsAggregationBuilder significantTerms(String name) { + return new SignificantTermsAggregationBuilder(name, null); } /** - * Create a new {@link DateHistogramAggregatorBuilder} aggregation with the given + * Create a new {@link DateHistogramAggregationBuilder} aggregation with the given * name. */ - public static DateHistogramAggregatorBuilder dateHistogram(String name) { - return new DateHistogramAggregatorBuilder(name); + public static DateHistogramAggregationBuilder dateHistogram(String name) { + return new DateHistogramAggregationBuilder(name); } /** * Create a new {@link Range} aggregation with the given name. */ - public static RangeAggregatorBuilder range(String name) { - return new RangeAggregatorBuilder(name); + public static RangeAggregationBuilder range(String name) { + return new RangeAggregationBuilder(name); } /** - * Create a new {@link DateRangeAggregatorBuilder} aggregation with the + * Create a new {@link DateRangeAggregationBuilder} aggregation with the * given name. */ - public static DateRangeAggregatorBuilder dateRange(String name) { - return new DateRangeAggregatorBuilder(name); + public static DateRangeAggregationBuilder dateRange(String name) { + return new DateRangeAggregationBuilder(name); } /** - * Create a new {@link IpRangeAggregatorBuilder} aggregation with the + * Create a new {@link IpRangeAggregationBuilder} aggregation with the * given name. */ - public static IpRangeAggregatorBuilder ipRange(String name) { - return new IpRangeAggregatorBuilder(name); + public static IpRangeAggregationBuilder ipRange(String name) { + return new IpRangeAggregationBuilder(name); } /** * Create a new {@link Terms} aggregation with the given name. */ - public static TermsAggregatorBuilder terms(String name) { - return new TermsAggregatorBuilder(name, null); + public static TermsAggregationBuilder terms(String name) { + return new TermsAggregationBuilder(name, null); } /** * Create a new {@link Percentiles} aggregation with the given name. 
*/ - public static PercentilesAggregatorBuilder percentiles(String name) { - return new PercentilesAggregatorBuilder(name); + public static PercentilesAggregationBuilder percentiles(String name) { + return new PercentilesAggregationBuilder(name); } /** * Create a new {@link PercentileRanks} aggregation with the given name. */ - public static PercentileRanksAggregatorBuilder percentileRanks(String name) { - return new PercentileRanksAggregatorBuilder(name); + public static PercentileRanksAggregationBuilder percentileRanks(String name) { + return new PercentileRanksAggregationBuilder(name); } /** * Create a new {@link Cardinality} aggregation with the given name. */ - public static CardinalityAggregatorBuilder cardinality(String name) { - return new CardinalityAggregatorBuilder(name, null); + public static CardinalityAggregationBuilder cardinality(String name) { + return new CardinalityAggregationBuilder(name, null); } /** * Create a new {@link TopHits} aggregation with the given name. */ - public static TopHitsAggregatorBuilder topHits(String name) { - return new TopHitsAggregatorBuilder(name); + public static TopHitsAggregationBuilder topHits(String name) { + return new TopHitsAggregationBuilder(name); } /** * Create a new {@link GeoBounds} aggregation with the given name. */ - public static GeoBoundsAggregatorBuilder geoBounds(String name) { - return new GeoBoundsAggregatorBuilder(name); + public static GeoBoundsAggregationBuilder geoBounds(String name) { + return new GeoBoundsAggregationBuilder(name); } /** * Create a new {@link GeoCentroid} aggregation with the given name. */ - public static GeoCentroidAggregatorBuilder geoCentroid(String name) { - return new GeoCentroidAggregatorBuilder(name); + public static GeoCentroidAggregationBuilder geoCentroid(String name) { + return new GeoCentroidAggregationBuilder(name); } /** * Create a new {@link ScriptedMetric} aggregation with the given name. */ - public static ScriptedMetricAggregatorBuilder scriptedMetric(String name) { - return new ScriptedMetricAggregatorBuilder(name); + public static ScriptedMetricAggregationBuilder scriptedMetric(String name) { + return new ScriptedMetricAggregationBuilder(name); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index 426f148e38e..faceada6415 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable { /** * Parses the aggregation request and creates the appropriate aggregator factory for it. 
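To see the renamed surface end to end, a sketch that combines the static factories with the renamed `subAggregation` API (aggregation and field names hypothetical):

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

class RenamedBuildersSketch {
    static TermsAggregationBuilder example() {
        // Same call shape as before this change; only the builder types were renamed.
        return AggregationBuilders.terms("tags")
                .subAggregation(AggregationBuilders.avg("avg_score"));
    }
}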
* - * @see AggregatorBuilder + * @see AggregationBuilder */ @FunctionalInterface public interface Parser { @@ -55,7 +55,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable { * @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped * @throws java.io.IOException When parsing fails */ - AggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException; + AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException; } /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index f1236cd5cce..4e07ffcc4d8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -124,7 +124,7 @@ public class AggregatorFactories { public static class Builder extends ToXContentToBytes implements Writeable { private final Set names = new HashSet<>(); - private final List> aggregatorBuilders = new ArrayList<>(); + private final List> aggregationBuilders = new ArrayList<>(); private final List> pipelineAggregatorBuilders = new ArrayList<>(); private boolean skipResolveOrder; @@ -140,7 +140,7 @@ public class AggregatorFactories { public Builder(StreamInput in) throws IOException { int factoriesSize = in.readVInt(); for (int i = 0; i < factoriesSize; i++) { - addAggregator(in.readNamedWriteable(AggregatorBuilder.class)); + addAggregator(in.readNamedWriteable(AggregationBuilder.class)); } int pipelineFactoriesSize = in.readVInt(); for (int i = 0; i < pipelineFactoriesSize; i++) { @@ -150,8 +150,8 @@ public class AggregatorFactories { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(this.aggregatorBuilders.size()); - for (AggregatorBuilder factory : aggregatorBuilders) { + out.writeVInt(this.aggregationBuilders.size()); + for (AggregationBuilder factory : aggregationBuilders) { out.writeNamedWriteable(factory); } out.writeVInt(this.pipelineAggregatorBuilders.size()); @@ -164,11 +164,11 @@ public class AggregatorFactories { throw new UnsupportedOperationException("This needs to be removed"); } - public Builder addAggregator(AggregatorBuilder factory) { + public Builder addAggregator(AggregationBuilder factory) { if (!names.add(factory.name)) { throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]"); } - aggregatorBuilders.add(factory); + aggregationBuilders.add(factory); return this; } @@ -186,30 +186,30 @@ public class AggregatorFactories { } public AggregatorFactories build(AggregationContext context, AggregatorFactory parent) throws IOException { - if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) { + if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) { return EMPTY; } List> orderedpipelineAggregators = null; if (skipResolveOrder) { orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders); } else { - orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders); + orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders); } - AggregatorFactory[] aggFactories = new AggregatorFactory[aggregatorBuilders.size()]; - for (int i = 0; i < aggregatorBuilders.size(); i++) { - 
aggFactories[i] = aggregatorBuilders.get(i).build(context, parent); + AggregatorFactory[] aggFactories = new AggregatorFactory[aggregationBuilders.size()]; + for (int i = 0; i < aggregationBuilders.size(); i++) { + aggFactories[i] = aggregationBuilders.get(i).build(context, parent); } return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators); } private List> resolvePipelineAggregatorOrder( - List> pipelineAggregatorBuilders, List> aggBuilders) { + List> pipelineAggregatorBuilders, List> aggBuilders) { Map> pipelineAggregatorBuildersMap = new HashMap<>(); for (PipelineAggregatorBuilder builder : pipelineAggregatorBuilders) { pipelineAggregatorBuildersMap.put(builder.getName(), builder); } - Map> aggBuildersMap = new HashMap<>(); - for (AggregatorBuilder aggBuilder : aggBuilders) { + Map> aggBuildersMap = new HashMap<>(); + for (AggregationBuilder aggBuilder : aggBuilders) { aggBuildersMap.put(aggBuilder.name, aggBuilder); } List> orderedPipelineAggregatorrs = new LinkedList<>(); @@ -223,7 +223,7 @@ public class AggregatorFactories { return orderedPipelineAggregatorrs; } - private void resolvePipelineAggregatorOrder(Map> aggBuildersMap, + private void resolvePipelineAggregatorOrder(Map> aggBuildersMap, Map> pipelineAggregatorBuildersMap, List> orderedPipelineAggregators, List> unmarkedBuilders, Set> temporarilyMarked, PipelineAggregatorBuilder builder) { @@ -238,7 +238,7 @@ public class AggregatorFactories { if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) { continue; } else if (aggBuildersMap.containsKey(firstAggName)) { - AggregatorBuilder aggBuilder = aggBuildersMap.get(firstAggName); + AggregationBuilder aggBuilder = aggBuildersMap.get(firstAggName); for (int i = 1; i < bucketsPathElements.size(); i++) { PathElement pathElement = bucketsPathElements.get(i); String aggName = pathElement.name; @@ -247,9 +247,9 @@ public class AggregatorFactories { } else { // Check the non-pipeline sub-aggregator // factories - AggregatorBuilder[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories(); + AggregationBuilder[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories(); boolean foundSubBuilder = false; - for (AggregatorBuilder subBuilder : subBuilders) { + for (AggregationBuilder subBuilder : subBuilders) { if (aggName.equals(subBuilder.name)) { aggBuilder = subBuilder; foundSubBuilder = true; @@ -289,8 +289,8 @@ public class AggregatorFactories { } } - AggregatorBuilder[] getAggregatorFactories() { - return this.aggregatorBuilders.toArray(new AggregatorBuilder[this.aggregatorBuilders.size()]); + AggregationBuilder[] getAggregatorFactories() { + return this.aggregationBuilders.toArray(new AggregationBuilder[this.aggregationBuilders.size()]); } List> getPipelineAggregatorFactories() { @@ -298,14 +298,14 @@ public class AggregatorFactories { } public int count() { - return aggregatorBuilders.size() + pipelineAggregatorBuilders.size(); + return aggregationBuilders.size() + pipelineAggregatorBuilders.size(); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - if (aggregatorBuilders != null) { - for (AggregatorBuilder subAgg : aggregatorBuilders) { + if (aggregationBuilders != null) { + for (AggregationBuilder subAgg : aggregationBuilders) { subAgg.toXContent(builder, params); } } @@ -320,7 +320,7 @@ public class AggregatorFactories { @Override public int hashCode() { - return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders); + return 
Objects.hash(aggregationBuilders, pipelineAggregatorBuilders); } @Override @@ -330,7 +330,7 @@ public class AggregatorFactories { if (getClass() != obj.getClass()) return false; Builder other = (Builder) obj; - if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders)) + if (!Objects.equals(aggregationBuilders, other.aggregationBuilders)) return false; if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders)) return false; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java index 2fd1f63d620..55345d6e5ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java @@ -104,7 +104,7 @@ public class AggregatorParsers { + token + "], expected a [" + XContentParser.Token.START_OBJECT + "]."); } - AggregatorBuilder aggFactory = null; + AggregationBuilder aggFactory = null; PipelineAggregatorBuilder pipelineAggregatorFactory = null; AggregatorFactories.Builder subFactories = null; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java similarity index 92% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java index c5982d1000e..3749d2b2edd 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java @@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -44,7 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalChildren.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -59,7 +59,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class FilterAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalFilter.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,7 +50,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder { +public class FiltersAggregationBuilder extends AggregationBuilder { public static final String NAME = 
InternalFilters.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -62,11 +62,11 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder filters) { + private FiltersAggregationBuilder(String name, List filters) { super(name, InternalFilters.TYPE); // internally we want to have a fixed order of filters, regardless of the order of the filters in the request this.filters = new ArrayList<>(filters); @@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder keyedFilters = new ArrayList<>(filters.length); for (int i = 0; i < filters.length; i++) { @@ -93,7 +93,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder { +public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoHashGrid.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -53,14 +53,14 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java index ba4f84017c1..1b2c4c26372 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.NonCollectingAggregator; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder.CellIdSource; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 2ecf4953e78..1ae31e09ba0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -45,10 +45,10 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser { } @Override - protected GeoGridAggregatorBuilder createFactory( + protected GeoGridAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(aggregationName); + GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName); Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION); if (precision != null) { factory.precision(precision); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java similarity index 84% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java index 7a60dcdab93..0f7e0713598 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java @@ -24,25 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; -public class GlobalAggregatorBuilder extends AggregatorBuilder { +public class GlobalAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalGlobal.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public GlobalAggregatorBuilder(String name) { + public GlobalAggregationBuilder(String name) { super(name, InternalGlobal.TYPE); } /** * Read from a stream. */ - public GlobalAggregatorBuilder(StreamInput in) throws IOException { + public GlobalAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGlobal.TYPE); } @@ -64,9 +64,9 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder> - extends ValuesSourceAggregatorBuilder { + extends ValuesSourceAggregationBuilder { protected long interval; protected long offset = 0; @@ -200,4 +200,4 @@ public abstract class AbstractHistogramBuilder { +public class DateHistogramAggregationBuilder extends AbstractHistogramBuilder { public static final String NAME = InternalDateHistogram.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private DateHistogramInterval dateHistogramInterval; - public DateHistogramAggregatorBuilder(String name) { + public DateHistogramAggregationBuilder(String name) { super(name, InternalDateHistogram.HISTOGRAM_FACTORY); } /** * Read from a stream. 
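Worth noting from the `AggregatorFactories.Builder` hunks earlier: the renamed `aggregationBuilders` list still backs `addAggregator`, which keeps its duplicate-sibling-name check. A sketch, using the renamed global builder from this hunk and assuming `Builder`'s no-arg constructor (not shown in this diff):

```java
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;

public class SiblingNameDemo {
    public static void main(String[] args) {
        AggregatorFactories.Builder factories = new AggregatorFactories.Builder();
        factories.addAggregator(new GlobalAggregationBuilder("all_docs"));
        // Adding a second sibling with the same name now fails fast:
        // factories.addAggregator(new GlobalAggregationBuilder("all_docs"));
        // -> IllegalArgumentException: Two sibling aggregations cannot have the same name: [all_docs]
    }
}
```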
*/ - public DateHistogramAggregatorBuilder(StreamInput in) throws IOException { + public DateHistogramAggregationBuilder(StreamInput in) throws IOException { super(in, InternalDateHistogram.HISTOGRAM_FACTORY); dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); } @@ -61,7 +61,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder otherOptions) { - DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(aggregationName); + protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName); Object interval = otherOptions.get(Rounding.Interval.INTERVAL_FIELD); if (interval == null) { throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]"); @@ -89,6 +89,6 @@ public class DateHistogramParser extends HistogramParser { @Override protected long parseStringOffset(String offset) throws IOException { - return DateHistogramAggregatorBuilder.parseStringOffset(offset); + return DateHistogramAggregationBuilder.parseStringOffset(offset); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java index 49bbd7160cf..54d52466bbb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java @@ -29,18 +29,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import java.io.IOException; -public class HistogramAggregatorBuilder extends AbstractHistogramBuilder { +public class HistogramAggregationBuilder extends AbstractHistogramBuilder { public static final String NAME = InternalHistogram.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public HistogramAggregatorBuilder(String name) { + public HistogramAggregationBuilder(String name) { super(name, InternalHistogram.HISTOGRAM_FACTORY); } /** * Read from a stream. 
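Each renamed builder keeps the `Writeable` contract: `writeTo` plus the "read from a stream" constructors being renamed throughout these hunks. A hedged round-trip sketch; `BytesStreamOutput` and `StreamInput.wrap` are this era's test idioms and an assumption here, not part of the diff:

```java
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class StreamRoundTripDemo {
    public static void main(String[] args) throws Exception {
        DateHistogramAggregationBuilder original = new DateHistogramAggregationBuilder("per_day");
        original.dateHistogramInterval(DateHistogramInterval.DAY);
        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);                           // serialize
        StreamInput in = StreamInput.wrap(out.bytes());  // assumed test-utility helper
        DateHistogramAggregationBuilder copy = new DateHistogramAggregationBuilder(in);
        assert original.equals(copy);                    // builders define equals/hashCode
    }
}
```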
*/ - public HistogramAggregatorBuilder(StreamInput in) throws IOException { + public HistogramAggregationBuilder(StreamInput in) throws IOException { super(in, InternalHistogram.HISTOGRAM_FACTORY); } @@ -55,4 +55,4 @@ public class HistogramAggregatorBuilder extends AbstractHistogramBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder(aggregationName); + HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName); Long interval = (Long) otherOptions.get(Rounding.Interval.INTERVAL_FIELD); if (interval == null) { throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]"); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java index 34263980bf4..f10f7683841 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java @@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalMissing.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MissingAggregatorBuilder(String name, ValueType targetValueType) { + public MissingAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalMissing.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. 
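Usage-wise the histogram and missing builders are unchanged apart from their names; a short sketch (field names invented, and the `interval` setter shape carried over from the old fluent API):

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;

public class HistogramMissingDemo {
    public static void main(String[] args) {
        HistogramAggregationBuilder prices = AggregationBuilders.histogram("prices");
        prices.field("price").interval(50);
        // targetValueType may be null, matching how this diff wires other
        // ANY-typed builders (e.g. terms(name, null)).
        MissingAggregationBuilder noPrice = new MissingAggregationBuilder("no_price", null);
        noPrice.field("price");
    }
}
```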
*/ - public MissingAggregatorBuilder(StreamInput in) throws IOException { + public MissingAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMissing.TYPE, ValuesSourceType.ANY); } @@ -85,4 +85,4 @@ public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - return new MissingAggregatorBuilder(aggregationName, targetValueType); + protected MissingAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MissingAggregationBuilder(aggregationName, targetValueType); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java similarity index 89% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java index f01a78e9211..33771910f16 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.Objects; -public class NestedAggregatorBuilder extends AggregatorBuilder { +public class NestedAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalNested.TYPE.name(); public static final ParseField AGGREGATION_FIELD_NAME = new ParseField(NAME); @@ -47,7 +47,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder { +public class ReverseNestedAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalReverseNested.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private String path; - public ReverseNestedAggregatorBuilder(String name) { + public ReverseNestedAggregationBuilder(String name) { super(name, InternalReverseNested.TYPE); } /** * Read from a stream. 
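A nested/sub-aggregation sketch with the renamed classes; the `(name, path)` constructor shape is assumed from the old `NestedAggregatorBuilder`, and the `comments` mapping path is invented:

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;

public class NestedDemo {
    public static void main(String[] args) {
        NestedAggregationBuilder comments = new NestedAggregationBuilder("comments", "comments");
        comments.subAggregation(AggregationBuilders.terms("commenters").field("comments.author"));
    }
}
```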
*/ - public ReverseNestedAggregatorBuilder(StreamInput in) throws IOException { + public ReverseNestedAggregationBuilder(StreamInput in) throws IOException { super(in, InternalReverseNested.TYPE); path = in.readOptionalString(); } @@ -62,7 +62,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder, R extends Range> - extends ValuesSourceAggregatorBuilder { + extends ValuesSourceAggregationBuilder { protected final InternalRange.Factory rangeFactory; protected List ranges = new ArrayList<>(); @@ -103,4 +103,4 @@ public abstract class AbstractRangeBuilder { +public class RangeAggregationBuilder extends AbstractRangeBuilder { public static final String NAME = InternalRange.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public RangeAggregatorBuilder(String name) { + public RangeAggregationBuilder(String name) { super(name, InternalRange.FACTORY); } /** * Read from a stream. */ - public RangeAggregatorBuilder(StreamInput in) throws IOException { + public RangeAggregationBuilder(StreamInput in) throws IOException { super(in, InternalRange.FACTORY, Range::new); } @@ -55,7 +55,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilderfrom and * to. */ - public RangeAggregatorBuilder addRange(double from, double to) { + public RangeAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -77,7 +77,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - RangeAggregatorBuilder factory = new RangeAggregatorBuilder(aggregationName); + RangeAggregationBuilder factory = new RangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java similarity index 81% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java index 9c28461df6c..392744a4f1c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java @@ -33,18 +33,18 @@ import org.joda.time.DateTime; import java.io.IOException; -public class DateRangeAggregatorBuilder extends AbstractRangeBuilder { +public class DateRangeAggregationBuilder extends AbstractRangeBuilder { public static final String NAME = InternalDateRange.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public DateRangeAggregatorBuilder(String name) { + public DateRangeAggregationBuilder(String name) { super(name, InternalDateRange.FACTORY); } /** * Read from a stream. */ - public DateRangeAggregatorBuilder(StreamInput in) throws IOException { + public DateRangeAggregationBuilder(StreamInput in) throws IOException { super(in, InternalDateRange.FACTORY, Range::new); } @@ -63,7 +63,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. 
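The range builder's `addRange`/`addUnbounded*` family reads the same as before, just on the renamed type. A sketch (keys and bounds invented; the keyed `addUnboundedTo`/`addUnboundedFrom` overloads are assumed unchanged from the old builder):

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;

public class RangeDemo {
    public static void main(String[] args) {
        RangeAggregationBuilder priceRanges = AggregationBuilders.range("price_ranges")
                .field("price")
                .addUnboundedTo("cheap", 50)        // key supplied explicitly
                .addRange(50, 100)                  // key generated from the bounds
                .addUnboundedFrom("expensive", 100);
    }
}
```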
*/ - public DateRangeAggregatorBuilder addRange(String from, String to) { + public DateRangeAggregationBuilder addRange(String from, String to) { return addRange(null, from, to); } @@ -84,7 +84,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. */ - public DateRangeAggregatorBuilder addRange(double from, double to) { + public DateRangeAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -149,7 +149,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. */ - public DateRangeAggregatorBuilder addRange(DateTime from, DateTime to) { + public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) { return addRange(null, from, to); } @@ -222,7 +222,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder otherOptions) { - DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder(aggregationName); + protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java similarity index 85% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java index a72b4fd322e..e82a769431a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; @@ -42,7 +42,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; -public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoDistance.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -52,12 +52,12 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< private GeoDistance distanceType = GeoDistance.DEFAULT; private boolean keyed = false; - public GeoDistanceAggregatorBuilder(String name, GeoPoint origin) { + public GeoDistanceAggregationBuilder(String name, GeoPoint origin) { this(name, origin, 
InternalGeoDistance.FACTORY); } - private GeoDistanceAggregatorBuilder(String name, GeoPoint origin, - InternalRange.Factory rangeFactory) { + private GeoDistanceAggregationBuilder(String name, GeoPoint origin, + InternalRange.Factory rangeFactory) { super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); if (origin == null) { throw new IllegalArgumentException("[origin] must not be null: [" + name + "]"); @@ -68,7 +68,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Read from a stream. */ - public GeoDistanceAggregatorBuilder(StreamInput in) throws IOException { + public GeoDistanceAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoDistance.FACTORY.type(), InternalGeoDistance.FACTORY.getValueSourceType(), InternalGeoDistance.FACTORY.getValueType()); origin = new GeoPoint(in.readDouble(), in.readDouble()); @@ -95,7 +95,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< unit.writeTo(out); } - public GeoDistanceAggregatorBuilder addRange(Range range) { + public GeoDistanceAggregationBuilder addRange(Range range) { if (range == null) { throw new IllegalArgumentException("[range] must not be null: [" + name + "]"); } @@ -113,7 +113,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param to * the upper bound on the distances, exclusive */ - public GeoDistanceAggregatorBuilder addRange(String key, double from, double to) { + public GeoDistanceAggregationBuilder addRange(String key, double from, double to) { ranges.add(new Range(key, from, to)); return this; } @@ -123,7 +123,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * automatically generated based on from and * to. */ - public GeoDistanceAggregatorBuilder addRange(double from, double to) { + public GeoDistanceAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -135,7 +135,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param to * the upper bound on the distances, exclusive */ - public GeoDistanceAggregatorBuilder addUnboundedTo(String key, double to) { + public GeoDistanceAggregationBuilder addUnboundedTo(String key, double to) { ranges.add(new Range(key, null, to)); return this; } @@ -144,7 +144,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * Same as {@link #addUnboundedTo(String, double)} but the key will be * computed automatically. */ - public GeoDistanceAggregatorBuilder addUnboundedTo(double to) { + public GeoDistanceAggregationBuilder addUnboundedTo(double to) { return addUnboundedTo(null, to); } @@ -156,7 +156,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param from * the lower bound on the distances, inclusive */ - public GeoDistanceAggregatorBuilder addUnboundedFrom(String key, double from) { + public GeoDistanceAggregationBuilder addUnboundedFrom(String key, double from) { addRange(new Range(key, from, null)); return this; } @@ -165,7 +165,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * Same as {@link #addUnboundedFrom(String, double)} but the key will be * computed automatically. 
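Putting the renamed geo-distance builder together with the `addRange` variants in this hunk and the `unit` setter in the next (origin, field, and distances invented; note the constructor rejects a null origin):

```java
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;

public class GeoRingsDemo {
    public static void main(String[] args) {
        GeoDistanceAggregationBuilder rings =
                AggregationBuilders.geoDistance("rings", new GeoPoint(52.3760, 4.8952))
                        .field("location")
                        .unit(DistanceUnit.KILOMETERS)
                        .addUnboundedTo(100)   // everything within 100 km
                        .addRange(100, 300)    // the middle ring
                        .addUnboundedFrom(300);
    }
}
```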
*/ - public GeoDistanceAggregatorBuilder addUnboundedFrom(double from) { + public GeoDistanceAggregationBuilder addUnboundedFrom(double from) { return addUnboundedFrom(null, from); } @@ -178,7 +178,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return NAME; } - public GeoDistanceAggregatorBuilder unit(DistanceUnit unit) { + public GeoDistanceAggregationBuilder unit(DistanceUnit unit) { if (unit == null) { throw new IllegalArgumentException("[unit] must not be null: [" + name + "]"); } @@ -190,7 +190,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return unit; } - public GeoDistanceAggregatorBuilder distanceType(GeoDistance distanceType) { + public GeoDistanceAggregationBuilder distanceType(GeoDistance distanceType) { if (distanceType == null) { throw new IllegalArgumentException("[distanceType] must not be null: [" + name + "]"); } @@ -202,7 +202,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return distanceType; } - public GeoDistanceAggregatorBuilder keyed(boolean keyed) { + public GeoDistanceAggregationBuilder keyed(boolean keyed) { this.keyed = keyed; return this; } @@ -236,7 +236,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< @Override protected boolean innerEquals(Object obj) { - GeoDistanceAggregatorBuilder other = (GeoDistanceAggregatorBuilder) obj; + GeoDistanceAggregationBuilder other = (GeoDistanceAggregationBuilder) obj; return Objects.equals(origin, other.origin) && Objects.equals(ranges, other.ranges) && Objects.equals(keyed, other.keyed) @@ -244,4 +244,4 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< && Objects.equals(unit, other.unit); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java index ed6d6a67e2a..b98757aae5d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java @@ -85,10 +85,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { } @Override - protected GeoDistanceAggregatorBuilder createFactory( + protected GeoDistanceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD); - GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder(aggregationName, origin); + GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(aggregationName, origin); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { @@ -171,4 +171,4 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { } return false; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java rename to 
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java index c56a2952f8d..243db5f75e3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java @@ -44,14 +44,14 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -public final class IpRangeAggregatorBuilder - extends ValuesSourceAggregatorBuilder { +public final class IpRangeAggregationBuilder + extends ValuesSourceAggregationBuilder { private static final String NAME = "ip_range"; public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME); @@ -163,7 +163,7 @@ public final class IpRangeAggregatorBuilder private boolean keyed = false; private List ranges = new ArrayList<>(); - public IpRangeAggregatorBuilder(String name) { + public IpRangeAggregationBuilder(String name) { super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP); } @@ -172,7 +172,7 @@ public final class IpRangeAggregatorBuilder return NAME; } - public IpRangeAggregatorBuilder keyed(boolean keyed) { + public IpRangeAggregationBuilder keyed(boolean keyed) { this.keyed = keyed; return this; } @@ -187,7 +187,7 @@ public final class IpRangeAggregatorBuilder } /** Add a new {@link Range} to this aggregation. */ - public IpRangeAggregatorBuilder addRange(Range range) { + public IpRangeAggregationBuilder addRange(Range range) { ranges.add(range); return this; } @@ -202,7 +202,7 @@ public final class IpRangeAggregatorBuilder * @param to * the upper bound on the distances, exclusive */ - public IpRangeAggregatorBuilder addRange(String key, String from, String to) { + public IpRangeAggregationBuilder addRange(String key, String from, String to) { addRange(new Range(key, from, to)); return this; } @@ -210,7 +210,7 @@ public final class IpRangeAggregatorBuilder /** * Add a new range to this aggregation using the CIDR notation. */ - public IpRangeAggregatorBuilder addMaskRange(String key, String mask) { + public IpRangeAggregationBuilder addMaskRange(String key, String mask) { return addRange(new Range(key, mask)); } @@ -218,7 +218,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addMaskRange(String, String)} but uses the mask itself as * a key. */ - public IpRangeAggregatorBuilder addMaskRange(String mask) { + public IpRangeAggregationBuilder addMaskRange(String mask) { return addRange(new Range(mask, mask)); } @@ -226,7 +226,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but the key will be * automatically generated. 
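The ip_range builder mixes explicit bounds and CIDR masks; all of the methods below appear in this hunk and the next (field and addresses invented). One behavioral quirk this diff preserves: `script()` is rejected outright.

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;

public class IpRangeDemo {
    public static void main(String[] args) {
        IpRangeAggregationBuilder clients = AggregationBuilders.ipRange("clients")
                .field("client_ip")
                .addUnboundedTo("10.0.0.127")
                .addMaskRange("10.0.1.0/24")   // CIDR notation; the mask doubles as the key
                .addUnboundedFrom("10.0.2.0");
        // clients.script(...) would throw: "[ip_range] does not support scripts"
    }
}
```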
*/ - public IpRangeAggregatorBuilder addRange(String from, String to) { + public IpRangeAggregationBuilder addRange(String from, String to) { return addRange(null, from, to); } @@ -234,7 +234,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but there will be no * lower bound. */ - public IpRangeAggregatorBuilder addUnboundedTo(String key, String to) { + public IpRangeAggregationBuilder addUnboundedTo(String key, String to) { addRange(new Range(key, null, to)); return this; } @@ -243,7 +243,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addUnboundedTo(String, String)} but the key will be * generated automatically. */ - public IpRangeAggregatorBuilder addUnboundedTo(String to) { + public IpRangeAggregationBuilder addUnboundedTo(String to) { return addUnboundedTo(null, to); } @@ -251,13 +251,13 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but there will be no * upper bound. */ - public IpRangeAggregatorBuilder addUnboundedFrom(String key, String from) { + public IpRangeAggregationBuilder addUnboundedFrom(String key, String from) { addRange(new Range(key, from, null)); return this; } @Override - public IpRangeAggregatorBuilder script(Script script) { + public IpRangeAggregationBuilder script(Script script) { throw new IllegalArgumentException("[ip_range] does not support scripts"); } @@ -265,11 +265,11 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addUnboundedFrom(String, String)} but the key will be * generated automatically. */ - public IpRangeAggregatorBuilder addUnboundedFrom(String from) { + public IpRangeAggregationBuilder addUnboundedFrom(String from) { return addUnboundedFrom(null, from); } - public IpRangeAggregatorBuilder(StreamInput in) throws IOException { + public IpRangeAggregationBuilder(StreamInput in) throws IOException { super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP); final int numRanges = in.readVInt(); for (int i = 0; i < numRanges; ++i) { @@ -323,7 +323,7 @@ public final class IpRangeAggregatorBuilder @Override protected boolean innerEquals(Object obj) { - IpRangeAggregatorBuilder that = (IpRangeAggregatorBuilder) obj; + IpRangeAggregationBuilder that = (IpRangeAggregationBuilder) obj; return keyed == that.keyed && ranges.equals(that.ranges); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java index 64ed77d42f3..8445fb2d459 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java @@ -30,10 +30,10 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder.Range; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import 
org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; /** @@ -48,10 +48,10 @@ public class IpRangeParser extends BytesValuesSourceParser { } @Override - protected ValuesSourceAggregatorBuilder createFactory( + protected ValuesSourceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - IpRangeAggregatorBuilder range = new IpRangeAggregatorBuilder(aggregationName); + IpRangeAggregationBuilder range = new IpRangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") Iterable ranges = (Iterable) otherOptions.get(RangeAggregator.RANGES_FIELD); if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java index f0c923e6efc..804574eea10 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -36,25 +36,25 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "diversified_sampler"; public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); public static final Type TYPE = new Type(NAME); public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1; - private int shardSize = SamplerAggregatorBuilder.DEFAULT_SHARD_SAMPLE_SIZE; + private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT; private String executionHint = null; - public DiversifiedAggregatorBuilder(String name) { + public DiversifiedAggregationBuilder(String name) { super(name, TYPE, ValuesSourceType.ANY, null); } /** * Read from a stream. 
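A sketch of the renamed diversified sampler; the setters come from the hunks that follow, while the field and hint values are invented:

```java
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;

public class DiversifiedDemo {
    public static void main(String[] args) {
        // At most one document per author from each shard's top 200.
        DiversifiedAggregationBuilder sample = new DiversifiedAggregationBuilder("sample")
                .field("author")
                .shardSize(200)
                .maxDocsPerValue(1)
                .executionHint("global_ordinals"); // hint string is illustrative
    }
}
```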
*/ - public DiversifiedAggregatorBuilder(StreamInput in) throws IOException { + public DiversifiedAggregationBuilder(StreamInput in) throws IOException { super(in, TYPE, ValuesSourceType.ANY, null); shardSize = in.readVInt(); maxDocsPerValue = in.readVInt(); @@ -71,7 +71,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the max num docs to be returned from each shard. */ - public DiversifiedAggregatorBuilder shardSize(int shardSize) { + public DiversifiedAggregationBuilder shardSize(int shardSize) { if (shardSize < 0) { throw new IllegalArgumentException( "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"); @@ -90,7 +90,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the max num docs to be returned per value. */ - public DiversifiedAggregatorBuilder maxDocsPerValue(int maxDocsPerValue) { + public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) { if (maxDocsPerValue < 0) { throw new IllegalArgumentException( "[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]"); @@ -109,7 +109,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the execution hint. */ - public DiversifiedAggregatorBuilder executionHint(String executionHint) { + public DiversifiedAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } @@ -145,7 +145,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< @Override protected boolean innerEquals(Object obj) { - DiversifiedAggregatorBuilder other = (DiversifiedAggregatorBuilder) obj; + DiversifiedAggregationBuilder other = (DiversifiedAggregationBuilder) obj; return Objects.equals(shardSize, other.shardSize) && Objects.equals(maxDocsPerValue, other.maxDocsPerValue) && Objects.equals(executionHint, other.executionHint); @@ -155,4 +155,4 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java index cb87e53f2c0..f495071f6d2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java @@ -38,9 +38,9 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser { } @Override - protected DiversifiedAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder(aggregationName); + protected DiversifiedAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(aggregationName); Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD); if (shardSize != null) { factory.shardSize(shardSize); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java similarity index 87% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java index 2cc3bb4c303..1220a2ddd42 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.Objects; -public class SamplerAggregatorBuilder extends AggregatorBuilder { +public class SamplerAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalSampler.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -42,14 +42,14 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder { +public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = SignificantStringTerms.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -62,14 +62,14 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC; - public SignificantTermsAggregatorBuilder(String name, ValueType valueType) { + public SignificantTermsAggregationBuilder(String name, ValueType valueType) { super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType); } /** * Read from a Stream. 
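The plain sampler pairs naturally with significant_terms, which is renamed just below; `shardSize` on the sampler is assumed unchanged from `SamplerAggregatorBuilder`:

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;

public class SamplerDemo {
    public static void main(String[] args) {
        SamplerAggregationBuilder sample = new SamplerAggregationBuilder("sample").shardSize(100);
        // Sub-aggregations run only over the sampled top documents.
        sample.subAggregation(AggregationBuilders.significantTerms("keywords").field("body"));
    }
}
```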
*/ - public SignificantTermsAggregatorBuilder(StreamInput in) throws IOException { + public SignificantTermsAggregationBuilder(StreamInput in) throws IOException { super(in, SignificantStringTerms.TYPE, ValuesSourceType.ANY); bucketCountThresholds = new BucketCountThresholds(in); executionHint = in.readOptionalString(); @@ -100,7 +100,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return bucketCountThresholds; } - public SignificantTermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { + public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { if (bucketCountThresholds == null) { throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]"); } @@ -112,7 +112,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Sets the size - indicating how many term buckets should be returned * (defaults to 10) */ - public SignificantTermsAggregatorBuilder size(int size) { + public SignificantTermsAggregationBuilder size(int size) { if (size < 0) { throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]"); } @@ -126,7 +126,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * search execution). The higher the shard size is, the more accurate the * results are. */ - public SignificantTermsAggregatorBuilder shardSize(int shardSize) { + public SignificantTermsAggregationBuilder shardSize(int shardSize) { if (shardSize < 0) { throw new IllegalArgumentException( "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"); @@ -139,7 +139,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Set the minimum document count terms should have in order to appear in * the response. */ - public SignificantTermsAggregatorBuilder minDocCount(long minDocCount) { + public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); @@ -152,7 +152,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Set the minimum document count terms should have on the shard in order to * appear in the response. */ - public SignificantTermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) { + public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]"); @@ -164,7 +164,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui /** * Expert: sets an execution hint to the aggregation. 
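All four threshold setters above reject negative values and echo the builder's name in the error, the same contract as before the rename. A usage sketch grounded in those setters (field and thresholds invented):

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;

public class SignificantTermsDemo {
    public static void main(String[] args) {
        SignificantTermsAggregationBuilder sigWords = AggregationBuilders.significantTerms("sig_words")
                .field("body")
                .size(10)              // term buckets to return
                .shardSize(200)        // candidates considered per shard
                .minDocCount(5)        // global floor
                .shardMinDocCount(2);  // per-shard floor
    }
}
```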
*/ - public SignificantTermsAggregatorBuilder executionHint(String executionHint) { + public SignificantTermsAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } @@ -176,7 +176,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return executionHint; } - public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) { + public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) { if (backgroundFilter == null) { throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]"); } @@ -191,7 +191,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui /** * Set terms to include and exclude from the aggregation results */ - public SignificantTermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) { + public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) { this.includeExclude = includeExclude; return this; } @@ -203,7 +203,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return includeExclude; } - public SignificantTermsAggregatorBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { + public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { if (significanceHeuristic == null) { throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]"); } @@ -226,7 +226,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { bucketCountThresholds.toXContent(builder, params); if (executionHint != null) { - builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); + builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); } if (filterBuilder != null) { builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder); @@ -245,7 +245,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui @Override protected boolean innerEquals(Object obj) { - SignificantTermsAggregatorBuilder other = (SignificantTermsAggregatorBuilder) obj; + SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj; return Objects.equals(bucketCountThresholds, other.bucketCountThresholds) && Objects.equals(executionHint, other.executionHint) && Objects.equals(filterBuilder, other.filterBuilder) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 4b9e3acb873..ab30e1b2d4a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -178,7 +178,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac numberOfAggregatorsCreated++; BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); - if (bucketCountThresholds.getShardSize() == SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { + if 
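Taken together, the renamed significant-terms setters read like this from a caller. A minimal sketch (aggregation and field names are invented; `field(String)` is assumed to be inherited from `ValuesSourceAggregationBuilder`, it is not shown in this hunk):

--------------------------------
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;

class SignificantTermsSketch {
    static SignificantTermsAggregationBuilder keywords() {
        SignificantTermsAggregationBuilder sig =
                new SignificantTermsAggregationBuilder("keywords", ValueType.STRING)
                        .size(10)        // term buckets to return; defaults to 10
                        .shardSize(200)  // candidate terms fetched per shard; higher = more accurate
                        .minDocCount(5); // drop very rare terms
        sig.field("body");               // assumed inherited setter
        return sig;
    }
}
--------------------------------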
(bucketCountThresholds.getShardSize() == SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { // The user has not made a shardSize selection . // Use default heuristic to avoid any wrong-ranking caused by // distributed counting diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index 60805bea692..33db8f97335 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -53,10 +53,11 @@ public class SignificantTermsParser extends AbstractTermsParser { } @Override - protected SignificantTermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, - IncludeExclude incExc, Map otherOptions) { - SignificantTermsAggregatorBuilder factory = new SignificantTermsAggregatorBuilder(aggregationName, targetValueType); + protected SignificantTermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, String executionHint, + IncludeExclude incExc, Map otherOptions) { + SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(aggregationName, targetValueType); if (bucketCountThresholds != null) { factory.bucketCountThresholds(bucketCountThresholds); } @@ -66,11 +67,12 @@ public class SignificantTermsParser extends AbstractTermsParser { if (incExc != null) { factory.includeExclude(incExc); } - QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER); + QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregationBuilder.BACKGROUND_FILTER); if (backgroundFilter != null) { factory.backgroundFilter(backgroundFilter); } - SignificanceHeuristic significanceHeuristic = (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregatorBuilder.HEURISTIC); + SignificanceHeuristic significanceHeuristic = + (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregationBuilder.HEURISTIC); if (significanceHeuristic != null) { factory.significanceHeuristic(significanceHeuristic); } @@ -85,12 +87,12 @@ public class SignificantTermsParser extends AbstractTermsParser { .lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher); if (significanceHeuristicParser != null) { SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher); - otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic); + otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic); return true; - } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregatorBuilder.BACKGROUND_FILTER)) { + } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) { QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); QueryBuilder filter = queryParseContext.parseInnerQueryBuilder(); - 
otherOptions.put(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER, filter); + otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter); return true; } } @@ -99,6 +101,6 @@ public class SignificantTermsParser extends AbstractTermsParser { @Override protected BucketCountThresholds getDefaultBucketCountThresholds() { - return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); + return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java index 5369e269058..b5781aa34be 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java @@ -60,7 +60,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms pipelineAggregators, Map metaData) { //We pass zero for index/subset sizes because for the purpose of significant term analysis // we assume an unmapped index's size is irrelevant to the proceedings. - super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregatorBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC, + super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregationBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC, BUCKETS, pipelineAggregators, metaData); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java index a15c7d28427..3f27c4f1c6f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -50,8 +50,10 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser { } @Override - protected final ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { + protected final ValuesSourceAggregationBuilder createFactory(String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + Map otherOptions) { BucketCountThresholds bucketCountThresholds = getDefaultBucketCountThresholds(); Integer requiredSize = (Integer) otherOptions.get(REQUIRED_SIZE_FIELD_NAME); if (requiredSize != null && requiredSize != -1) { @@ -77,10 +79,14 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser { otherOptions); } - protected abstract ValuesSourceAggregatorBuilder 
doCreateFactory(String aggregationName, - ValuesSourceType valuesSourceType, - ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, - IncludeExclude incExc, Map otherOptions); + protected abstract ValuesSourceAggregationBuilder doCreateFactory(String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, + String executionHint, + IncludeExclude incExc, + Map otherOptions); @Override protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser, diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java similarity index 89% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java index 53887d8b20c..f4cb133c499 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -38,7 +38,7 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = StringTerms.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -61,14 +61,14 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder orders) { + public TermsAggregationBuilder order(List orders) { if (orders == null) { throw new IllegalArgumentException("[orders] must not be null: [" + name + "]"); } @@ -190,7 +190,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - TermsAggregatorBuilder factory = new TermsAggregatorBuilder(aggregationName, targetValueType); + protected TermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, String executionHint, + IncludeExclude incExc, Map otherOptions) { + TermsAggregationBuilder factory = new TermsAggregationBuilder(aggregationName, targetValueType); @SuppressWarnings("unchecked") - List orderElements = (List) otherOptions.get(TermsAggregatorBuilder.ORDER_FIELD); + List orderElements = (List) otherOptions.get(TermsAggregationBuilder.ORDER_FIELD); if (orderElements != null) 
{ List orders = new ArrayList<>(orderElements.size()); for (OrderElement orderElement : orderElements) { @@ -66,7 +67,7 @@ public class TermsParser extends AbstractTermsParser { if (incExc != null) { factory.includeExclude(incExc); } - Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR); + Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR); if (showTermDocCountError != null) { factory.showTermDocCountError(showTermDocCountError); } @@ -77,12 +78,12 @@ public class TermsParser extends AbstractTermsParser { public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token, String currentFieldName, Map otherOptions) throws IOException { if (token == XContentParser.Token.START_OBJECT) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) { - otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser))); + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) { + otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser))); return true; } } else if (token == XContentParser.Token.START_ARRAY) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) { + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) { List orderElements = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { @@ -93,12 +94,12 @@ public class TermsParser extends AbstractTermsParser { "Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "]."); } } - otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, orderElements); + otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, orderElements); return true; } } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR)) { - otherOptions.put(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue()); + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) { + otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue()); return true; } } @@ -158,7 +159,7 @@ public class TermsParser extends AbstractTermsParser { @Override public TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds() { - return new TermsAggregator.BucketCountThresholds(TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); + return new TermsAggregator.BucketCountThresholds(TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); } static Terms.Order resolveOrder(String key, boolean asc) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java index da3733d13a9..ce098177a0b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java +++ 
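The renamed terms builder composes the same way. A sketch (the `Terms.Order` comparators and the inherited `field(String)` setter are assumed from the existing Terms API, not shown in this hunk):

--------------------------------
import java.util.Arrays;

import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;

class TermsSketch {
    static TermsAggregationBuilder genres() {
        TermsAggregationBuilder terms = new TermsAggregationBuilder("genres", ValueType.STRING);
        terms.field("genre"); // assumed inherited setter
        // order(List<Terms.Order>) is the setter touched above; count(false) = most frequent first.
        terms.order(Arrays.asList(Terms.Order.count(false), Terms.Order.term(true)));
        terms.showTermDocCountError(true); // surfaces the per-term doc-count error bound
        return terms;
    }
}
--------------------------------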
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalAvg.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public AvgAggregatorBuilder(String name) { + public AvgAggregationBuilder(String name) { super(name, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. */ - public AvgAggregatorBuilder(StreamInput in) throws IOException { + public AvgAggregationBuilder(StreamInput in) throws IOException { super(in, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java index edb3d8f6620..b4f9261b1eb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java @@ -44,8 +44,8 @@ public class AvgParser extends NumericValuesSourceParser { } @Override - protected AvgAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new AvgAggregatorBuilder(aggregationName); + protected AvgAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new AvgAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java index c9465cfb94d..a7850c23475 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java @@ -28,14 +28,16 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import 
org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public final class CardinalityAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { + public static final String NAME = InternalCardinality.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -43,14 +45,14 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu private Long precisionThreshold = null; - public CardinalityAggregatorBuilder(String name, ValueType targetValueType) { + public CardinalityAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalCardinality.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. */ - public CardinalityAggregatorBuilder(StreamInput in) throws IOException { + public CardinalityAggregationBuilder(StreamInput in) throws IOException { super(in, InternalCardinality.TYPE, ValuesSourceType.ANY); if (in.readBoolean()) { precisionThreshold = in.readLong(); @@ -75,7 +77,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu * Set a precision threshold. Higher values improve accuracy but also * increase memory usage. */ - public CardinalityAggregatorBuilder precisionThreshold(long precisionThreshold) { + public CardinalityAggregationBuilder precisionThreshold(long precisionThreshold) { if (precisionThreshold < 0) { throw new IllegalArgumentException( "[precisionThreshold] must be greater than or equal to 0. 
Found [" + precisionThreshold + "] in [" + name + "]"); @@ -122,7 +124,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu @Override protected boolean innerEquals(Object obj) { - CardinalityAggregatorBuilder other = (CardinalityAggregatorBuilder) obj; + CardinalityAggregationBuilder other = (CardinalityAggregationBuilder) obj; return Objects.equals(precisionThreshold, other.precisionThreshold); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java index 3272d90eede..3a2e6a2072a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java @@ -40,10 +40,10 @@ public class CardinalityParser extends AnyValuesSourceParser { } @Override - protected CardinalityAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder(aggregationName, targetValueType); - Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD); + protected CardinalityAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder(aggregationName, targetValueType); + Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD); if (precisionThreshold != null) { factory.precisionThreshold(precisionThreshold); } @@ -54,8 +54,8 @@ public class CardinalityParser extends AnyValuesSourceParser { protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Map otherOptions) throws IOException { if (token.isValue()) { - if (parseFieldMatcher.match(currentFieldName, CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD)) { - otherOptions.put(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue()); + if (parseFieldMatcher.match(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) { + otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue()); return true; } else if (parseFieldMatcher.match(currentFieldName, REHASH)) { // ignore diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java index ea4681ed686..eff020ec610 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java @@ -28,27 +28,27 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import 
org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoBounds.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIED = new ParseField(NAME); private boolean wrapLongitude = true; - public GeoBoundsAggregatorBuilder(String name) { + public GeoBoundsAggregationBuilder(String name) { super(name, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } /** * Read from a stream. */ - public GeoBoundsAggregatorBuilder(StreamInput in) throws IOException { + public GeoBoundsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); wrapLongitude = in.readBoolean(); } @@ -61,7 +61,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(aggregationName); + protected GeoBoundsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(aggregationName); Boolean wrapLongitude = (Boolean) otherOptions.get(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD); if (wrapLongitude != null) { factory.wrapLongitude(wrapLongitude); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java index ea8e54cdba9..f9bf2e0a346 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java @@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class GeoCentroidAggregatorBuilder - extends ValuesSourceAggregatorBuilder.LeafOnly { +public class GeoCentroidAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { public static final 
String NAME = InternalGeoCentroid.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public GeoCentroidAggregatorBuilder(String name) { + public GeoCentroidAggregationBuilder(String name) { super(name, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } /** * Read from a stream. */ - public GeoCentroidAggregatorBuilder(StreamInput in) throws IOException { + public GeoCentroidAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } @@ -80,4 +80,4 @@ public class GeoCentroidAggregatorBuilder public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java index b056920b141..6c9e9ba67b0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java @@ -46,8 +46,8 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser { } @Override - protected GeoCentroidAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new GeoCentroidAggregatorBuilder(aggregationName); + protected GeoCentroidAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new GeoCentroidAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java index 7258e77aea5..9fa919fcf9e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalMax.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MaxAggregatorBuilder(String name) { + public MaxAggregationBuilder(String name) { super(name, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read 
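The two geo metric builders after the rename. A sketch (`field(String)` assumed inherited; `wrapLongitude` mirrors the parser option shown above):

--------------------------------
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;

class GeoSketch {
    static GeoBoundsAggregationBuilder viewport() {
        GeoBoundsAggregationBuilder bounds = new GeoBoundsAggregationBuilder("viewport");
        bounds.field("location");   // assumed inherited setter
        bounds.wrapLongitude(true); // the default, spelled out; permits bounds across the date line
        return bounds;
    }

    static GeoCentroidAggregationBuilder centroid() {
        GeoCentroidAggregationBuilder centroid = new GeoCentroidAggregationBuilder("centroid");
        centroid.field("location"); // assumed inherited setter
        return centroid;
    }
}
--------------------------------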
from a stream. */ - public MaxAggregatorBuilder(StreamInput in) throws IOException { + public MaxAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java index 41c321acf33..d2ddd4daa08 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java @@ -44,8 +44,8 @@ public class MaxParser extends NumericValuesSourceParser { } @Override - protected MaxAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new MaxAggregatorBuilder(aggregationName); + protected MaxAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MaxAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java index c51e97f0538..af4f204bddb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalMin.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MinAggregatorBuilder(String name) { + public MinAggregationBuilder(String name) { super(name, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
*/ - public MinAggregatorBuilder(StreamInput in) throws IOException { + public MinAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java index 9f9eafc5035..194c08fc49b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java @@ -45,8 +45,8 @@ public class MinParser extends NumericValuesSourceParser { } @Override - protected MinAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new MinAggregatorBuilder(aggregationName); + protected MinAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MinAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java index f29615a593f..ec145754a04 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -115,8 +115,8 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse } @Override - protected ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { + protected ValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { PercentilesMethod method = (PercentilesMethod) otherOptions.getOrDefault(METHOD_FIELD, PercentilesMethod.TDIGEST); double[] cdfValues = (double[]) otherOptions.get(keysField()); @@ -126,10 +126,10 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse return buildFactory(aggregationName, cdfValues, method, compression, numberOfSignificantValueDigits, keyed); } - protected abstract ValuesSourceAggregatorBuilder buildFactory(String aggregationName, double[] cdfValues, - PercentilesMethod method, - Double compression, - Integer numberOfSignificantValueDigits, Boolean keyed); + protected abstract ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] 
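The single-value metrics (avg, max, min, and later in this patch sum and stats) all follow the same LeafOnly pattern: a name-only constructor plus the inherited value-source setters. A sketch (`field(String)` assumed inherited, as before):

--------------------------------
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;

class SimpleMetricsSketch {
    static AvgAggregationBuilder avgPrice() {
        AvgAggregationBuilder avg = new AvgAggregationBuilder("avg_price");
        avg.field("price"); // assumed inherited setter
        return avg;
    }

    static MaxAggregationBuilder maxPrice() {
        MaxAggregationBuilder max = new MaxAggregationBuilder("max_price");
        max.field("price");
        return max;
    }

    static MinAggregationBuilder minPrice() {
        MinAggregationBuilder min = new MinAggregationBuilder("min_price");
        min.field("price");
        return min;
    }
}
--------------------------------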
cdfValues, + PercentilesMethod method, + Double compression, + Integer numberOfSignificantValueDigits, Boolean keyed); protected abstract ParseField keysField(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java index ea03451cf67..d36dcdecb7b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -41,7 +41,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; -public class PercentileRanksAggregatorBuilder extends LeafOnly { +public class PercentileRanksAggregationBuilder extends LeafOnly { public static final String NAME = InternalTDigestPercentileRanks.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,14 +51,14 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly 5) { throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]"); } @@ -135,7 +135,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly buildFactory(String aggregationName, double[] keys, PercentilesMethod method, - Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(aggregationName); + protected ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] keys, PercentilesMethod method, + Double compression, Integer numberOfSignificantValueDigits, + Boolean keyed) { + PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(aggregationName); if (keys != null) { factory.values(keys); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java index 45f3d37732f..d2d1e8d9f8a 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -41,7 +41,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; -public class PercentilesAggregatorBuilder extends LeafOnly { +public class PercentilesAggregationBuilder extends LeafOnly { public static final String NAME = InternalTDigestPercentiles.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,14 +51,14 @@ public class PercentilesAggregatorBuilder extends LeafOnly 5) { throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]"); } @@ -135,7 +135,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly buildFactory(String aggregationName, double[] keys, PercentilesMethod method, - Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - PercentilesAggregatorBuilder factory = new PercentilesAggregatorBuilder(aggregationName); + protected ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] keys, PercentilesMethod method, + Double compression, Integer numberOfSignificantValueDigits, + Boolean keyed) { + PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(aggregationName); if (keys != null) { factory.percentiles(keys); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java similarity index 93% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 753052b4fe0..d5bdf2f5626 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; 
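The two percentile builders after the rename. A sketch (varargs `percentiles(double...)` and `values(double...)` are assumed from the setters the parsers above call with a `double[]`; `field(String)` assumed inherited):

--------------------------------
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;

class PercentilesSketch {
    static PercentilesAggregationBuilder latencyPercentiles() {
        PercentilesAggregationBuilder pct = new PercentilesAggregationBuilder("latency_pct");
        pct.field("took_ms"); // assumed inherited setter
        return pct.percentiles(50, 95, 99); // which percentiles to compute
    }

    static PercentileRanksAggregationBuilder latencyRanks() {
        PercentileRanksAggregationBuilder ranks = new PercentileRanksAggregationBuilder("latency_ranks");
        ranks.field("took_ms");
        return ranks.values(100, 500); // ask what fraction of docs fall at or under these values
    }
}
--------------------------------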
@@ -40,7 +40,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder { +public class ScriptedMetricAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalScriptedMetric.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -58,14 +58,14 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder params; - public ScriptedMetricAggregatorBuilder(String name) { + public ScriptedMetricAggregationBuilder(String name) { super(name, InternalScriptedMetric.TYPE); } /** * Read from a stream. */ - public ScriptedMetricAggregatorBuilder(StreamInput in) throws IOException { + public ScriptedMetricAggregationBuilder(StreamInput in) throws IOException { super(in, InternalScriptedMetric.TYPE); initScript = in.readOptionalWriteable(Script::new); mapScript = in.readOptionalWriteable(Script::new); @@ -92,7 +92,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderinit script. */ - public ScriptedMetricAggregatorBuilder initScript(Script initScript) { + public ScriptedMetricAggregationBuilder initScript(Script initScript) { if (initScript == null) { throw new IllegalArgumentException("[initScript] must not be null: [" + name + "]"); } @@ -110,7 +110,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuildermap script. */ - public ScriptedMetricAggregatorBuilder mapScript(Script mapScript) { + public ScriptedMetricAggregationBuilder mapScript(Script mapScript) { if (mapScript == null) { throw new IllegalArgumentException("[mapScript] must not be null: [" + name + "]"); } @@ -128,7 +128,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuildercombine script. */ - public ScriptedMetricAggregatorBuilder combineScript(Script combineScript) { + public ScriptedMetricAggregationBuilder combineScript(Script combineScript) { if (combineScript == null) { throw new IllegalArgumentException("[combineScript] must not be null: [" + name + "]"); } @@ -146,7 +146,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderreduce script. */ - public ScriptedMetricAggregatorBuilder reduceScript(Script reduceScript) { + public ScriptedMetricAggregationBuilder reduceScript(Script reduceScript) { if (reduceScript == null) { throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]"); } @@ -165,7 +165,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderinit, * map and combine phases. */ - public ScriptedMetricAggregatorBuilder params(Map params) { + public ScriptedMetricAggregationBuilder params(Map params) { if (params == null) { throw new IllegalArgumentException("[params] must not be null: [" + name + "]"); } @@ -214,7 +214,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder { +public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalStats.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public StatsAggregatorBuilder(String name) { + public StatsAggregationBuilder(String name) { super(name, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
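The scripted-metric builder chains its four phase setters. A sketch (the single-String `Script` constructor is assumed, and the script bodies are illustrative pseudocode rather than tested script source):

--------------------------------
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

class ScriptedMetricSketch {
    static ScriptedMetricAggregationBuilder profit() {
        Map<String, Object> params = new HashMap<>();
        params.put("fee", 0.02); // visible to the init, map and combine phases, per the javadoc above
        return new ScriptedMetricAggregationBuilder("profit")
                .params(params)
                .initScript(new Script("_agg.total = 0"))
                .mapScript(new Script("_agg.total += doc.price.value * (1 - fee)"))
                .combineScript(new Script("return _agg.total"))
                .reduceScript(new Script("t = 0; for (a in _aggs) { t += a }; return t"));
    }
}
--------------------------------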
*/ - public StatsAggregatorBuilder(StreamInput in) throws IOException { + public StatsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class StatsAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOn public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java index a57ba89b676..eacfc0068b4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java @@ -44,8 +44,8 @@ public class StatsParser extends NumericValuesSourceParser { } @Override - protected StatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new StatsAggregatorBuilder(aggregationName); + protected StatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new StatsAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java index e6f49d719d6..a2b961f1fc3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java @@ -29,28 +29,28 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class ExtendedStatsAggregatorBuilder - extends ValuesSourceAggregatorBuilder.LeafOnly { +public class ExtendedStatsAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalExtendedStats.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private double sigma = 2.0; - public ExtendedStatsAggregatorBuilder(String name) { + public ExtendedStatsAggregationBuilder(String name) { super(name, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
*/ - public ExtendedStatsAggregatorBuilder(StreamInput in) throws IOException { + public ExtendedStatsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); sigma = in.readDouble(); } @@ -60,7 +60,7 @@ public class ExtendedStatsAggregatorBuilder out.writeDouble(sigma); } - public ExtendedStatsAggregatorBuilder sigma(double sigma) { + public ExtendedStatsAggregationBuilder sigma(double sigma) { if (sigma < 0.0) { throw new IllegalArgumentException("[sigma] must be greater than or equal to 0. Found [" + sigma + "] in [" + name + "]"); } @@ -91,7 +91,7 @@ public class ExtendedStatsAggregatorBuilder @Override protected boolean innerEquals(Object obj) { - ExtendedStatsAggregatorBuilder other = (ExtendedStatsAggregatorBuilder) obj; + ExtendedStatsAggregationBuilder other = (ExtendedStatsAggregationBuilder) obj; return Objects.equals(sigma, other.sigma); } @@ -99,4 +99,4 @@ public class ExtendedStatsAggregatorBuilder public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java index 76e6beac2da..c650847360f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java @@ -50,9 +50,9 @@ public class ExtendedStatsParser extends NumericValuesSourceParser { } @Override - protected ExtendedStatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - ExtendedStatsAggregatorBuilder factory = new ExtendedStatsAggregatorBuilder(aggregationName); + protected ExtendedStatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder(aggregationName); Double sigma = (Double) otherOptions.get(ExtendedStatsAggregator.SIGMA_FIELD); if (sigma != null) { factory.sigma(sigma); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java index 30246452330..25dd1a3f214 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import 
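Extended stats adds one knob on top of the plain stats builder. A sketch (`field(String)` assumed inherited):

--------------------------------
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;

class ExtendedStatsSketch {
    static ExtendedStatsAggregationBuilder priceStats() {
        ExtendedStatsAggregationBuilder stats = new ExtendedStatsAggregationBuilder("price_stats");
        stats.field("price"); // assumed inherited setter
        // sigma widens or narrows std_deviation_bounds; the field initializer above shows 2.0 as default.
        return stats.sigma(3.0);
    }
}
--------------------------------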
org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class SumAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalSum.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public SumAggregatorBuilder(String name) { + public SumAggregationBuilder(String name) { super(name, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. */ - public SumAggregatorBuilder(StreamInput in) throws IOException { + public SumAggregationBuilder(StreamInput in) throws IOException { super(in, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class SumAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java index 7971adba4eb..6edc6cc8905 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java @@ -44,8 +44,8 @@ public class SumParser extends NumericValuesSourceParser { } @Override - protected SumAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new SumAggregatorBuilder(aggregationName); + protected SumAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new SumAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java similarity index 92% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index 8f15437cc54..7195482f147 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationInitializationException; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -51,7 +51,7 @@ import java.util.List; import java.util.Objects; import java.util.Set; -public class TopHitsAggregatorBuilder extends AggregatorBuilder { +public class TopHitsAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalTopHits.TYPE.name(); public static final ParseField 
AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -67,14 +67,14 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder scriptFields; private FetchSourceContext fetchSourceContext; - public TopHitsAggregatorBuilder(String name) { + public TopHitsAggregationBuilder(String name) { super(name, InternalTopHits.TYPE); } /** * Read from a stream. */ - public TopHitsAggregatorBuilder(StreamInput in) throws IOException { + public TopHitsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalTopHits.TYPE); explain = in.readBoolean(); fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); @@ -159,7 +159,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder0. */ - public TopHitsAggregatorBuilder from(int from) { + public TopHitsAggregationBuilder from(int from) { if (from < 0) { throw new IllegalArgumentException("[from] must be greater than or equal to 0. Found [" + from + "] in [" + name + "]"); } @@ -177,7 +177,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder10. */ - public TopHitsAggregatorBuilder size(int size) { + public TopHitsAggregationBuilder size(int size) { if (size < 0) { throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]"); } @@ -200,7 +200,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder sort) { + public TopHitsAggregationBuilder sort(SortBuilder sort) { if (sort == null) { throw new IllegalArgumentException("[sort] must not be null: [" + name + "]"); } @@ -248,7 +248,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder> sorts) { + public TopHitsAggregationBuilder sorts(List> sorts) { if (sorts == null) { throw new IllegalArgumentException("[sorts] must not be null: [" + name + "]"); } @@ -271,7 +271,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder fields) { + public TopHitsAggregationBuilder fields(List fields) { if (fields == null) { throw new IllegalArgumentException("[fields] must not be null: [" + name + "]"); } @@ -385,7 +385,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder fieldDataFields) { + public TopHitsAggregationBuilder fieldDataFields(List fieldDataFields) { if (fieldDataFields == null) { throw new IllegalArgumentException("[fieldDataFields] must not be null: [" + name + "]"); } @@ -442,7 +442,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder scriptFields) { + public TopHitsAggregationBuilder scriptFields(List scriptFields) { if (scriptFields == null) { throw new IllegalArgumentException("[scriptFields] must not be null: [" + name + "]"); } @@ -497,7 +497,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilderfalse. */ - public TopHitsAggregatorBuilder trackScores(boolean trackScores) { + public TopHitsAggregationBuilder trackScores(boolean trackScores) { this.trackScores = trackScores; return this; } @@ -544,7 +544,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder { +public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalValueCount.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public ValueCountAggregatorBuilder(String name, ValueType targetValueType) { + public ValueCountAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalValueCount.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. 
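// A hedged sketch of the renamed top_hits builder in use; "top_docs" and the values are
// invented, and the fluent shape mirrors the setters shown in this patch.
TopHitsAggregationBuilder topHits =
        new TopHitsAggregationBuilder("top_docs")
                .from(0)
                .size(3)
                .trackScores(true);
// from(-1) or size(-1) would trip the "must be greater than or equal to 0" checks above.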
*/ - public ValueCountAggregatorBuilder(StreamInput in) throws IOException { + public ValueCountAggregationBuilder(StreamInput in) throws IOException { super(in, InternalValueCount.TYPE, ValuesSourceType.ANY); } @@ -84,4 +84,4 @@ public class ValueCountAggregatorBuilder extends ValuesSourceAggregatorBuilder.L public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java index 2f0e7e6f263..fe8a34f242b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -46,8 +46,8 @@ public class ValueCountParser extends AnyValuesSourceParser { } @Override - protected ValuesSourceAggregatorBuilder createFactory( + protected ValuesSourceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - return new ValueCountAggregatorBuilder(aggregationName, targetValueType); + return new ValueCountAggregationBuilder(aggregationName, targetValueType); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java index 030f5143af3..51d2ea2e8c9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java @@ -84,7 +84,7 @@ public abstract class AbstractValuesSourceParser } @Override - public final ValuesSourceAggregatorBuilder parse(String aggregationName, QueryParseContext context) + public final ValuesSourceAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { XContentParser parser = context.parser(); @@ -147,7 +147,7 @@ public abstract class AbstractValuesSourceParser } } - ValuesSourceAggregatorBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, + ValuesSourceAggregationBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, otherOptions); if (field != null) { factory.field(field); @@ -171,7 +171,7 @@ public abstract class AbstractValuesSourceParser } /** - * Creates a {@link ValuesSourceAggregatorBuilder} from the information + * Creates a {@link ValuesSourceAggregationBuilder} from the information * gathered by the subclass. Options parsed in * {@link AbstractValuesSourceParser} itself will be added to the factory * after it has been returned by this method. 
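// Sketch of the parser contract after the rename: a subclass of AbstractValuesSourceParser
// supplies only the concrete builder, and the shared parse() method applies the common
// options (field, script, format, missing) to whatever createFactory() returns.
// "MinimalAggregationBuilder" is a hypothetical stand-in, not a class in this patch.
@Override
protected ValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
        ValueType targetValueType, Map otherOptions) {
    // nothing type-specific to pull out of otherOptions in this sketch
    return new MinimalAggregationBuilder(aggregationName, targetValueType);
}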
@@ -189,8 +189,8 @@ public abstract class AbstractValuesSourceParser * method * @return the created factory */ - protected abstract ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions); + protected abstract ValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions); /** * Allows subclasses of {@link AbstractValuesSourceParser} to parse extra diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java similarity index 95% rename from core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java index c92faa09613..78d2a2da10f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java @@ -31,7 +31,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationInitializationException; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -46,11 +46,11 @@ import java.util.Objects; /** * */ -public abstract class ValuesSourceAggregatorBuilder> - extends AggregatorBuilder { +public abstract class ValuesSourceAggregationBuilder> + extends AggregationBuilder { - public static abstract class LeafOnly> - extends ValuesSourceAggregatorBuilder { + public static abstract class LeafOnly> + extends ValuesSourceAggregationBuilder { protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type, valuesSourceType, targetValueType); @@ -87,7 +87,7 @@ public abstract class ValuesSourceAggregatorBuilder config; - protected ValuesSourceAggregatorBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { + protected ValuesSourceAggregationBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type); if (valuesSourceType == null) { throw new IllegalArgumentException("[valuesSourceType] must not be null: [" + name + "]"); @@ -99,7 +99,7 @@ public abstract class ValuesSourceAggregatorBuilder other = (ValuesSourceAggregatorBuilder) obj; + ValuesSourceAggregationBuilder other = (ValuesSourceAggregationBuilder) obj; if (!Objects.equals(field, other.field)) return false; if (!Objects.equals(format, other.format)) diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 429aa36e56f..dc45a99fa87 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -41,7 +41,7 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; @@ -600,7 +600,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Add an aggregation to perform as part of the search. */ - public SearchSourceBuilder aggregation(AggregatorBuilder aggregation) { + public SearchSourceBuilder aggregation(AggregationBuilder aggregation) { if (aggregations == null) { aggregations = AggregatorFactories.builder(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 96319303420..e69e4094f44 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; @@ -216,7 +215,7 @@ public class DefaultSearchContext extends SearchContext { + "be less than [" + maxWindow + "]. This prevents allocating massive heaps for storing the results to be " + "rescored. 
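// SearchSourceBuilder now accepts the renamed top-level AggregationBuilder type; a hedged
// request-building sketch reusing the sum builder from this patch (agg and field names invented):
SearchSourceBuilder source = new SearchSourceBuilder()
        .aggregation(new SumAggregationBuilder("total_bytes").field("bytes"));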
This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey() + "] index level setting."); - + } } } @@ -495,11 +494,6 @@ public class DefaultSearchContext extends SearchContext { return indexService.fieldData(); } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return indexService.cache().getPercolatorQueryCache(); - } - @Override public long timeoutInMillis() { return timeoutInMillis; diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 8009d0b5fe4..ac283f1f7ef 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -270,11 +269,6 @@ public abstract class FilteredSearchContext extends SearchContext { return in.fieldData(); } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return in.percolatorQueryCache(); - } - @Override public long timeoutInMillis() { return in.timeoutInMillis(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 550a5f76caf..55b66c42a60 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -231,8 +230,6 @@ public abstract class SearchContext implements Releasable { public abstract IndexFieldDataService fieldData(); - public abstract PercolatorQueryCache percolatorQueryCache(); - public abstract long timeoutInMillis(); public abstract void timeoutInMillis(long timeoutInMillis); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 8e6681893c9..5b52e915195 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -44,6 +45,7 @@ import
org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; @@ -366,14 +368,7 @@ public class SnapshotsService extends AbstractLifecycleComponentemptyList()); - } catch (Throwable t2) { - logger.warn("[{}] failed to close snapshot in repository", snapshot.snapshotId()); - } - userCreateSnapshotListener.onFailure(t); + removeSnapshotFromClusterState(snapshot.snapshotId(), null, t, new CleanupAfterErrorListener(snapshot, true, userCreateSnapshotListener, t)); } @Override @@ -395,17 +390,46 @@ public class SnapshotsService extends AbstractLifecycleComponent { + + private final SnapshotsInProgress.Entry snapshot; + private final boolean snapshotCreated; + private final CreateSnapshotListener userCreateSnapshotListener; + private final Throwable t; + + public CleanupAfterErrorListener(SnapshotsInProgress.Entry snapshot, boolean snapshotCreated, CreateSnapshotListener userCreateSnapshotListener, Throwable t) { + this.snapshot = snapshot; + this.snapshotCreated = snapshotCreated; + this.userCreateSnapshotListener = userCreateSnapshotListener; + this.t = t; + } + + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + cleanupAfterError(); + } + + @Override + public void onFailure(Throwable e) { + cleanupAfterError(); + } + + private void cleanupAfterError() { + if(snapshotCreated) { try { - repositoriesService.repository(snapshot.snapshotId().getRepository()).finalizeSnapshot(snapshot.snapshotId(), snapshot.indices(), snapshot.startTime(), - ExceptionsHelper.detailedMessage(t), 0, Collections.emptyList()); + repositoriesService.repository(snapshot.snapshotId().getRepository()).finalizeSnapshot( + snapshot.snapshotId(), snapshot.indices(), snapshot.startTime(), ExceptionsHelper.detailedMessage(t), 0, Collections.emptyList()); } catch (Throwable t2) { logger.warn("[{}] failed to close snapshot in repository", snapshot.snapshotId()); } } userCreateSnapshotListener.onFailure(t); } + } private SnapshotInfo inProgressSnapshot(SnapshotsInProgress.Entry entry) { @@ -818,6 +842,19 @@ public class SnapshotsService extends AbstractLifecycleComponent listener) { clusterService.submitStateUpdateTask("remove snapshot metadata", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -843,6 +880,9 @@ public class SnapshotsService extends AbstractLifecycleComponent implem ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes); future.addListener(listener); addedReleaseListener = true; - transportServiceAdapter.onRequestSent(node, requestId, action, request, options); + final TransportRequestOptions finalOptions = options; + ChannelFutureListener channelFutureListener = + f -> transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions); + future.addListener(channelFutureListener); } finally { if (!addedReleaseListener) { Releasables.close(bStream.bytes()); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java index 57893ff1908..91b6bc120ad 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java +++ 
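// The snapshot failure path above now funnels both outcomes of removing the snapshot from
// the cluster state into a single listener, so repository finalization and the user callback
// run exactly once either way. A stripped-down sketch of that pattern (names simplified;
// not the patch's literal code):
ActionListener<SnapshotInfo> cleanup = new ActionListener<SnapshotInfo>() {
    @Override
    public void onResponse(SnapshotInfo info) { cleanupAndNotify(); }

    @Override
    public void onFailure(Throwable e) { cleanupAndNotify(); } // same cleanup on both paths

    private void cleanupAndNotify() {
        // finalize the partially created snapshot if one was written, then
        // surface the original failure to the caller's listener
        userCreateSnapshotListener.onFailure(t);
    }
};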
b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java @@ -37,6 +37,7 @@ import org.elasticsearch.transport.support.TransportStatus; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; @@ -114,7 +115,10 @@ public class NettyTransportChannel implements TransportChannel { ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes); future.addListener(listener); addedReleaseListener = true; - transportServiceAdapter.onResponseSent(requestId, action, response, options); + final TransportResponseOptions finalOptions = options; + ChannelFutureListener onResponseSentListener = + f -> transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions); + future.addListener(onResponseSentListener); } finally { if (!addedReleaseListener && bStream != null) { Releasables.close(bStream.bytes()); @@ -137,8 +141,10 @@ public class NettyTransportChannel implements TransportChannel { BytesReference bytes = stream.bytes(); ChannelBuffer buffer = bytes.toChannelBuffer(); NettyHeader.writeHeader(buffer, requestId, status, version); - channel.write(buffer); - transportServiceAdapter.onResponseSent(requestId, action, error); + ChannelFuture future = channel.write(buffer); + ChannelFutureListener onResponseSentListener = + f -> transportServiceAdapter.onResponseSent(requestId, action, error); + future.addListener(onResponseSentListener); } private void close() { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 74404903e91..32f7b2bf0dd 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -71,7 +71,7 @@ grant { // set by ESTestCase to improve test reproducibility // TODO: set this with gradle or some other way that repros with seed? - permission java.util.PropertyPermission "es.processors.override", "write"; + permission java.util.PropertyPermission "processors.override", "write"; // TODO: these simply trigger a noisy warning if its unable to clear the properties // fix that in randomizedtesting diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java new file mode 100644 index 00000000000..927f572487c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -0,0 +1,181 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
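// Both Netty classes now notify the transport adapter from a listener on the write future,
// so the "sent" callbacks fire when the bytes actually left (or failed to leave) the
// channel, not when the write was merely queued. The shape, in Netty 3 terms:
ChannelFuture future = channel.write(buffer);
future.addListener(f -> {
    // invoked on completion of the write, success or failure
    transportServiceAdapter.onResponseSent(requestId, action, error);
});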
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.admin.cluster.reroute; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.gateway.NoopGatewayAllocator; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; + +public class ClusterRerouteTests extends ESAllocationTestCase { + + public void testSerializeRequest() throws IOException { + ClusterRerouteRequest req = new ClusterRerouteRequest(); + req.setRetryFailed(randomBoolean()); + req.dryRun(randomBoolean()); + req.explain(randomBoolean()); + req.commands(new AllocateEmptyPrimaryAllocationCommand("foo", 1, "bar", randomBoolean())); + req.timeout(TimeValue.timeValueMillis(randomIntBetween(0, 100))); + BytesStreamOutput out = new BytesStreamOutput(); + req.writeTo(out); + BytesReference bytes = out.bytes(); + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry); + StreamInput wrap = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes.toBytes()), + namedWriteableRegistry); + ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest(); + deserializedReq.readFrom(wrap); + + assertEquals(req.isRetryFailed(), deserializedReq.isRetryFailed()); + assertEquals(req.dryRun(), deserializedReq.dryRun()); + assertEquals(req.explain(), deserializedReq.explain()); + assertEquals(req.timeout(), deserializedReq.timeout()); + assertEquals(1, deserializedReq.getCommands().commands().size()); // allocation commands have their own tests + assertEquals(req.getCommands().commands().size(), deserializedReq.getCommands().commands().size()); + } + + public void testClusterStateUpdateTask() { + AllocationService allocationService = new 
AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, + Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); + ClusterState clusterState = createInitialClusterState(allocationService); + ClusterRerouteRequest req = new ClusterRerouteRequest(); + req.dryRun(true); + AtomicReference responseRef = new AtomicReference<>(); + ActionListener responseActionListener = new ActionListener() { + @Override + public void onResponse(ClusterRerouteResponse clusterRerouteResponse) { + responseRef.set(clusterRerouteResponse); + } + + @Override + public void onFailure(Throwable e) { + + } + }; + TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask task = + new TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask(logger, allocationService, req, + responseActionListener ); + ClusterState execute = task.execute(clusterState); + assertSame(execute, clusterState); // dry-run + task.onAllNodesAcked(null); + assertNotSame(responseRef.get().getState(), execute); + + req.dryRun(false);// now we allocate + + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries; i++) { + ClusterState newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + clusterState = newState; + RoutingTable routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i); + List failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = allocationService.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + clusterState = ClusterState.builder(clusterState).routingTable(result.routingTable()).build(); + routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + if (i == retries-1) { + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + } else { + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + } + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i+1); + } + + + // without retry_failed we won't allocate that shard + ClusterState newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + task.onAllNodesAcked(null); + assertSame(responseRef.get().getState(), newState); + RoutingTable routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + + req.setRetryFailed(true); // now we manually retry and get the shard back into initializing + newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + clusterState = newState; + routingTable = 
clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + } + + private ClusterState createInitialClusterState(AllocationService service) { + MetaData.Builder metaBuilder = MetaData.builder(); + metaBuilder.put(IndexMetaData.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)); + MetaData metaData = metaBuilder.build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + routingTableBuilder.addAsNew(metaData.index("idx")); + + RoutingTable routingTable = routingTableBuilder.build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData).routingTable(routingTable).build(); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) + .build(); + RoutingTable prevRoutingTable = routingTable; + routingTable = service.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertEquals(prevRoutingTable.index("idx").shards().size(), 1); + assertEquals(prevRoutingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + return clusterState; + } +} diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 632646146fe..8b8a4d947a9 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -22,25 +22,15 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; -public class ElasticsearchCliTests extends ESTestCase { +public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { public void testVersion() throws Exception { runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); @@ -96,7 +86,7 @@ public class ElasticsearchCliTests extends ESTestCase { false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo]")), (foreground, pidFile, esSettings) -> {}, - "-E", "something", "foo", "-E", "somethingelse" + "-E", "foo=bar", "foo", "-E", "baz=qux" ); } @@ -138,26 +128,10 @@ public class ElasticsearchCliTests extends ESTestCase { output -> {}, (foreground, pidFile, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); - assertThat(esSettings, hasEntry("es.foo", "bar")); - assertThat(esSettings, 
hasEntry("es.baz", "qux")); + assertThat(esSettings, hasEntry("foo", "bar")); + assertThat(esSettings, hasEntry("baz", "qux")); }, - "-Ees.foo=bar", "-E", "es.baz=qux" - ); - } - - public void testElasticsearchSettingPrefix() throws Exception { - runElasticsearchSettingPrefixTest("-E", "foo"); - runElasticsearchSettingPrefixTest("-E", "foo=bar"); - runElasticsearchSettingPrefixTest("-E", "=bar"); - } - - private void runElasticsearchSettingPrefixTest(String... args) throws Exception { - runTest( - ExitCodes.USAGE, - false, - output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), - (foreground, pidFile, esSettings) -> {}, - args + "-Efoo=bar", "-E", "baz=qux" ); } @@ -165,9 +139,9 @@ public class ElasticsearchCliTests extends ESTestCase { runTest( ExitCodes.USAGE, false, - output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), + output -> assertThat(output, containsString("Setting [foo] must not be empty")), (foreground, pidFile, esSettings) -> {}, - "-E", "es.foo=" + "-E", "foo=" ); } @@ -180,36 +154,4 @@ public class ElasticsearchCliTests extends ESTestCase { "--network.host"); } - private interface InitConsumer { - void accept(final boolean foreground, final String pidFile, final Map esSettings); - } - - private void runTest( - final int expectedStatus, - final boolean expectedInit, - final Consumer outputConsumer, - final InitConsumer initConsumer, - String... args) throws Exception { - final MockTerminal terminal = new MockTerminal(); - try { - final AtomicBoolean init = new AtomicBoolean(); - final int status = Elasticsearch.main(args, new Elasticsearch() { - @Override - void init(final boolean daemonize, final String pidFile, final Map esSettings) { - init.set(true); - initConsumer.accept(!daemonize, pidFile, esSettings); - } - }, terminal); - assertThat(status, equalTo(expectedStatus)); - assertThat(init.get(), equalTo(expectedInit)); - outputConsumer.accept(terminal.getOutput()); - } catch (Throwable t) { - // if an unexpected exception is thrown, we log - // terminal output to aid debugging - logger.info(terminal.getOutput()); - // rethrow so the test fails - throw t; - } - } - } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index a20a5247ed6..9cdeef2a7ff 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -59,7 +59,6 @@ public class TransportClientIT extends ESIntegTestCase { .put("http.enabled", false) .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .build()); node.start(); try { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index 2fcadb51a10..4ec1f66df57 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -55,7 +55,6 @@ public class TransportClientRetryIT extends ESIntegTestCase { .put("node.name", "transport_client_retry_test") .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) 
.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient client = TransportClient.builder().settings(builder.build()).build()) { diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java new file mode 100644 index 00000000000..c6ce30e2a52 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class MetaDataMappingServiceTests extends ESSingleNodeTestCase { + + // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService + public void testAddChildTypePointingToAlreadyExistingType() throws Exception { + createIndex("test", Settings.EMPTY, "type", "field", "type=keyword"); + + // Shouldn't be able to add the _parent field pointing to an already existing type, which isn't a parent type + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin() + .indices() + .preparePutMapping("test") + .setType("child") + .setSource("_parent", "type=type") + .get()); + assertThat(e.getMessage(), + equalTo("can't add a _parent field that points to an already existing type, that isn't already a parent")); + } + + // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService + public void testAddExtraChildTypePointingToAlreadyParentExistingType() throws Exception { + IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("parent") + .addMapping("child1", "_parent", "type=parent") + ); + + // adding the extra child type that points to an already existing parent type is allowed: + client().admin() + .indices() + .preparePutMapping("test") + .setType("child2") + .setSource("_parent", "type=parent") + .get(); + + DocumentMapper documentMapper = indexService.mapperService().documentMapper("child2"); + assertThat(documentMapper.parentFieldMapper().type(), equalTo("parent")); + assertThat(documentMapper.parentFieldMapper().active(), is(true)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 708f8ca5079..f46224570b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -64,7 +64,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { UnassignedInfo.Reason.NODE_LEFT, UnassignedInfo.Reason.REROUTE_CANCELLED, UnassignedInfo.Reason.REINITIALIZED, - UnassignedInfo.Reason.REALLOCATED_REPLICA}; + UnassignedInfo.Reason.REALLOCATED_REPLICA, + UnassignedInfo.Reason.PRIMARY_FAILED}; for (int i = 0; i < order.length; i++) { assertThat(order[i].ordinal(), equalTo(i)); } @@ -72,7 +73,10 @@ public class UnassignedInfoTests extends ESAllocationTestCase { } public void testSerialization() throws Exception { - UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null); + UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()); + UnassignedInfo meta = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? + new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(), System.currentTimeMillis()): + new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null); BytesStreamOutput out = new BytesStreamOutput(); meta.writeTo(out); out.close(); @@ -82,6 +86,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); assertThat(read.getMessage(), equalTo(meta.getMessage())); assertThat(read.getDetails(), equalTo(meta.getDetails())); + assertThat(read.getNumFailedAllocations(), equalTo(meta.getNumFailedAllocations())); } public void testIndexCreated() { @@ -273,7 +278,10 @@ public class UnassignedInfoTests extends ESAllocationTestCase { public void testUnassignedDelayOnlyNodeLeftNonNodeLeftReason() throws Exception { EnumSet reasons = EnumSet.allOf(UnassignedInfo.Reason.class); reasons.remove(UnassignedInfo.Reason.NODE_LEFT); - UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(random(), reasons), null); + UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), reasons); + UnassignedInfo unassignedInfo = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? 
+ new UnassignedInfo(reason, null, null, 1, System.nanoTime(), System.currentTimeMillis()): + new UnassignedInfo(reason, null); unassignedInfo = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); long delay = unassignedInfo.getLastComputedLeftDelayNanos(); @@ -287,7 +295,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { */ public void testLeftDelayCalculation() throws Exception { final long baseTime = System.nanoTime(); - UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, baseTime, System.currentTimeMillis()); + UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, 0, baseTime, System.currentTimeMillis()); final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos(); final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build(); unassignedInfo = unassignedInfo.updateDelay(baseTime, settings, Settings.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 7aa8576ece3..b63692e0d2a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -94,7 +94,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().state(), equalTo(ShardRoutingState.RELOCATING)); @@ -148,7 +148,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating to non-existent node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42")), false, false); fail("expected IllegalArgumentException when allocating to non-existing node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes")); @@ -156,7 +156,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating to non-data node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4")), false, false); fail("expected IllegalArgumentException when allocating to non-data node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocation can only be done on data nodes")); @@ 
-164,7 +164,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing shard, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2")), false, false); fail("expected ShardNotFoundException when allocating non-existing shard"); } catch (ShardNotFoundException e) { assertThat(e.getMessage(), containsString("no such shard")); @@ -172,7 +172,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing index, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2")), false, false); fail("expected ShardNotFoundException when allocating non-existing index"); } catch (IndexNotFoundException e) { assertThat(e.getMessage(), containsString("no such index")); @@ -180,7 +180,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false)), false, false); fail("expected IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); @@ -188,14 +188,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating stale primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false)), false, false); fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. 
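// All of these call sites move to the widened reroute signature; assuming the parameter
// order (commands, explain, retryFailed) used consistently throughout this patch, a
// typical call now reads:
RoutingAllocation.Result rerouteResult = allocation.reroute(
        clusterState,
        new AllocationCommands(new MoveAllocationCommand("test", 0, "node1", "node2")),
        false,  // explain: don't capture per-decider explanations
        false); // retryFailed: leave the failed-allocation counters untouched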
Please confirm by setting the accept_data_loss parameter to true")); } logger.info("--> allocating empty primary with acceptDataLoss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -211,13 +211,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocate the replica shard on the primary shard node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1"))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1")), false, false); fail("expected IllegalArgumentException when allocating replica shard on the primary shard node"); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -236,7 +236,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> verify that we fail when there are no unassigned shards"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3")), false, false); fail("expected IllegalArgumentException when allocating shard while no unassigned shard available"); } catch (IllegalArgumentException e) { } @@ -268,7 +268,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> allocating empty primary shard with accept_data_loss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -277,7 +277,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 
0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } @@ -291,13 +291,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -306,7 +306,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the relocation allocation"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -315,7 +315,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -325,7 +325,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel the primary being replicated, make sure it fails"); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } @@ -339,7 +339,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> cancel allocation of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new 
CancelAllocationCommand("test", 0, "node2", false)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -348,7 +348,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -364,7 +364,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> move the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3")), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -374,7 +374,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the move of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false)), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -383,7 +383,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel the primary allocation (with allow_primary set to true)"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true)), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); logger.error(clusterState.prettyPrint()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index ee993bf3ebd..805ab0321ba 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -868,7 +868,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { } commands.add(new MoveAllocationCommand("test", 0, primaryNode, "A-4")); - routingTable = strategy.reroute(clusterState, commands).routingTable(); + routingTable = strategy.reroute(clusterState, commands, false, false).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index 11b78d2ae6a..b14aeca890e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -149,8 +149,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); @@ -223,8 +223,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index bfa27a36d8b..29644f07944 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -149,7 +149,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)), 
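// For orientation, a sketch of the reroute signature all of these call sites now exercise
// (the parameter names are assumptions inferred from how the tests use the two booleans):
//
//   // AllocationService#reroute(ClusterState state, AllocationCommands commands,
//   //                           boolean explain, boolean retryFailed)
//   RoutingAllocation.Result result =
//       allocation.reroute(clusterState, new AllocationCommands(), false, false);
//
// With explain=true the result carries RoutingExplanations describing each command's
// Decision; with retryFailed=true shards that exhausted their allocation retries become
// eligible for allocation again.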
false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertEquals(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().state(), ShardRoutingState.RELOCATING); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 58e2397b043..e859c5811c3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -109,8 +109,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); @@ -125,8 +125,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java new file mode 100644 index 00000000000..f76851cfef9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java @@ -0,0 +1,210 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.gateway.NoopGatewayAllocator; + +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; + +public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { + + private AllocationService strategy; + + @Override + public void setUp() throws Exception { + super.setUp(); + strategy = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, + Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); + } + + private ClusterState createInitialClusterState() { + MetaData.Builder metaBuilder = MetaData.builder(); + metaBuilder.put(IndexMetaData.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)); + MetaData metaData = metaBuilder.build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + routingTableBuilder.addAsNew(metaData.index("idx")); + + RoutingTable routingTable = routingTableBuilder.build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData).routingTable(routingTable).build(); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) + .build(); + RoutingTable prevRoutingTable = routingTable; + routingTable = strategy.reroute(clusterState, "reroute", false).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertEquals(prevRoutingTable.index("idx").shards().size(), 1); + assertEquals(prevRoutingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + return clusterState; + } + + public void testSingleRetryOnIgnore() { + ClusterState clusterState = createInitialClusterState(); + RoutingTable routingTable = clusterState.routingTable(); + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries-1; i++) { + List 
<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom" + i); + } + // now we go and check that we are actually stuck in unassigned on the next failure + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + result = strategy.reroute(clusterState, new AllocationCommands(), false, true); // manual reroute should retry once + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + // now we go and check that we are actually stuck in unassigned on the next failure, i.e. 
no retry + failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + } + + public void testFailedAllocation() { + ClusterState clusterState = createInitialClusterState(); + RoutingTable routingTable = clusterState.routingTable(); + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries-1; i++) { + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom" + i); + } + // now we go and check that we are actually stuck in unassigned on the next failure + { + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + } + + // change the settings and ensure we can do another round of allocation for that index. 
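// The ceiling being raised here is the dynamic per-index setting behind
// MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY, i.e. "index.allocation.max_retries".
// A hedged sketch of making the equivalent change on a live index through the client API:
//
//   client.admin().indices().prepareUpdateSettings("idx")
//       .setSettings(Settings.builder().put("index.allocation.max_retries", retries + 1).build())
//       .get();
//
// Once the ceiling exceeds getNumFailedAllocations(), the decider stops vetoing and the
// next reroute may assign the shard again, as the assertions below verify.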
+ clusterState = ClusterState.builder(clusterState).routingTable(routingTable) + .metaData(MetaData.builder(clusterState.metaData()) + .put(IndexMetaData.builder(clusterState.metaData().index("idx")).settings( + Settings.builder().put(clusterState.metaData().index("idx").getSettings()).put("index.allocation.max_retries", + retries+1).build() + ).build(), true).build()).build(); + RoutingAllocation.Result result = strategy.reroute(clusterState, "settings changed", false); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + // good: we are initializing and maintaining the failure information + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + // now we start the shard + routingTable = strategy.applyStartedShards(clusterState, Collections.singletonList(routingTable.index("idx") + .shard(0).shards().get(0))).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + // all counters have been reset to 0, i.e. no unassigned info + assertEquals(routingTable.index("idx").shards().size(), 1); + assertNull(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo()); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), STARTED); + + // now fail again and see if it has a new counter + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "ZOOOMG", + new UnsupportedOperationException())); + result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "ZOOOMG"); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 97a3003ab2f..d0fc64b4b6b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -337,7 +337,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); - RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); + RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true, false); // the two indices must stay as is, the replicas cannot move to oldNode2 
because versions don't match state = ClusterState.builder(state).routingResult(result).build(); assertThat(result.routingTable().index(shard2.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(0)); @@ -369,7 +369,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); - RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); + RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true, false); // Make sure that primary shards are only allocated on the new node for (int i = 0; i < numberOfShards; i++) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 28d916e20c1..61a72bc352a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -283,7 +283,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); - RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").iterator().next().shardId().id(), "node1", "node2"))); + RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").iterator().next().shardId().id(), "node1", "node2")), false, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES); routingTable = reroute.routingTable(); @@ -296,7 +296,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); // outgoing throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node1")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node1")), true, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); @@ -311,7 +311,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); // incoming throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node2")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 
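// A sketch of inspecting the explain output exercised by this test (the element accessors
// are taken from the assertions around these calls): with explain=true the result records,
// per command, the Decision the deciders reached:
//
//   RoutingAllocation.Result r = strategy.reroute(clusterState, cmds, true, false);
//   Decision.Type type = r.explanations().explanations().get(0).decisions().type();
//   // Decision.Type.YES when the move is permitted, Decision.Type.THROTTLE when the
//   // node's incoming or outgoing recoveries are already saturated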
clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node2")), true, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 2c4e86ad4b1..579e87150a7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -796,7 +796,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationCommand relocate1 = new MoveAllocationCommand("test", 0, "node2", "node3"); AllocationCommands cmds = new AllocationCommands(relocate1); - routingTable = strategy.reroute(clusterState, cmds).routingTable(); + routingTable = strategy.reroute(clusterState, cmds, false, false).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logShardStates(clusterState); @@ -808,7 +808,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // node3, which will put it over the low watermark when it // completes, with shard relocations taken into account this should // throw an exception about not being able to complete - strategy.reroute(clusterState, cmds).routingTable(); + strategy.reroute(clusterState, cmds, false, false).routingTable(); fail("should not have been able to reroute the shard"); } catch (IllegalArgumentException e) { assertThat("can't allocated because there isn't enough room: " + e.getMessage(), @@ -876,7 +876,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, - System.nanoTime()); + System.nanoTime(), false); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); @@ -896,7 +896,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ) ); clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); - routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime()); + routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), + false); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); @@ -992,7 +993,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, - System.nanoTime()); + System.nanoTime(), false); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); // Two shards should start happily @@ -1051,7 +1052,8 @@ public class DiskThresholdDeciderTests 
extends ESAllocationTestCase { ); clusterState = ClusterState.builder(updateClusterState).routingTable(builder.build()).build(); - routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime()); + routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), + false); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index d9e9fb95445..008884cbb8d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -136,7 +136,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { ImmutableOpenMap.Builder<String, Long> shardSizes = ImmutableOpenMap.builder(); shardSizes.put("[test][0][p]", 10L); // 10 bytes final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), ImmutableOpenMap.of()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); assertEquals(mostAvailableUsage.toString(), Decision.YES, decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation)); assertEquals(mostAvailableUsage.toString(), Decision.NO, decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation)); } @@ -204,7 +204,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { shardSizes.put("[test][2][p]", 10L); final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), shardRoutingMap.build()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); assertEquals(Decision.YES, decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation)); assertEquals(Decision.NO, decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation)); try { diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java index 63c04b1c5e1..b08b81db11a 100644 --- a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESBlobStoreContainerTestCase; +import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import java.io.IOException; import java.nio.file.Path; diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java index f6f53549ce4..7d4ac1acc07 100644 --- a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESBlobStoreTestCase; +import org.elasticsearch.repositories.ESBlobStoreTestCase; import java.io.IOException; import java.nio.file.Path; diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index 08a4ba11342..61710b726d4 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -25,7 +25,6 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; -import java.util.ArrayList; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; @@ -234,19 +233,18 @@ public class TimeZoneRoundingTests extends ESTestCase { * amount of milliseconds. 
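 * For instance, with DateTimeUnit.MINUTES_OF_HOUR the unit is 60,000 ms, so away from any
 * DST transition one would expect, illustratively:
 *
 *   long rounded = rounding.round(t);                // floor of t to the start of its minute
 *   long next = rounding.nextRoundingValue(rounded); // rounded + 60,000
 *
 * which is why the loop below asserts that consecutive rounding values differ by exactly one
 * unit's worth of milliseconds.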
*/ public void testSubHourNextRoundingEquallySpaced() { - String timeZone = randomFrom(new ArrayList<>(DateTimeZone.getAvailableIDs())); DateTimeUnit unit = randomFrom(new DateTimeUnit[] { DateTimeUnit.HOUR_OF_DAY, DateTimeUnit.MINUTES_OF_HOUR, DateTimeUnit.SECOND_OF_MINUTE }); - DateTimeZone tz = DateTimeZone.forID(timeZone); - TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, tz); + DateTimeZone timezone = randomDateTimeZone(); + TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, timezone); // move the random date to transition for timezones that have offset change due to dst transition - long nextTransition = tz.nextTransition(Math.abs(randomLong() % ((long) 10e11))); + long nextTransition = timezone.nextTransition(Math.abs(randomLong() % ((long) 10e11))); final long millisPerUnit = unit.field().getDurationField().getUnitMillis(); // start ten units before transition long roundedDate = rounding.round(nextTransition - (10 * millisPerUnit)); while (roundedDate < nextTransition + 10 * millisPerUnit) { long delta = rounding.nextRoundingValue(roundedDate) - roundedDate; - assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timeZone + "] at " + assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timezone + "] at " + new DateTime(roundedDate), millisPerUnit, delta); roundedDate = rounding.nextRoundingValue(roundedDate); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index aa8614aee71..3afd60d86e4 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -302,11 +302,8 @@ public class ScopedSettingsTests extends ESTestCase { public void testLoggingUpdates() { final String level = ESLoggerFactory.getRootLogger().getLevel(); final String testLevel = ESLoggerFactory.getLogger("test").getLevel(); - String property = System.getProperty("es.logger.level"); - Settings.Builder builder = Settings.builder(); - if (property != null) { - builder.put("logger.level", property); - } + String property = randomFrom(ESLoggerFactory.LogLevel.values()).toString(); + Settings.Builder builder = Settings.builder().put("logger.level", property); try { ClusterSettings settings = new ClusterSettings(builder.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); try { @@ -319,7 +316,7 @@ public class ScopedSettingsTests extends ESTestCase { settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(property, ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger.test", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel()); settings.applySettings(Settings.builder().build()); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index 977d5fa7b09..5e992fc947c 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -208,4 
+208,13 @@ public class SettingsModuleTests extends ModuleTestCase { assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice")); } } + + public void testOldMaxClauseCountSetting() { + Settings settings = Settings.builder().put("index.query.bool.max_clause_count", 1024).build(); + SettingsModule module = new SettingsModule(settings); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> assertInstanceBinding(module, Settings.class, (s) -> s == settings)); + assertEquals("unknown setting [index.query.bool.max_clause_count] did you mean [indices.query.bool.max_clause_count]?", + ex.getMessage()); + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 3539e54d943..346c5bc60de 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -31,7 +31,9 @@ import java.util.Set; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -42,31 +44,30 @@ public class SettingsTests extends ESTestCase { String value = System.getProperty("java.home"); assertFalse(value.isEmpty()); Settings settings = Settings.builder() - .put("setting1", "${java.home}") + .put("property.placeholder", value) + .put("setting1", "${property.placeholder}") .replacePropertyPlaceholders() .build(); assertThat(settings.get("setting1"), equalTo(value)); - - assertNull(System.getProperty("_test_property_should_not_exist")); - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:defaultVal1}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), equalTo("defaultVal1")); - - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); } - public void testReplacePropertiesPlaceholderIgnoreEnvUnset() { - Settings settings = Settings.builder() - .put("setting1", "${env.UNSET_ENV_VAR}") + public void testReplacePropertiesPlaceholderSystemVariablesHaveNoEffect() { + final String value = System.getProperty("java.home"); + assertNotNull(value); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Settings.builder() + .put("setting1", "${java.home}") .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); + .build()); + assertThat(e, hasToString(containsString("Could not resolve placeholder 'java.home'"))); + } + + public void testReplacePropertiesPlaceholderByEnvironmentVariables() { + final String hostname = randomAsciiOfLength(16); + final Settings implicitEnvSettings = Settings.builder() + .put("setting1", "${HOSTNAME}") + .replacePropertyPlaceholders(name -> "HOSTNAME".equals(name) ? 
hostname : null) + .build(); + assertThat(implicitEnvSettings.get("setting1"), equalTo(hostname)); } public void testReplacePropertiesPlaceholderIgnoresPrompt() { diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 01e717e6fa9..ee1cf7280e7 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -346,7 +346,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } /** @@ -425,7 +425,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } /** @@ -444,7 +444,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -452,7 +452,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node1, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -460,7 +460,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node2, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(0)); @@ -485,7 +485,7 @@ public class 
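// Recapping the placeholder rules pinned down by the SettingsTests changes above with a
// small sketch (the setting key is made up for illustration): JVM system properties are no
// longer resolved implicitly, and environment-style values must come through the explicit
// resolver overload, otherwise the build fails:
//
//   Settings s = Settings.builder()
//       .put("node.attr.host", "${HOSTNAME}")
//       .replacePropertyPlaceholders(name -> "HOSTNAME".equals(name) ? System.getenv(name) : null)
//       .build();
//   // s.get("node.attr.host") == the HOSTNAME environment value; an unresolvable
//   // placeholder now throws "Could not resolve placeholder ..." instead of resolving silently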
PrimaryShardAllocatorTests extends ESAllocationTestCase { .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -493,7 +493,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node1, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -501,7 +501,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node2, 2, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(0)); @@ -525,7 +525,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTableBuilder.build()) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } class TestAllocator extends PrimaryShardAllocator { diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 672c9de3d3e..20eb6286813 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -302,7 +302,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime(), false); } private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) { @@ -324,7 +324,7 @@ public class ReplicaShardAllocatorTests extends 
ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime(), false); } class TestAllocator extends ReplicaShardAllocator { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java index 632df308e99..eeda96743df 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; @@ -53,6 +54,7 @@ public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase { .build(); } + @TestLogging("_root:DEBUG,org.elasticsearch.common.breaker:TRACE,org.elasticsearch.test:TRACE,org.elasticsearch.transport:TRACE") public void testLimitsInFlightRequests() throws Exception { ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index b667c256019..b72996bd1a1 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -41,6 +41,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class AnalysisServiceTests extends ESTestCase { @@ -183,4 +184,19 @@ public class AnalysisServiceTests extends ESTestCase { assertSame(analysisService.analyzer(preBuiltAnalyzers.name()), otherAnalysisSergice.analyzer(preBuiltAnalyzers.name())); } } + + public void testNoTypeOrTokenizerErrorMessage() throws IOException { + Version version = VersionUtils.randomVersion(random()); + Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"}) + .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"}) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisRegistry(null, new Environment(settings)).build(idxSettings)); + assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java index 8c1d530e448..ac412207021 100644 --- 
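// The new AnalysisServiceTests case above locks in a clearer error for analyzers configured
// with only filters. As a hypothetical remedy (sketch only, the analyzer and tokenizer names
// are illustrative), naming a tokenizer makes the equivalent configuration build:
//
//   Settings fixed = Settings.builder()
//       .put(IndexMetaData.SETTING_VERSION_CREATED, version)
//       .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
//       .put("index.analysis.analyzer.test_analyzer.tokenizer", "standard")
//       .putArray("index.analysis.analyzer.test_analyzer.filter", "lowercase", "stop", "shingle")
//       .build();
//   // new AnalysisRegistry(null, new Environment(fixed)).build(IndexSettingsModule.newIndexSettings("index", fixed))
//   // now succeeds instead of throwing "must specify either an analyzer type, or a tokenizer"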
a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java @@ -26,13 +26,13 @@ import org.elasticsearch.test.ESTokenStreamTestCase; public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { public void testFingerprint() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "foo bar@baz Baz $ foo foo FOO. FoO", new String[]{"bar baz foo"}); } public void testReusableTokenStream() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "foo bar baz Baz foo foo FOO. FoO", new String[]{"bar baz foo"}); assertAnalyzesTo(a, "xyz XYZ abc 123.2 abc", @@ -40,7 +40,7 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { } public void testAsciifolding() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "gödel escher bach", new String[]{"bach escher godel"}); @@ -48,14 +48,8 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { new String[]{"bach escher godel"}); } - public void testPreserveOriginal() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, true); - assertAnalyzesTo(a, "gödel escher bach", - new String[]{"bach escher godel gödel"}); - } - public void testLimit() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 3, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 3); assertAnalyzesTo(a, "e d c b a", new String[]{}); @@ -63,10 +57,4 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { new String[]{"a b"}); } - public void testSeparator() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, '_', 255, true); - assertAnalyzesTo(a, "b c a", - new String[]{"a_b_c"}); - } - } diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java index 604670f179e..8faad63c0a4 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java @@ -38,7 +38,6 @@ import org.junit.Before; import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.percolator.PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; @@ -251,7 +250,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { } private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException { - XContentParser sourceParser = QUERY_BUILDER_CONTENT_TYPE.xContent().createParser(actual.bytes, actual.offset, actual.length); + XContentParser sourceParser = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent() + .createParser(actual.bytes, 
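// As the updated assertions above show, FingerprintAnalyzer is now constructed from just
// (stopWords, separator, maxOutputSize); the preserve-original and configurable-separator
// variants were removed along with their tests:
//
//   Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255);
//   // "foo bar@baz Baz $ foo foo FOO. FoO"  ->  "bar baz foo"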
actual.offset, actual.length); QueryParseContext qsc = indexService.newQueryShardContext().newParseContext(sourceParser); assertThat(qsc.parseInnerQueryBuilder(), equalTo(expected)); } diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java deleted file mode 100644 index 30986e45e4b..00000000000 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java +++ /dev/null @@ -1,390 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.percolator; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.analysis.core.SimpleAnalyzer; -import org.apache.lucene.document.BinaryDocValuesField; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.StoredField; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TieredMergePolicy; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexWarmer; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.DocumentFieldMappers; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; -import 
org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.PercolateQuery; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WildcardQueryBuilder; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.index.warmer.ShardIndexWarmerService; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.IOException; -import java.util.Collections; - -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class PercolatorQueryCacheTests extends ESTestCase { - - private QueryShardContext queryShardContext; - private PercolatorQueryCache cache; - - void initialize(Object... fields) throws IOException { - Settings settings = Settings.builder() - .put("node.name", PercolatorQueryCacheTests.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .build(); - - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(); - QueryParser termParser = TermQueryBuilder::fromXContent; - indicesQueriesRegistry.register(termParser, TermQueryBuilder.QUERY_NAME_FIELD); - QueryParser wildcardParser = WildcardQueryBuilder::fromXContent; - indicesQueriesRegistry.register(wildcardParser, WildcardQueryBuilder.QUERY_NAME_FIELD); - QueryParser boolQueryParser = BoolQueryBuilder::fromXContent; - indicesQueriesRegistry.register(boolQueryParser, BoolQueryBuilder.QUERY_NAME_FIELD); - - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("_index", ClusterState.UNKNOWN_UUID), indexSettings); - SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); - MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, - () -> queryShardContext); - mapperService.merge("type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("type", fields).string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - cache = new PercolatorQueryCache(idxSettings, () -> queryShardContext); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - queryShardContext = new QueryShardContext(idxSettings, null, null, mapperService, similarityService, null, - 
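/* Note the `cache` argument threaded through this constructor call: the deleted
   test wires the PercolatorQueryCache into QueryShardContext by hand. The rest
   of this patch removes that constructor parameter altogether; for contrast,
   the AbstractQueryTestCase hunk further down now builds the context as

       new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService,
               mapperService, similarityService, scriptService, indicesQueriesRegistry,
               client, null, state);

   (one argument shorter), and QueryShardContextTests likewise drops one null. */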
indicesQueriesRegistry, null, cache, null, state); - } - - public void testLoadQueries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())) - .setMergePolicy(NoMergePolicy.INSTANCE) - ); - - boolean legacyFormat = randomBoolean(); - Version version = legacyFormat ? Version.V_2_0_0 : Version.CURRENT; - IndexShard indexShard = mockIndexShard(version, legacyFormat); - - storeQuery("0", indexWriter, termQuery("field1", "value1"), true, legacyFormat); - storeQuery("1", indexWriter, wildcardQuery("field1", "v*"), true, legacyFormat); - storeQuery("2", indexWriter, boolQuery().must(termQuery("field1", "value1")).must(termQuery("field2", "value2")), - true, legacyFormat); - // dummy docs should be skipped during loading: - Document doc = new Document(); - doc.add(new StringField("dummy", "value", Field.Store.YES)); - indexWriter.addDocument(doc); - storeQuery("4", indexWriter, termQuery("field2", "value2"), true, legacyFormat); - // only documents of the .percolator type should be loaded: - storeQuery("5", indexWriter, termQuery("field2", "value2"), false, legacyFormat); - storeQuery("6", indexWriter, termQuery("field3", "value3"), true, legacyFormat); - indexWriter.forceMerge(1); - - // also include queries for percolator docs marked as deleted: - indexWriter.deleteDocuments(new Term("id", "6")); - indexWriter.close(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); - assertThat(indexReader.leaves().size(), equalTo(1)); - assertThat(indexReader.numDeletedDocs(), equalTo(1)); - assertThat(indexReader.maxDoc(), equalTo(7)); - - initialize("field1", "type=keyword", "field2", "type=keyword", "field3", "type=keyword"); - - PercolatorQueryCache.QueriesLeaf leaf = cache.loadQueries(indexReader.leaves().get(0), indexShard); - assertThat(leaf.queries.size(), equalTo(5)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("field1", "value1")))); - assertThat(leaf.getQuery(1), equalTo(new WildcardQuery(new Term("field1", "v*")))); - assertThat(leaf.getQuery(2), equalTo(new BooleanQuery.Builder() - .add(new TermQuery(new Term("field1", "value1")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field2", "value2")), BooleanClause.Occur.MUST) - .build() - )); - assertThat(leaf.getQuery(4), equalTo(new TermQuery(new Term("field2", "value2")))); - assertThat(leaf.getQuery(6), equalTo(new TermQuery(new Term("field3", "value3")))); - - indexReader.close(); - directory.close(); - } - - public void testGetQueries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) - ); - - storeQuery("0", indexWriter, termQuery("a", "0"), true, false); - storeQuery("1", indexWriter, termQuery("a", "1"), true, false); - storeQuery("2", indexWriter, termQuery("a", "2"), true, false); - indexWriter.flush(); - storeQuery("3", indexWriter, termQuery("a", "3"), true, false); - storeQuery("4", indexWriter, termQuery("a", "4"), true, false); - storeQuery("5", indexWriter, termQuery("a", "5"), true, false); - indexWriter.flush(); - storeQuery("6", indexWriter, termQuery("a", "6"), true, false); - storeQuery("7", indexWriter, termQuery("a", "7"), true, false); - storeQuery("8", indexWriter, termQuery("a", "8"), true, false);
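Each batch of three storeQuery calls in this deleted test ends with a flush(), and a final flush() follows just below: because the writer uses NoMergePolicy, every flush seals the buffered documents into their own segment, which is why the reader is later asserted to have exactly three leaves. A minimal, self-contained sketch of that segment-per-flush behavior with plain Lucene (the analyzer, directory, and field names here are illustrative, not taken from this patch):

--------------------------------
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class SegmentPerFlushSketch {
    public static void main(String[] args) throws Exception {
        Directory directory = new RAMDirectory();
        IndexWriter writer = new IndexWriter(directory,
                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));
        for (int batch = 0; batch < 3; batch++) {
            for (int i = 0; i < 3; i++) {
                Document doc = new Document();
                doc.add(new StringField("id", batch + "_" + i, Field.Store.NO));
                writer.addDocument(doc);
            }
            writer.flush(); // seals the current in-memory buffer into its own segment
        }
        writer.close();
        try (DirectoryReader reader = DirectoryReader.open(directory)) {
            // one leaf (segment) per flush, mirroring the three-leaf assertion in the test
            assert reader.leaves().size() == 3;
            assert reader.maxDoc() == 9;
        }
    }
}
--------------------------------

(testInvalidateEntries further down shows the inverse: forceMerge(1) collapses everything back into a single leaf.)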
- indexWriter.flush(); - indexWriter.close(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID , 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); - assertThat(indexReader.leaves().size(), equalTo(3)); - assertThat(indexReader.maxDoc(), equalTo(9)); - - initialize("a", "type=keyword"); - - try { - cache.getQueries(indexReader.leaves().get(0)); - fail("IllegalStateException expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("queries not loaded, queries should be have been preloaded during index warming...")); - } - - IndexShard indexShard = mockIndexShard(Version.CURRENT, false); - ThreadPool threadPool = mockThreadPool(); - IndexWarmer.Listener listener = cache.createListener(threadPool); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - PercolatorQueryCacheStats stats = cache.getStats(shardId); - assertThat(stats.getNumQueries(), equalTo(9L)); - - PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "1")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "2")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "3")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "4")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "5")))); - - leaf = cache.getQueries(indexReader.leaves().get(2)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "6")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "7")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "8")))); - - indexReader.close(); - directory.close(); - } - - public void testInvalidateEntries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) - ); - - storeQuery("0", indexWriter, termQuery("a", "0"), true, false); - indexWriter.flush(); - storeQuery("1", indexWriter, termQuery("a", "1"), true, false); - indexWriter.flush(); - storeQuery("2", indexWriter, termQuery("a", "2"), true, false); - indexWriter.flush(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - assertThat(indexReader.leaves().size(), equalTo(3)); - assertThat(indexReader.maxDoc(), equalTo(3)); - - initialize("a", "type=keyword"); - - IndexShard indexShard = mockIndexShard(Version.CURRENT, false); - ThreadPool threadPool = mockThreadPool(); - IndexWarmer.Listener listener = cache.createListener(threadPool); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(3L)); - - PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "1")))); - - leaf = cache.getQueries(indexReader.leaves().get(2)); - assertThat(leaf.getQuery(0), equalTo(new 
TermQuery(new Term("a", "2")))); - - // change merge policy, so that merges will actually happen: - indexWriter.getConfig().setMergePolicy(new TieredMergePolicy()); - indexWriter.deleteDocuments(new Term("id", "1")); - indexWriter.forceMergeDeletes(); - indexReader.close(); - indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - assertThat(indexReader.leaves().size(), equalTo(2)); - assertThat(indexReader.maxDoc(), equalTo(2)); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); - - leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "2")))); - - indexWriter.forceMerge(1); - indexReader.close(); - indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - assertThat(indexReader.leaves().size(), equalTo(1)); - assertThat(indexReader.maxDoc(), equalTo(2)); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); - - leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "2")))); - - indexWriter.close(); - indexReader.close(); - directory.close(); - } - - void storeQuery(String id, IndexWriter indexWriter, QueryBuilder queryBuilder, boolean typeField, boolean legacy) throws IOException { - Document doc = new Document(); - doc.add(new StringField("id", id, Field.Store.NO)); - if (typeField) { - if (legacy) { - doc.add(new StringField(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME, Field.Store.NO)); - } else { - doc.add(new StringField(TypeFieldMapper.NAME, "query", Field.Store.NO)); - } - } - if (legacy) { - BytesReference percolatorQuery = XContentFactory.jsonBuilder().startObject() - .field("query", queryBuilder) - .endObject().bytes(); - doc.add(new StoredField( - SourceFieldMapper.NAME, - percolatorQuery.array(), percolatorQuery.arrayOffset(), percolatorQuery.length()) - ); - } else { - BytesRef queryBuilderAsBytes = new BytesRef( - XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE).value(queryBuilder).bytes().toBytes() - ); - doc.add(new BinaryDocValuesField(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME, queryBuilderAsBytes)); - } - indexWriter.addDocument(doc); - } - - IndexShard mockIndexShard(Version version, boolean legacyFormat) { - IndexShard indexShard = mock(IndexShard.class); - ShardIndexWarmerService shardIndexWarmerService = mock(ShardIndexWarmerService.class); - when(shardIndexWarmerService.logger()).thenReturn(logger); - when(indexShard.warmerService()).thenReturn(shardIndexWarmerService); - IndexSettings indexSettings = new IndexSettings( - IndexMetaData.builder("_index").settings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, version) - ).build(), - Settings.EMPTY - ); - when(indexShard.indexSettings()).thenReturn(indexSettings); - - PercolatorFieldMapper.PercolatorFieldType fieldType = mock(PercolatorFieldMapper.PercolatorFieldType.class); - 
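This mockIndexShard helper (and mockThreadPool just after it) assembles every collaborator from Mockito stubs: mock(...) yields an inert implementation and when(...).thenReturn(...) pins down only the calls the cache actually makes, including the direct-executor trick when(threadPool.executor(anyString())).thenReturn(Runnable::run). A tiny self-contained illustration of the idiom; the Greeter type is hypothetical, purely for demonstration:

--------------------------------
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MockitoStubSketch {

    // hypothetical collaborator, not part of this patch
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);
        // stub a single interaction; everything else keeps Mockito's defaults
        when(greeter.greet("es")).thenReturn("hello es");
        System.out.println(greeter.greet("es"));    // prints "hello es"
        System.out.println(greeter.greet("other")); // prints "null": unstubbed calls return defaults
    }
}
--------------------------------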
when(fieldType.name()).thenReturn("query"); - when(fieldType.getQueryBuilderFieldName()).thenReturn(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME); - PercolatorFieldMapper percolatorFieldMapper = mock(PercolatorFieldMapper.class); - when(percolatorFieldMapper.fieldType()).thenReturn(fieldType); - MapperService mapperService = mock(MapperService.class); - DocumentMapper documentMapper = mock(DocumentMapper.class); - if (legacyFormat) { - when(documentMapper.type()).thenReturn(PercolatorFieldMapper.LEGACY_TYPE_NAME); - when(documentMapper.typeFilter()) - .thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME))); - } else { - when(documentMapper.type()).thenReturn("query"); - when(documentMapper.typeFilter()).thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, "query"))); - } - - Analyzer analyzer = new SimpleAnalyzer(); - DocumentFieldMappers documentFieldMappers = - new DocumentFieldMappers(Collections.singleton(percolatorFieldMapper), analyzer, analyzer, analyzer); - when(documentMapper.mappers()).thenReturn(documentFieldMappers); - - when(mapperService.docMappers(false)).thenReturn(Collections.singleton(documentMapper)); - - when(indexShard.mapperService()).thenReturn(mapperService); - - return indexShard; - } - - ThreadPool mockThreadPool() { - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(anyString())).thenReturn(Runnable::run); - return threadPool; - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 9c47d701f57..b49488bd7c2 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; -import org.elasticsearch.script.ScriptMode; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; @@ -76,7 +75,6 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -190,7 +188,6 @@ public abstract class AbstractQueryTestCase> private static IndexSettings idxSettings; private static SimilarityService similarityService; private static MapperService mapperService; - private static PercolatorQueryCache percolatorQueryCache; private static BitsetFilterCache bitsetFilterCache; private static ScriptService scriptService; @@ -241,7 +238,7 @@ public abstract class AbstractQueryTestCase> ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, - MockScriptEngine.NAME, ScriptMode.ON))); + MockScriptEngine.NAME, true))); bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts); 
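The registration hunk a few lines up is the first of many identical substitutions in this patch: the ScriptMode enum argument of ScriptEngineRegistry.ScriptEngineRegistration collapses to a plain boolean. Isolated from the surrounding test plumbing, the shape of the change is:

--------------------------------
// before: enabled/disabled expressed through the ScriptMode enum
new ScriptEngineRegistry.ScriptEngineRegistration(
        MockScriptEngine.class, MockScriptEngine.NAME, ScriptMode.ON);

// after: the same registration with a boolean flag
new ScriptEngineRegistry.ScriptEngineRegistration(
        MockScriptEngine.class, MockScriptEngine.NAME, true);
--------------------------------

The same swap recurs below in FileScriptTests, NativeScriptTests, ScriptContextTests and ScriptServiceTests, and ScriptModesTests trades its ScriptMode bookkeeping for booleans accordingly.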
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry); @@ -308,7 +305,6 @@ public abstract class AbstractQueryTestCase> } }); - percolatorQueryCache = new PercolatorQueryCache(idxSettings, () -> createShardContext()); indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); //create some random type with some default field, those types will stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; @@ -349,7 +345,6 @@ public abstract class AbstractQueryTestCase> idxSettings = null; similarityService = null; mapperService = null; - percolatorQueryCache = null; bitsetFilterCache = null; scriptService = null; } @@ -750,7 +745,7 @@ public abstract class AbstractQueryTestCase> ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); Client client = injector.getInstance(Client.class); return new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, - scriptService, indicesQueriesRegistry, client, percolatorQueryCache, null, state); + scriptService, indicesQueriesRegistry, client, null, state); } /** @@ -903,12 +898,6 @@ public abstract class AbstractQueryTestCase> return randomFrom("1", "-1", "75%", "-25%", "2<75%", "2<-25%"); } - protected static String randomTimeZone() { - return randomFrom(TIMEZONE_IDS); - } - - private static final List TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs()); - private static class ClientInvocationHandler implements InvocationHandler { AbstractQueryTestCase delegate; diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 6cec3ae8c52..c5e01c1d0ff 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PointRangeQuery; @@ -89,7 +88,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase new MatchQueryBuilder(null, "value")); + assertEquals("[match] requires fieldName", e.getMessage()); } - try { - new MatchQueryBuilder("fieldName", null); - fail("value must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder("fieldName", null)); + assertEquals("[match] requires query value", e.getMessage()); } MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); - try { - matchQuery.prefixLength(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.prefixLength(-1)); + assertEquals("[match] requires prefix length to be non-negative.", e.getMessage()); } - try { - matchQuery.maxExpansions(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> matchQuery.maxExpansions(randomIntBetween(-10, 
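/* This hunk is the expectThrows migration that recurs across the query-builder
   tests in this patch: the four-line try/fail/catch idiom becomes a single call
   that also returns the caught exception, so its message can be asserted. The
   shape of the change, extracted from this hunk:

       // before
       try {
           matchQuery.maxExpansions(-1);
           fail("must not be positive");
       } catch (IllegalArgumentException ex) {
           // expected
       }

       // after
       IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
               () -> matchQuery.maxExpansions(randomIntBetween(-10, 0)));
       assertEquals("[match] requires maxExpansions to be positive.", e.getMessage());

   The same pattern is applied to the span_near and span_or builders below. */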
0))); + assertEquals("[match] requires maxExpansions to be positive.", e.getMessage()); } - try { - matchQuery.operator(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.operator(null)); + assertEquals("[match] requires operator to be non-null", e.getMessage()); } - try { - matchQuery.type(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null)); + assertEquals("[match] requires type to be non-null", e.getMessage()); } - try { - matchQuery.zeroTermsQuery(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null)); + assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage()); } - } - public void testBadAnalyzer() throws IOException { - MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); matchQuery.analyzer("bogusAnalyzer"); - try { - matchQuery.toQuery(createShardContext()); - fail("Expected QueryShardException"); - } catch (QueryShardException e) { + { + QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext())); assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found")); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java index 690b2c03a2f..92a9369aed5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.query; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; @@ -42,6 +41,7 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java index acb4917b421..8dde535e3cc 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java @@ -84,14 +84,14 @@ public class PercolateQueryTests extends ESTestCase { private Directory directory; private IndexWriter indexWriter; private Map queries; - private PercolateQuery.QueryRegistry queryRegistry; + private PercolateQuery.QueryStore queryStore; private DirectoryReader directoryReader; @Before public void init() throws Exception { directory = newDirectory(); queries = new HashMap<>(); - queryRegistry = ctx -> docId -> { + queryStore = ctx -> docId -> { try { String val = ctx.reader().document(docId).get(UidFieldMapper.NAME); return queries.get(Uid.createUid(val).id()); @@ -145,7 +145,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), 
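/* The queryStore built in init() above is the renamed PercolateQuery.QueryStore
   (formerly QueryRegistry). As used here it is a two-level lambda: a
   LeafReaderContext maps to a per-segment lookup from docId to the registered
   Query. A sketch of an interface with that shape (names hypothetical, inferred
   from this usage):

       interface QueryStore {
           Leaf getQueries(LeafReaderContext ctx) throws IOException;

           interface Leaf {
               Query getQuery(int docId) throws IOException;
           }
       }

       QueryStore store = ctx -> docId -> queries.get(lookupId(ctx, docId));

   where lookupId stands in for the way init() resolves a docId to a Uid via the
   segment reader before consulting the queries map. */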
percolateSearcher ); @@ -219,7 +219,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); @@ -336,7 +336,7 @@ public class PercolateQueryTests extends ESTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery.Builder builder1 = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); @@ -346,7 +346,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder2 = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java index f82561a4179..d0f2236ce06 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java @@ -314,7 +314,7 @@ public class QueryDSLDocumentationTests extends ESTestCase { public void testSpanContaining() { spanContainingQuery( spanNearQuery(spanTermQuery("field1","bar"), 5) - .clause(spanTermQuery("field1","baz")) + .addClause(spanTermQuery("field1","baz")) .inOrder(true), spanTermQuery("field1","foo")); } @@ -332,8 +332,8 @@ public class QueryDSLDocumentationTests extends ESTestCase { public void testSpanNear() { spanNearQuery(spanTermQuery("field","value1"), 12) - .clause(spanTermQuery("field","value2")) - .clause(spanTermQuery("field","value3")) + .addClause(spanTermQuery("field","value2")) + .addClause(spanTermQuery("field","value3")) .inOrder(false); } @@ -344,8 +344,8 @@ public class QueryDSLDocumentationTests extends ESTestCase { public void testSpanOr() { spanOrQuery(spanTermQuery("field","value1")) - .clause(spanTermQuery("field","value2")) - .clause(spanTermQuery("field","value3")); + .addClause(spanTermQuery("field","value2")) + .addClause(spanTermQuery("field","value3")); } public void testSpanTerm() { @@ -355,7 +355,7 @@ public class QueryDSLDocumentationTests extends ESTestCase { public void testSpanWithin() { spanWithinQuery( spanNearQuery(spanTermQuery("field1", "bar"), 5) - .clause(spanTermQuery("field1", "baz")) + .addClause(spanTermQuery("field1", "baz")) .inOrder(true), spanTermQuery("field1", "foo")); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index b6d69374661..9a4c9aece5c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -47,7 +47,7 @@ public class QueryShardContextTests extends ESTestCase { MapperService mapperService = mock(MapperService.class); when(mapperService.getIndexSettings()).thenReturn(indexSettings); QueryShardContext context = new QueryShardContext( - indexSettings, null, null, mapperService, null, null, null, null, null, null, null + indexSettings, null, null, mapperService, null, null, null, null, null, null ); context.setAllowUnmappedFields(false); diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 375db0c1eaf..cf16456b2bf 100644 --- 
a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.document.IntPoint; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; @@ -27,7 +26,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -148,7 +146,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase new SpanNearQueryBuilder(null, 1)); + assertEquals("[span_near] must include at least one clause", e.getMessage()); - try { SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); - spanNearQueryBuilder.clause(null); - fail("cannot be null"); - } catch (IllegalArgumentException e) { - // expected - } + e = expectThrows(IllegalArgumentException.class, () -> spanNearQueryBuilder.addClause(null)); + assertEquals("[span_near] clauses cannot be null", e.getMessage()); + } + + public void testClausesUnmodifiable() { + SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); + expectThrows(UnsupportedOperationException.class, + () -> spanNearQueryBuilder.clauses().add(new SpanTermQueryBuilder("field", "value2"))); } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 527b7e5e83a..e536dec0598 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -110,7 +110,7 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase new SpanOrQueryBuilder((SpanQueryBuilder) null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SpanOrQueryBuilder((SpanQueryBuilder) null)); + assertEquals("[span_or] must include at least one clause", e.getMessage()); - try { - SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(new SpanTermQueryBuilder("field", "value")); - spanOrBuilder.clause(null); - fail("cannot be null"); - } catch (IllegalArgumentException e) { - // expected - } + SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(new SpanTermQueryBuilder("field", "value")); + e = expectThrows(IllegalArgumentException.class, () -> spanOrBuilder.addClause(null)); + assertEquals("[span_or] inner clause cannot be null", e.getMessage()); + } + + public void testClausesUnmodifiable() { + SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); + expectThrows(UnsupportedOperationException.class, + () -> spanNearQueryBuilder.clauses().add(new SpanTermQueryBuilder("field", "value2"))); } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java 
b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 52c16dd74d9..f96b546855b 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -644,7 +644,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); - ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), test.cache().getPercolatorQueryCache(), shard, new CommonStatsFlags()), shard.commitStats()); + ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), shard, new CommonStatsFlags()), shard.commitStats()); assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath()); assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath()); assertEquals(shard.shardPath().isCustomDataPath(), stats.isCustomDataPath()); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 9b6e4670794..b4d2423921c 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.index.Index; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -138,8 +137,8 @@ public class TranslogTests extends ESTestCase { private TranslogConfig getTranslogConfig(Path path) { Settings build = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); + .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) + .build(); ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.getIndex(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } @@ -330,9 +329,9 @@ public class TranslogTests extends ESTestCase { assertEquals(6, copy.estimatedNumberOfOperations()); assertEquals(431, copy.getTranslogSizeInBytes()); assertEquals("\"translog\"{\n" + - " \"operations\" : 6,\n" + - " \"size_in_bytes\" : 431\n" + - "}", copy.toString().trim()); + " \"operations\" : 6,\n" + + " \"size_in_bytes\" : 431\n" + + "}", copy.toString().trim()); try { new TranslogStats(1, -1); @@ -604,7 +603,8 @@ public class TranslogTests extends ESTestCase { final List errors = new CopyOnWriteArrayList<>(); logger.debug("using [{}] readers. [{}] writers. 
flushing every ~[{}] ops.", readers.length, writers.length, flushEveryOps); for (int i = 0; i < writers.length; i++) { - final String threadId = "writer_" + i; + final String threadName = "writer_" + i; + final int threadId = i; writers[i] = new Thread(new AbstractRunnable() { @Override public void doRun() throws BrokenBarrierException, InterruptedException, IOException { @@ -629,18 +629,21 @@ public class TranslogTests extends ESTestCase { if (existing != null) { fail("duplicate op [" + op + "], old entry at " + location); } + if (id % writers.length == threadId) { + translog.ensureSynced(location); + } writtenOpsLatch.get().countDown(); counter++; } - logger.debug("--> [{}] done. wrote [{}] ops.", threadId, counter); + logger.debug("--> [{}] done. wrote [{}] ops.", threadName, counter); } @Override public void onFailure(Throwable t) { - logger.error("--> writer [{}] had an error", t, threadId); + logger.error("--> writer [{}] had an error", t, threadName); errors.add(t); } - }, threadId); + }, threadName); writers[i].start(); } @@ -1262,12 +1265,12 @@ public class TranslogTests extends ESTestCase { case CREATE: case INDEX: op = new Translog.Index("test", threadId + "_" + opCount, - randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); + randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); break; case DELETE: op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000), + randomFrom(VersionType.values())); break; default: throw new ElasticsearchException("not supported op type"); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index dc533737886..81c50cc4f9c 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -103,7 +103,7 @@ public class RareClusterStateIT extends ESIntegTestCase { .nodes(DiscoveryNodes.EMPTY_NODES) .build(), false ); - RoutingAllocation routingAllocation = new RoutingAllocation(allocationDeciders, routingNodes, current, ClusterInfo.EMPTY, System.nanoTime()); + RoutingAllocation routingAllocation = new RoutingAllocation(allocationDeciders, routingNodes, current, ClusterInfo.EMPTY, System.nanoTime(), false); allocator.allocateUnassigned(routingAllocation); } diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 4716e7dba78..92c3260aeb0 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -670,7 +670,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testFlagOrdinalOrder() { Flag[] flags = new Flag[]{Flag.Store, Flag.Indexing, Flag.Get, Flag.Search, Flag.Merge, Flag.Flush, Flag.Refresh, - Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.PercolatorCache, Flag.Completion, Flag.Segments, + Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.Completion, Flag.Segments, Flag.Translog, Flag.Suggest, Flag.RequestCache, Flag.Recovery}; assertThat(flags.length, equalTo(Flag.values().length)); @@ -913,9 +913,6 @@ public class IndexStatsIT extends ESIntegTestCase { case Warmer: builder.setWarmer(set); break; - case PercolatorCache: - builder.setPercolate(set); - break; case Completion: 
builder.setCompletion(set); break; @@ -963,8 +960,6 @@ public class IndexStatsIT extends ESIntegTestCase { return response.getStore() != null; case Warmer: return response.getWarmer() != null; - case PercolatorCache: - return response.getPercolatorCache() != null; case Completion: return response.getCompletion() != null; case Segments: diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 1eecb1397d3..68b5d175509 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -120,7 +120,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); - DateTimeZone timezone = randomTimezone(); + DateTimeZone timezone = randomDateTimeZone(); config.put("timezone", timezone.getID()); DateProcessor processor = factory.create(config); assertThat(processor.getTimezone(), equalTo(timezone)); @@ -141,14 +141,6 @@ public class DateProcessorFactoryTests extends ESTestCase { } } - //we generate a timezone out of the available ones in joda, some available in the jdk are not available in joda by default - private static DateTimeZone randomTimezone() { - List ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); - Collections.sort(ids); - return DateTimeZone.forID(randomFrom(ids)); - } - - public void testParseMatchFormats() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index e1397ca47f1..87abc20a0de 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.node.internal; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; - import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Settings; @@ -33,6 +28,11 @@ import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -134,7 +134,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { Files.createDirectory(config); Files.copy(garbage, config.resolve("elasticsearch.yml")); InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { @@ -153,7 +152,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { try { InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 
77b67227401..c489025a943 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -514,38 +514,6 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(percolate.getMatches(), emptyArray()); } - public void testPercolateStatistics() throws Exception { - client().admin().indices().prepareCreate(INDEX_NAME) - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - client().admin().indices().prepareCreate("test2") - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - ensureGreen(); - - logger.info("--> register a query"); - client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - client().prepareIndex("test2", TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - refresh(); - - logger.info("--> First percolate request"); - PercolateResponse response = client().preparePercolate() - .setIndices(INDEX_NAME).setDocumentType("type") - .setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject()) - .execute().actionGet(); - assertMatchCount(response, 1L); - assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("1")); - - NumShards numShards = getNumShards(INDEX_NAME); - - IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats(INDEX_NAME).execute().actionGet(); - assertThat(indicesResponse.getTotal().getPercolatorCache().getNumQueries(), equalTo((long)numShards.dataCopies)); // number of copies - } - public void testPercolatingExistingDocs() throws Exception { client().admin().indices().prepareCreate(INDEX_NAME) .addMapping(TYPE_NAME, "query", "type=percolator") diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 306f3813e9b..aa3b11e6250 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.Index; +import org.elasticsearch.rest.support.RestUtils; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -35,8 +36,11 @@ import java.io.IOException; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * @@ -147,6 +151,32 @@ public class BytesRestResponseTests extends ESTestCase { assertTrue(stackTrace.contains("Caused by: ParsingException[foobar]")); } + public void testResponseWhenPathContainsEncodingError() throws IOException { + final String path = "%a"; + final RestRequest request = mock(RestRequest.class); + when(request.rawPath()).thenReturn(path); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestUtils.decodeComponent(request.rawPath())); + final RestChannel channel = new 
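/* "%a" is a truncated percent-escape: '%' must be followed by two hex digits,
   so RestUtils.decodeComponent throws the IllegalArgumentException captured
   above, whose message the test then expects to surface in a 400 response.
   A sketch of the validation rule (hypothetical helper, not the actual ES
   implementation):

       static void checkEscapes(String path) {
           for (int i = 0; i < path.length(); i++) {
               if (path.charAt(i) == '%') {
                   if (i + 2 >= path.length()) {
                       throw new IllegalArgumentException(
                               "partial escape sequence at end of string: " + path);
                   }
                   i += 2; // skip the two hex digits (hex validation omitted in this sketch)
               }
           }
       }
*/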
DetailedExceptionRestChannel(request); + // if we try to decode the path, this will throw an IllegalArgumentException again + final BytesRestResponse response = new BytesRestResponse(channel, e); + assertNotNull(response.content()); + final String content = response.content().toUtf8(); + assertThat(content, containsString("\"type\":\"illegal_argument_exception\"")); + assertThat(content, containsString("\"reason\":\"partial escape sequence at end of string: %a\"")); + assertThat(content, containsString("\"status\":" + 400)); + } + + public void testResponseWhenInternalServerError() throws IOException { + final RestRequest request = new FakeRestRequest(); + final RestChannel channel = new DetailedExceptionRestChannel(request); + final BytesRestResponse response = new BytesRestResponse(channel, new ElasticsearchException("simulated")); + assertNotNull(response.content()); + final String content = response.content().toUtf8(); + assertThat(content, containsString("\"type\":\"exception\"")); + assertThat(content, containsString("\"reason\":\"simulated\"")); + assertThat(content, containsString("\"status\":" + 500)); + } + public static class WithHeadersException extends ElasticsearchException { WithHeadersException() { diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index e5f78d7474e..fefe55b9ec3 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.script.MockScriptEngine.MockCompiledScript; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.test.ESTestCase; import java.nio.file.Files; @@ -46,7 +45,7 @@ public class FileScriptTests extends ESTestCase { .put(settings) .build(); Set engines = new HashSet<>(Collections.singletonList(new MockScriptEngine())); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, ScriptMode.ON))); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 045f62ff8fe..5e1dc740f9e 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ 
-75,10 +74,10 @@ public class NativeScriptTests extends ESTestCase { Settings.Builder builder = Settings.builder(); if (randomBoolean()) { ScriptType scriptType = randomFrom(ScriptType.values()); - builder.put("script" + "." + scriptType.getScriptType(), randomFrom(ScriptMode.values())); + builder.put("script" + "." + scriptType.getScriptType(), randomBoolean()); } else { ScriptContext scriptContext = randomFrom(ScriptContext.Standard.values()); - builder.put("script" + "." + scriptContext.getKey(), randomFrom(ScriptMode.values())); + builder.put("script" + "." + scriptContext.getKey(), randomBoolean()); } Settings settings = builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(settings); @@ -86,7 +85,7 @@ public class NativeScriptTests extends ESTestCase { Map nativeScriptFactoryMap = new HashMap<>(); nativeScriptFactoryMap.put("my", new MyNativeScriptFactory()); Set scriptEngineServices = singleton(new NativeScriptEngineService(settings, nativeScriptFactoryMap)); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME, ScriptMode.ON))); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<>()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 8405366f34d..715694fe890 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -46,7 +45,7 @@ public class ScriptContextTests extends ESTestCase { .build(); Set engines = new HashSet<>(Collections.singletonList(new MockScriptEngine())); ScriptEngineRegistry.ScriptEngineRegistration registration = - new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, ScriptMode.ON); + new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true); ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(registration)); List customContexts = Arrays.asList( new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 800c079b889..8f02bee234d 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -83,7 +83,7 @@ public class ScriptModesTests extends ESTestCase { @After public void 
assertNativeScriptsAreAlwaysAllowed() { if (assertScriptModesNonNull) { - assertThat(scriptModes.getScriptMode(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(ScriptMode.ON)); + assertThat(scriptModes.getScriptEnabled(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(true)); } } @@ -93,7 +93,7 @@ public class ScriptModesTests extends ESTestCase { assertThat(scriptModes, notNullValue()); int numberOfSettings = ScriptType.values().length * scriptContextRegistry.scriptContexts().size(); numberOfSettings += 3; // for top-level inline/store/file settings - assertThat(scriptModes.scriptModes.size(), equalTo(numberOfSettings)); + assertThat(scriptModes.scriptEnabled.size(), equalTo(numberOfSettings)); if (assertAllSettingsWereChecked) { assertThat(checkedSettings.size(), equalTo(numberOfSettings)); } @@ -102,15 +102,15 @@ public class ScriptModesTests extends ESTestCase { public void testDefaultSettings() { this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); - assertScriptModesAllOps(ScriptMode.ON, ScriptType.FILE); - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.STORED, ScriptType.INLINE); + assertScriptModesAllOps(true, ScriptType.FILE); + assertScriptModesAllOps(false, ScriptType.STORED, ScriptType.INLINE); } public void testMissingSetting() { assertAllSettingsWereChecked = false; this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); try { - scriptModes.getScriptMode("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts)); + scriptModes.getScriptEnabled("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not found for lang [non_existing]")); @@ -120,13 +120,13 @@ public class ScriptModesTests extends ESTestCase { public void testScriptTypeGenericSettings() { int randomInt = randomIntBetween(1, ScriptType.values().length - 1); Set randomScriptTypesSet = new HashSet<>(); - ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; + boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptTypesSet.add(randomFrom(ScriptType.values())); } - randomScriptModes[i] = randomFrom(ScriptMode.values()); + randomScriptModes[i] = randomBoolean(); } ScriptType[] randomScriptTypes = randomScriptTypesSet.toArray(new ScriptType[randomScriptTypesSet.size()]); Settings.Builder builder = Settings.builder(); @@ -139,26 +139,26 @@ public class ScriptModesTests extends ESTestCase { assertScriptModesAllOps(randomScriptModes[i], randomScriptTypes[i]); } if (randomScriptTypesSet.contains(ScriptType.FILE) == false) { - assertScriptModesAllOps(ScriptMode.ON, ScriptType.FILE); + assertScriptModesAllOps(true, ScriptType.FILE); } if (randomScriptTypesSet.contains(ScriptType.STORED) == false) { - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.STORED); + assertScriptModesAllOps(false, ScriptType.STORED); } if (randomScriptTypesSet.contains(ScriptType.INLINE) == false) { - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.INLINE); + assertScriptModesAllOps(false, ScriptType.INLINE); } } public void testScriptContextGenericSettings() { int randomInt = randomIntBetween(1, scriptContexts.length - 1); Set randomScriptContextsSet = new HashSet<>(); - ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; + 
boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptContextsSet.add(randomFrom(scriptContexts)); } - randomScriptModes[i] = randomFrom(ScriptMode.values()); + randomScriptModes[i] = randomBoolean(); } ScriptContext[] randomScriptContexts = randomScriptContextsSet.toArray(new ScriptContext[randomScriptContextsSet.size()]); Settings.Builder builder = Settings.builder(); @@ -172,8 +172,8 @@ public class ScriptModesTests extends ESTestCase { } ScriptContext[] complementOf = complementOf(randomScriptContexts); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.FILE}, complementOf); - assertScriptModes(ScriptMode.OFF, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.FILE}, complementOf); + assertScriptModes(false, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, complementOf); } public void testConflictingScriptTypeAndOpGenericSettings() { @@ -184,28 +184,28 @@ public class ScriptModesTests extends ESTestCase { .put("script.inline", "true"); //operations generic settings have precedence over script type generic settings this.scriptModes = new ScriptModes(scriptSettings, builder.build()); - assertScriptModesAllTypes(ScriptMode.OFF, scriptContext); + assertScriptModesAllTypes(false, scriptContext); ScriptContext[] complementOf = complementOf(scriptContext); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, complementOf); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.INLINE}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.INLINE}, complementOf); } - private void assertScriptModesAllOps(ScriptMode expectedScriptMode, ScriptType... scriptTypes) { - assertScriptModes(expectedScriptMode, scriptTypes, scriptContexts); + private void assertScriptModesAllOps(boolean expectedScriptEnabled, ScriptType... scriptTypes) { + assertScriptModes(expectedScriptEnabled, scriptTypes, scriptContexts); } - private void assertScriptModesAllTypes(ScriptMode expectedScriptMode, ScriptContext... scriptContexts) { - assertScriptModes(expectedScriptMode, ScriptType.values(), scriptContexts); + private void assertScriptModesAllTypes(boolean expectedScriptEnabled, ScriptContext... scriptContexts) { + assertScriptModes(expectedScriptEnabled, ScriptType.values(), scriptContexts); } - private void assertScriptModes(ScriptMode expectedScriptMode, ScriptType[] scriptTypes, ScriptContext... scriptContexts) { + private void assertScriptModes(boolean expectedScriptEnabled, ScriptType[] scriptTypes, ScriptContext... scriptContexts) { assert scriptTypes.length > 0; assert scriptContexts.length > 0; for (ScriptType scriptType : scriptTypes) { checkedSettings.add("script.engine.custom." + scriptType); for (ScriptContext scriptContext : scriptContexts) { assertThat("custom." + scriptType + "." + scriptContext.getKey() + " doesn't have the expected value", - scriptModes.getScriptMode("custom", scriptType, scriptContext), equalTo(expectedScriptMode)); + scriptModes.getScriptEnabled("custom", scriptType, scriptContext), equalTo(expectedScriptEnabled)); checkedSettings.add("custom." + scriptType + "." 
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index 8fd8f674c3a..890ffccc514 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -71,12 +71,12 @@ public class ScriptServiceTests extends ESTestCase {
     private Path scriptsFilePath;
     private Settings baseSettings;

-    private static final Map<ScriptType, ScriptMode> DEFAULT_SCRIPT_MODES = new HashMap<>();
+    private static final Map<ScriptType, Boolean> DEFAULT_SCRIPT_ENABLED = new HashMap<>();

     static {
-        DEFAULT_SCRIPT_MODES.put(ScriptType.FILE, ScriptMode.ON);
-        DEFAULT_SCRIPT_MODES.put(ScriptType.STORED, ScriptMode.OFF);
-        DEFAULT_SCRIPT_MODES.put(ScriptType.INLINE, ScriptMode.OFF);
+        DEFAULT_SCRIPT_ENABLED.put(ScriptType.FILE, true);
+        DEFAULT_SCRIPT_ENABLED.put(ScriptType.STORED, false);
+        DEFAULT_SCRIPT_ENABLED.put(ScriptType.INLINE, false);
     }

     @Before
@@ -110,7 +110,7 @@
             contexts.put(context, new ScriptContext.Plugin(plugin, operation));
         }
         List<ScriptEngineRegistry.ScriptEngineRegistration> registries = new ArrayList<>(2);
-        registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME, ScriptMode.ON));
+        registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME, true));
         registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestDangerousEngineService.class, TestDangerousEngineService.NAME));
         scriptEngineRegistry = new ScriptEngineRegistry(registries);
         scriptContextRegistry = new ScriptContextRegistry(contexts.values());
@@ -215,25 +215,25 @@
     public void testFineGrainedSettings() throws IOException {
         //collect the fine-grained settings to set for this run
         int numScriptSettings = randomIntBetween(0, ScriptType.values().length);
-        Map<ScriptType, ScriptMode> scriptSourceSettings = new HashMap<>();
+        Map<ScriptType, Boolean> scriptSourceSettings = new HashMap<>();
         for (int i = 0; i < numScriptSettings; i++) {
             ScriptType scriptType;
             do {
                 scriptType = randomFrom(ScriptType.values());
             } while (scriptSourceSettings.containsKey(scriptType));
-            scriptSourceSettings.put(scriptType, randomFrom(ScriptMode.values()));
+            scriptSourceSettings.put(scriptType, randomBoolean());
         }
         int numScriptContextSettings = randomIntBetween(0, this.scriptContextRegistry.scriptContexts().size());
-        Map<ScriptContext, ScriptMode> scriptContextSettings = new HashMap<>();
+        Map<ScriptContext, Boolean> scriptContextSettings = new HashMap<>();
         for (int i = 0; i < numScriptContextSettings; i++) {
             ScriptContext scriptContext;
             do {
                 scriptContext = randomFrom(this.scriptContexts);
             } while (scriptContextSettings.containsKey(scriptContext));
-            scriptContextSettings.put(scriptContext, randomFrom(ScriptMode.values()));
+            scriptContextSettings.put(scriptContext, randomBoolean());
         }
         int numEngineSettings = randomIntBetween(0, ScriptType.values().length * scriptContexts.length);
-        Map<String, ScriptMode> engineSettings = new HashMap<>();
+        Map<String, Boolean> engineSettings = new HashMap<>();
         for (int i = 0; i < numEngineSettings; i++) {
             String settingKey;
             do {
@@ -241,43 +241,34 @@
                 ScriptContext scriptContext = randomFrom(this.scriptContexts);
                 settingKey = scriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey();
             } while (engineSettings.containsKey(settingKey));
-            engineSettings.put(settingKey, randomFrom(ScriptMode.values()));
+            engineSettings.put(settingKey, randomBoolean());
         }
         //set the selected fine-grained settings
         Settings.Builder builder = Settings.builder();
-        for (Map.Entry<ScriptType, ScriptMode> entry : scriptSourceSettings.entrySet()) {
-            switch (entry.getValue()) {
-                case ON:
-                    builder.put("script" + "." + entry.getKey().getScriptType(), "true");
-                    break;
-                case OFF:
-                    builder.put("script" + "." + entry.getKey().getScriptType(), "false");
-                    break;
+        for (Map.Entry<ScriptType, Boolean> entry : scriptSourceSettings.entrySet()) {
+            if (entry.getValue()) {
+                builder.put("script" + "." + entry.getKey().getScriptType(), "true");
+            } else {
+                builder.put("script" + "." + entry.getKey().getScriptType(), "false");
             }
         }
-        for (Map.Entry<ScriptContext, ScriptMode> entry : scriptContextSettings.entrySet()) {
-            switch (entry.getValue()) {
-                case ON:
-                    builder.put("script" + "." + entry.getKey().getKey(), "true");
-                    break;
-                case OFF:
-                    builder.put("script" + "." + entry.getKey().getKey(), "false");
-                    break;
+        for (Map.Entry<ScriptContext, Boolean> entry : scriptContextSettings.entrySet()) {
+            if (entry.getValue()) {
+                builder.put("script" + "." + entry.getKey().getKey(), "true");
+            } else {
+                builder.put("script" + "." + entry.getKey().getKey(), "false");
             }
         }
-        for (Map.Entry<String, ScriptMode> entry : engineSettings.entrySet()) {
+        for (Map.Entry<String, Boolean> entry : engineSettings.entrySet()) {
             int delimiter = entry.getKey().indexOf('.');
             String part1 = entry.getKey().substring(0, delimiter);
             String part2 = entry.getKey().substring(delimiter + 1);
             String lang = randomFrom(scriptEnginesByLangMap.get(part1).getType());
-            switch (entry.getValue()) {
-                case ON:
-                    builder.put("script.engine" + "." + lang + "." + part2, "true");
-                    break;
-                case OFF:
-                    builder.put("script.engine" + "." + lang + "." + part2, "false");
-                    break;
+            if (entry.getValue()) {
+                builder.put("script.engine" + "." + lang + "." + part2, "true");
+            } else {
+                builder.put("script.engine" + "." + lang + "." + part2, "false");
             }
         }
@@ -290,25 +281,22 @@
             String script = scriptType == ScriptType.FILE ? "file_script" : "script";
             for (ScriptContext scriptContext : this.scriptContexts) {
                 //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings
-                ScriptMode scriptMode = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey());
-                if (scriptMode == null) {
-                    scriptMode = scriptContextSettings.get(scriptContext);
+                Boolean scriptEnabled = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey());
+                if (scriptEnabled == null) {
+                    scriptEnabled = scriptContextSettings.get(scriptContext);
                 }
-                if (scriptMode == null) {
-                    scriptMode = scriptSourceSettings.get(scriptType);
+                if (scriptEnabled == null) {
+                    scriptEnabled = scriptSourceSettings.get(scriptType);
                 }
-                if (scriptMode == null) {
-                    scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType);
+                if (scriptEnabled == null) {
+                    scriptEnabled = DEFAULT_SCRIPT_ENABLED.get(scriptType);
                 }
                 String lang = dangerousScriptEngineService.getType();
-                switch (scriptMode) {
-                    case ON:
-                        assertCompileAccepted(lang, script, scriptType, scriptContext);
-                        break;
-                    case OFF:
-                        assertCompileRejected(lang, script, scriptType, scriptContext);
-                        break;
+                if (scriptEnabled) {
+                    assertCompileAccepted(lang, script, scriptType, scriptContext);
+                } else {
+                    assertCompileRejected(lang, script, scriptType, scriptContext);
                 }
             }
         }
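The fallback order exercised by testFineGrainedSettings is worth stating on its own: an engine-specific setting wins over a context setting, which wins over a script-type setting, which falls back to the built-in default. A self-contained sketch of that resolution, with plain maps standing in for the parsed settings (all names here are illustrative, not Elasticsearch API):

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the enabled/disabled fallback the test above checks:
// 1) engine-specific key, 2) script-context key, 3) script-type key, 4) default.
public class ScriptEnabledFallback {

    static boolean resolve(String engineKey, String contextKey, String typeKey,
                           Map<String, Boolean> engineSettings,
                           Map<String, Boolean> contextSettings,
                           Map<String, Boolean> typeSettings,
                           Map<String, Boolean> defaults) {
        Boolean enabled = engineSettings.get(engineKey);
        if (enabled == null) {
            enabled = contextSettings.get(contextKey);
        }
        if (enabled == null) {
            enabled = typeSettings.get(typeKey);
        }
        if (enabled == null) {
            enabled = defaults.get(typeKey); // defaults must cover every script type
        }
        return enabled;
    }

    public static void main(String[] args) {
        Map<String, Boolean> engine = new HashMap<>();
        Map<String, Boolean> context = new HashMap<>();
        Map<String, Boolean> type = new HashMap<>();
        Map<String, Boolean> defaults = new HashMap<>();
        defaults.put("inline", false); // mirrors DEFAULT_SCRIPT_ENABLED above
        context.put("search", true);   // a context-level override wins over the default
        System.out.println(resolve("engine.inline.search", "search", "inline",
                engine, context, type, defaults)); // true
    }
}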
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java
index 3d82e2f1468..92598ec8dd2 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.script;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESTestCase;

@@ -39,7 +38,7 @@ public class ScriptSettingsTests extends ESTestCase {

     public void testDefaultLanguageIsGroovy() {
         ScriptEngineRegistry scriptEngineRegistry =
-                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON)));
+                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
         ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
         ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
         assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(Settings.EMPTY), equalTo("groovy"));
@@ -47,7 +46,7 @@

     public void testCustomDefaultLanguage() {
         ScriptEngineRegistry scriptEngineRegistry =
-                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON)));
+                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
         ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
         ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
         String defaultLanguage = CustomScriptEngineService.NAME;
@@ -57,7 +56,7 @@

     public void testInvalidDefaultLanguage() {
         ScriptEngineRegistry scriptEngineRegistry =
-                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON)));
+                new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
         ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
         ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
         Settings settings = Settings.builder().put("script.default_lang", "C++").build();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java
index 2949dadcc49..60185dbae28 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java
@@ -51,7 +51,6 @@ import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptContextRegistry;
 import org.elasticsearch.script.ScriptEngineRegistry;
 import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptSettings;
@@ -135,7 +134,7 @@ public class AggregatorParsingTests extends ESTestCase {
                         new ScriptEngineRegistry(Collections
                                 .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME,
-                                        ScriptMode.ON)));
+                                        true)));
                 bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
                 ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
                 bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
index 6c221e4eb36..8df37e25f76 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
@@ -55,7 +55,6 @@ import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptContextRegistry;
 import org.elasticsearch.script.ScriptEngineRegistry;
 import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptSettings;
@@ -80,7 +79,7 @@ import static org.elasticsearch.cluster.service.ClusterServiceUtils.createCluste
 import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState;
 import static org.hamcrest.Matchers.equalTo;

-public abstract class BaseAggregationTestCase<AB extends AggregatorBuilder<AB>> extends ESTestCase {
+public abstract class BaseAggregationTestCase<AB extends AggregationBuilder<AB>> extends ESTestCase {

     protected static final String STRING_FIELD_NAME = "mapped_string";
     protected static final String INT_FIELD_NAME = "mapped_int";
@@ -149,7 +148,7 @@ public abstract class BaseAggregationTestCase<AB extends AggregationBuilder<AB>>
                         new ScriptEngineRegistry(Collections
                                 .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME,
-                                        ScriptMode.ON)));
+                                        true)));
                 bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
                 ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
                 bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
@@ -238,7 +237,7 @@ public abstract class BaseAggregationTestCase<AB extends AggregationBuilder<AB>>
         assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
         assertEquals(testAgg.type.name(), parser.currentName());
         assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
-        AggregatorBuilder newAgg = aggParsers.parser(testAgg.getType(), ParseFieldMatcher.STRICT).parse(testAgg.name, parseContext);
+        AggregationBuilder newAgg = aggParsers.parser(testAgg.getType(), ParseFieldMatcher.STRICT).parse(testAgg.name, parseContext);
         assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
         assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
         assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
@@ -258,7 +257,7 @@
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             output.writeNamedWriteable(testAgg);
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
-                AggregatorBuilder deserialized = in.readNamedWriteable(AggregatorBuilder.class);
+                AggregationBuilder deserialized = in.readNamedWriteable(AggregationBuilder.class);
                 assertEquals(testAgg, deserialized);
                 assertEquals(testAgg.hashCode(), deserialized.hashCode());
                 assertNotSame(testAgg, deserialized);
@@ -299,7 +298,7 @@
             agg.writeTo(output);
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
                 @SuppressWarnings("unchecked")
-                AB secondAgg = (AB) namedWriteableRegistry.getReader(AggregatorBuilder.class, agg.getWriteableName()).read(in);
+                AB secondAgg = (AB) namedWriteableRegistry.getReader(AggregationBuilder.class, agg.getWriteableName()).read(in);
                 return secondAgg;
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java
index 113b52cb0b6..c20344f35fc 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java
@@ -55,7 +55,6 @@ import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptContextRegistry;
 import org.elasticsearch.script.ScriptEngineRegistry;
 import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptSettings;
@@ -148,7 +147,7 @@ public abstract class BasePipelineAggregationTestCase
+public class ChildrenTests extends BaseAggregationTestCase<ChildrenAggregationBuilder> {

     @Override
-    protected ChildrenAggregatorBuilder createTestAggregatorBuilder() {
+    protected ChildrenAggregationBuilder createTestAggregatorBuilder() {
         String name = randomAsciiOfLengthBetween(3, 20);
         String childType = randomAsciiOfLengthBetween(5, 40);
-        ChildrenAggregatorBuilder factory = new ChildrenAggregatorBuilder(name, childType);
+        ChildrenAggregationBuilder factory = new ChildrenAggregationBuilder(name, childType);
         return factory;
     }
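The serialization checks above all follow one pattern: write the builder out, read it back, and expect an equal but distinct instance. A hedged, dependency-free sketch of that round-trip idea, using plain Java serialization in place of BytesStreamOutput/StreamInput and a String standing in for the builder:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Round-trip sketch: serialize, deserialize, compare. The real test uses
// NamedWriteable streams; java.io serialization is only a stand-in here.
public class RoundTripSketch {

    @SuppressWarnings("unchecked")
    static <T extends Serializable> T copy(T original) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(original);
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            return (T) in.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        String original = "histogram"; // stands in for an AggregationBuilder
        String copy = copy(original);
        // equal content and equal hashCode, as the test asserts
        System.out.println(original.equals(copy) && original.hashCode() == copy.hashCode());
    }
}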
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
index 58641a43b68..74ea18cc1d1 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
@@ -20,16 +20,16 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;

-public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAggregatorBuilder> {
+public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAggregationBuilder> {

     @Override
-    protected DateHistogramAggregatorBuilder createTestAggregatorBuilder() {
-        DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder("foo");
+    protected DateHistogramAggregationBuilder createTestAggregatorBuilder() {
+        DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder("foo");
         factory.field(INT_FIELD_NAME);
         if (randomBoolean()) {
             factory.interval(randomIntBetween(1, 100000));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index b1dc61a9b9e..36613cfa784 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsM
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.range.Range;
 import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.Matchers;
@@ -117,7 +117,7 @@ public class DateRangeIT extends ESIntegTestCase {
     public void testDateMath() throws Exception {
         Map<String, Object> params = new HashMap<>();
         params.put("fieldname", "date");
-        DateRangeAggregatorBuilder rangeBuilder = dateRange("range");
+        DateRangeAggregationBuilder rangeBuilder = dateRange("range");
         if (randomBoolean()) {
             rangeBuilder.field("date");
         } else {
@@ -295,8 +295,7 @@
     public void testSingleValueFieldWithDateMath() throws Exception {
-        String[] ids = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]);
-        DateTimeZone timezone = DateTimeZone.forID(randomFrom(ids));
+        DateTimeZone timezone = randomDateTimeZone();
         int timeZoneOffset = timezone.getOffset(date(2, 15));
         // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format
         String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ");
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
index 71b61c0e6e6..ee32915fdd7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
@@ -21,17 +21,14 @@ package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
-import org.joda.time.DateTimeZone;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;

-public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregatorBuilder> {
-
-    private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]);
+public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregationBuilder> {

     @Override
-    protected DateRangeAggregatorBuilder createTestAggregatorBuilder() {
+    protected DateRangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
-        DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder("foo");
+        DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder("foo");
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
@@ -60,7 +57,7 @@ public class DateRangeTests extends BaseAggregationTestCase {
+public class DiversifiedSamplerTests extends BaseAggregationTestCase<DiversifiedAggregationBuilder> {

     @Override
-    protected final DiversifiedAggregatorBuilder createTestAggregatorBuilder() {
-        DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder("foo");
+    protected final DiversifiedAggregationBuilder createTestAggregatorBuilder() {
+        DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder("foo");
         String field = randomNumericField();
         int randomFieldBranch = randomInt(3);
         switch (randomFieldBranch) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java
index 89dd3e3b137..65b80537c7a 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java
@@ -23,17 +23,17 @@ import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
 import org.elasticsearch.test.geo.RandomShapeGenerator;

-public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAggregatorBuilder> {
+public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAggregationBuilder> {

     @Override
-    protected GeoDistanceAggregatorBuilder createTestAggregatorBuilder() {
+    protected GeoDistanceAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
         GeoPoint origin = RandomShapeGenerator.randomPoint(random());
-        GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder("foo", origin);
+        GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder("foo", origin);
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java
index 34e3e266d6a..c3c8f6902b3 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java
@@ -20,14 +20,14 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;

-public class GeoHashGridTests extends BaseAggregationTestCase<GeoGridAggregatorBuilder> {
+public class GeoHashGridTests extends BaseAggregationTestCase<GeoGridAggregationBuilder> {

     @Override
-    protected GeoGridAggregatorBuilder createTestAggregatorBuilder() {
+    protected GeoGridAggregationBuilder createTestAggregatorBuilder() {
         String name = randomAsciiOfLengthBetween(3, 20);
-        GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(name);
+        GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(name);
         if (randomBoolean()) {
             int precision = randomIntBetween(1, 12);
             factory.precision(precision);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java
index ca2b5c9d6c0..a874eff839b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java
@@ -20,13 +20,13 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;

-public class GlobalTests extends BaseAggregationTestCase<GlobalAggregatorBuilder> {
+public class GlobalTests extends BaseAggregationTestCase<GlobalAggregationBuilder> {

     @Override
-    protected GlobalAggregatorBuilder createTestAggregatorBuilder() {
-        return new GlobalAggregatorBuilder(randomAsciiOfLengthBetween(3, 20));
+    protected GlobalAggregationBuilder createTestAggregatorBuilder() {
+        return new GlobalAggregationBuilder(randomAsciiOfLengthBetween(3, 20));
     }

 }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
index 1cd930ecc31..ac0d6d0df8b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
@@ -22,13 +22,13 @@ package org.elasticsearch.search.aggregations.bucket;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;

-public class HistogramTests extends BaseAggregationTestCase<HistogramAggregatorBuilder> {
+public class HistogramTests extends BaseAggregationTestCase<HistogramAggregationBuilder> {

     @Override
-    protected HistogramAggregatorBuilder createTestAggregatorBuilder() {
-        HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder("foo");
+    protected HistogramAggregationBuilder createTestAggregatorBuilder() {
+        HistogramAggregationBuilder factory = new HistogramAggregationBuilder("foo");
         factory.field(INT_FIELD_NAME);
         factory.interval(randomIntBetween(1, 100000));
         if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
index 468e8503b0a..3a2abda6aa7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
@@ -37,7 +37,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.Range;
 import org.elasticsearch.test.ESIntegTestCase;

diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java
index db31f576e0c..5d86571f08f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java
@@ -24,9 +24,9 @@ import java.net.UnknownHostException;

 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;

-public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregatorBuilder> {
+public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregationBuilder> {

     private static String randomIp(boolean v4) {
         try {
@@ -45,9 +45,9 @@ public class IpRangeTests extends BaseAggregationTestCase
> builder();
+    public abstract ValuesSourceAggregationBuilder.LeafOnly> builder();

     public String sortKey() {
         return name;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java
index b9c60dab1be..22b4eae8421 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java
@@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;

-public class RangeTests extends BaseAggregationTestCase<RangeAggregatorBuilder> {
+public class RangeTests extends BaseAggregationTestCase<RangeAggregationBuilder> {

     @Override
-    protected RangeAggregatorBuilder createTestAggregatorBuilder() {
+    protected RangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
-        RangeAggregatorBuilder factory = new RangeAggregatorBuilder("foo");
+        RangeAggregationBuilder factory = new RangeAggregationBuilder("foo");
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
index f42d213bb57..980d792013a 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
@@ -24,7 +24,7 @@ import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
 import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
 import org.elasticsearch.search.aggregations.metrics.max.Max;
@@ -123,7 +123,7 @@ public class SamplerIT extends ESIntegTestCase {
     }

     public void testSimpleSampler() throws Exception {
-        SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100);
+        SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
         sampleAgg.subAggregation(terms("authors").field("author"));
         SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_AND_FETCH)
                 .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).execute().actionGet();
@@ -140,7 +140,7 @@
     public void testUnmappedChildAggNoDiversity() throws Exception {
-        SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100);
+        SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
         sampleAgg.subAggregation(terms("authors").field("author"));
         SearchResponse response = client().prepareSearch("idx_unmapped")
                 .setSearchType(SearchType.QUERY_AND_FETCH)
@@ -157,7 +157,7 @@
     public void testPartiallyUnmappedChildAggNoDiversity() throws Exception {
-        SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100);
+        SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
         sampleAgg.subAggregation(terms("authors").field("author"));
         SearchResponse response = client().prepareSearch("idx_unmapped", "test")
                 .setSearchType(SearchType.QUERY_AND_FETCH)
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java
index 8d792fd72ff..e4de490f6b2 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java
@@ -20,13 +20,13 @@
 package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;

-public class SamplerTests extends BaseAggregationTestCase<SamplerAggregatorBuilder> {
+public class SamplerTests extends BaseAggregationTestCase<SamplerAggregationBuilder> {

     @Override
-    protected final SamplerAggregatorBuilder createTestAggregatorBuilder() {
-        SamplerAggregatorBuilder factory = new SamplerAggregatorBuilder("foo");
+    protected final SamplerAggregationBuilder createTestAggregatorBuilder() {
+        SamplerAggregationBuilder factory = new SamplerAggregationBuilder("foo");
         if (randomBoolean()) {
             factory.shardSize(randomIntBetween(1, 1000));
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
index 373eb0e6e96..897125ee2fa 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java
@@ -24,7 +24,7 @@ import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
@@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude

 import java.util.SortedSet;
 import java.util.TreeSet;

-public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTermsAggregatorBuilder> {
+public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTermsAggregationBuilder> {

     private static final String[] executionHints;

@@ -50,9 +50,9 @@ public class SignificantTermsTests extends BaseAggregationTestCase {
+public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder> {

     private static final String[] executionHints;

@@ -46,9 +46,9 @@ public class TermsTests extends BaseAggregationTestCase
     }

     @Override
-    protected TermsAggregatorBuilder createTestAggregatorBuilder() {
+    protected TermsAggregationBuilder createTestAggregatorBuilder() {
         String name = randomAsciiOfLengthBetween(3, 20);
-        TermsAggregatorBuilder factory = new TermsAggregatorBuilder(name, null);
+        TermsAggregationBuilder factory = new TermsAggregationBuilder(name, null);
         String field = randomAsciiOfLengthBetween(3, 20);
         int randomFieldBranch = randomInt(2);
         switch (randomFieldBranch) {
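The createTestAggregatorBuilder() methods in these tests share one idiom: draw a small random int and switch on it so each run exercises a different, equally valid configuration of the builder. A minimal stand-alone illustration of the idiom (class and return values are hypothetical):

import java.util.Random;

// The random-branch idiom from the builder tests: each run picks one of a few
// equivalent configurations, so repeated runs cover all of them over time.
public class RandomBranchSketch {

    static String configureBuilder(Random random) {
        switch (random.nextInt(3)) {
            case 0:
                return "field only";
            case 1:
                return "field plus script";
            default:
                return "script only";
        }
    }

    public static void main(String[] args) {
        System.out.println(configureBuilder(new Random()));
    }
}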
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
index dbe10e2ff9d..272aa70d48b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
@@ -123,7 +123,7 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase {
         AggregationContext context = new AggregationContext(searchContext);

         AggregatorFactories.Builder builder = AggregatorFactories.builder();
-        NestedAggregatorBuilder factory = new NestedAggregatorBuilder("test", "nested_field");
+        NestedAggregationBuilder factory = new NestedAggregationBuilder("test", "nested_field");
         builder.addAggregator(factory);
         AggregatorFactories factories = builder.build(context, null);
         searchContext.aggregations(new SearchContextAggregations(factories));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java
index 6ea5b3791d8..29dde100a08 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java
@@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.nested;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;

-public class NestedTests extends BaseAggregationTestCase<NestedAggregatorBuilder> {
+public class NestedTests extends BaseAggregationTestCase<NestedAggregationBuilder> {

     @Override
-    protected NestedAggregatorBuilder createTestAggregatorBuilder() {
-        return new NestedAggregatorBuilder(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(3, 40));
+    protected NestedAggregationBuilder createTestAggregatorBuilder() {
+        return new NestedAggregationBuilder(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(3, 40));
     }

 }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java
index 1a45c550bc1..97dbf3718af 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java
@@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.nested;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;

-public class ReverseNestedTests extends BaseAggregationTestCase<ReverseNestedAggregatorBuilder> {
+public class ReverseNestedTests extends BaseAggregationTestCase<ReverseNestedAggregationBuilder> {

     @Override
-    protected ReverseNestedAggregatorBuilder createTestAggregatorBuilder() {
-        ReverseNestedAggregatorBuilder factory = new ReverseNestedAggregatorBuilder(randomAsciiOfLengthBetween(1, 20));
+    protected ReverseNestedAggregationBuilder createTestAggregatorBuilder() {
+        ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
             factory.path(randomAsciiOfLengthBetween(3, 40));
         }
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
index df449aeeaf2..8dc015b30ed 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
@@ -257,7 +257,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
     protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
             SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException {
-        SignificantTermsAggregatorBuilder stBuilder = significantTerms("testagg");
+        SignificantTermsAggregationBuilder stBuilder = significantTerms("testagg");
         stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200);
         XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder();
         stBuilder.internalXContent(stXContentBuilder, null);
@@ -271,7 +271,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
         IndicesQueriesRegistry registry = new IndicesQueriesRegistry();
         QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT);
         stParser.nextToken();
-        SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser(
+        SignificantTermsAggregationBuilder aggregatorFactory = (SignificantTermsAggregationBuilder) new SignificantTermsParser(
                 significanceHeuristicParserRegistry, registry).parse("testagg", parseContext);
         stParser.nextToken();
         assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
index 58d7fa70d62..f1ccf344a7c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java
@@ -22,9 +22,9 @@ package org.elasticsearch.search.aggregations.metrics;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

-public abstract class AbstractNumericMetricTestCase<AF extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AF>>
+public abstract class AbstractNumericMetricTestCase<AF extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AF>>
         extends BaseAggregationTestCase<AF> {

     @Override
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
index a01797ccf49..a19b274c4db 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
@@ -28,7 +28,6 @@ import org.elasticsearch.script.LeafSearchScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptEngineRegistry;
 import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.script.SearchScript;
@@ -370,7 +369,7 @@ public class AvgIT extends AbstractNumericTestCase {
         }

         public void onModule(ScriptModule module) {
-            module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, ScriptMode.ON));
+            module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, true));
         }
     }
@@ -490,7 +489,7 @@
         }

         public void onModule(ScriptModule module) {
-            module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, ScriptMode.ON));
+            module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true));
         }
     }
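The AvgIT change shows the registration pattern plugins use after this patch: the third argument of ScriptEngineRegistration is now a plain boolean ("enabled by default") instead of ScriptMode.ON. The shape of the call, with a hypothetical engine class standing in for ExtractFieldScriptEngine:

// Registration pattern as it appears in this patch; MyScriptEngine is a
// hypothetical stand-in for a plugin's ScriptEngineService implementation.
public void onModule(ScriptModule module) {
    module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
            MyScriptEngine.class, MyScriptEngine.NAME, true)); // true = enabled by default
}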
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -public class AvgTests extends AbstractNumericMetricTestCase { +public class AvgTests extends AbstractNumericMetricTestCase { @Override - protected AvgAggregatorBuilder doCreateTestAggregatorFactory() { - return new AvgAggregatorBuilder("foo"); + protected AvgAggregationBuilder doCreateTestAggregatorFactory() { + return new AvgAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java index 4a7ca7e8b38..3f78cc17aa9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -public class ExtendedStatsTests extends AbstractNumericMetricTestCase { +public class ExtendedStatsTests extends AbstractNumericMetricTestCase { @Override - protected ExtendedStatsAggregatorBuilder doCreateTestAggregatorFactory() { - ExtendedStatsAggregatorBuilder factory = new ExtendedStatsAggregatorBuilder("foo"); + protected ExtendedStatsAggregationBuilder doCreateTestAggregatorFactory() { + ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder("foo"); if (randomBoolean()) { factory.sigma(randomDoubleBetween(0.0, 10.0, true)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java index 8a6a4373691..1b563d531a8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -public class FilterTests extends BaseAggregationTestCase { +public class FilterTests extends BaseAggregationTestCase { @Override - protected FilterAggregatorBuilder createTestAggregatorBuilder() { - FilterAggregatorBuilder factory = new FilterAggregatorBuilder(randomAsciiOfLengthBetween(1, 20), + protected FilterAggregationBuilder createTestAggregatorBuilder() { + FilterAggregationBuilder factory = new FilterAggregationBuilder(randomAsciiOfLengthBetween(1, 20), QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); // NORELEASE make RandomQueryBuilder work outside of the // AbstractQueryTestCase diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java index cd2dae53327..89fc38b7cd8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java @@ -24,15 +24,15 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; -public class FiltersTests extends BaseAggregationTestCase { +public class FiltersTests extends BaseAggregationTestCase { @Override - protected FiltersAggregatorBuilder createTestAggregatorBuilder() { + protected FiltersAggregationBuilder createTestAggregatorBuilder() { int size = randomIntBetween(1, 20); - FiltersAggregatorBuilder factory; + FiltersAggregationBuilder factory; if (randomBoolean()) { KeyedFilter[] filters = new KeyedFilter[size]; int i = 0; @@ -40,13 +40,13 @@ public class FiltersTests extends BaseAggregationTestCase { +public class GeoBoundsTests extends BaseAggregationTestCase { @Override - protected GeoBoundsAggregatorBuilder createTestAggregatorBuilder() { - GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected GeoBoundsAggregationBuilder createTestAggregatorBuilder() { + GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); String field = randomAsciiOfLengthBetween(3, 20); factory.field(field); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java index c912c18a82e..1ea21a1ff1d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -public class GeoCentroidTests extends BaseAggregationTestCase { +public class GeoCentroidTests extends BaseAggregationTestCase { @Override - protected GeoCentroidAggregatorBuilder createTestAggregatorBuilder() { - GeoCentroidAggregatorBuilder factory = new GeoCentroidAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected GeoCentroidAggregationBuilder createTestAggregatorBuilder() { + GeoCentroidAggregationBuilder factory = new GeoCentroidAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java index a9fe4654c9d..6ffd824aa3c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -public class MaxTests extends AbstractNumericMetricTestCase { +public class MaxTests extends AbstractNumericMetricTestCase { @Override - protected MaxAggregatorBuilder doCreateTestAggregatorFactory() { - return new MaxAggregatorBuilder("foo"); + protected MaxAggregationBuilder doCreateTestAggregatorFactory() { + return new MaxAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java index 54512f579f3..eed4059ade7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -public class MinTests extends AbstractNumericMetricTestCase { +public class MinTests extends AbstractNumericMetricTestCase { @Override - protected MinAggregatorBuilder doCreateTestAggregatorFactory() { - return new MinAggregatorBuilder("foo"); + protected MinAggregationBuilder doCreateTestAggregatorFactory() { + return new MinAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java index 3f49da5eb6e..979747ade2e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; -public class MissingTests extends BaseAggregationTestCase { +public class MissingTests extends BaseAggregationTestCase { @Override - protected final MissingAggregatorBuilder createTestAggregatorBuilder() { - MissingAggregatorBuilder factory = new MissingAggregatorBuilder("foo", null); + protected final MissingAggregationBuilder createTestAggregatorBuilder() { + MissingAggregationBuilder factory = new MissingAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java index 4636e4ed174..1907733fbd8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import 
org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -public class PercentileRanksTests extends BaseAggregationTestCase { +public class PercentileRanksTests extends BaseAggregationTestCase { @Override - protected PercentileRanksAggregatorBuilder createTestAggregatorBuilder() { - PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected PercentileRanksAggregationBuilder createTestAggregatorBuilder() { + PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java index 674197cffbf..b5539f8c1be 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -public class PercentilesTests extends BaseAggregationTestCase { +public class PercentilesTests extends BaseAggregationTestCase { @Override - protected PercentilesAggregatorBuilder createTestAggregatorBuilder() { - PercentilesAggregatorBuilder factory = new PercentilesAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected PercentilesAggregationBuilder createTestAggregatorBuilder() { + PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java index a4e12b56d8e..e4f96fae762 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java @@ -22,16 +22,16 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import java.util.HashMap; import java.util.Map; -public class ScriptedMetricTests extends BaseAggregationTestCase { +public class ScriptedMetricTests extends BaseAggregationTestCase { @Override - protected ScriptedMetricAggregatorBuilder createTestAggregatorBuilder() { - ScriptedMetricAggregatorBuilder factory = new ScriptedMetricAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected ScriptedMetricAggregationBuilder createTestAggregatorBuilder() { + 
ScriptedMetricAggregationBuilder factory = new ScriptedMetricAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.initScript(randomScript("initScript")); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java index 5db4e1e332b..76a8e9aa98a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -public class StatsTests extends AbstractNumericMetricTestCase<StatsAggregatorBuilder> { +public class StatsTests extends AbstractNumericMetricTestCase<StatsAggregationBuilder> { @Override - protected StatsAggregatorBuilder doCreateTestAggregatorFactory() { - return new StatsAggregatorBuilder("foo"); + protected StatsAggregationBuilder doCreateTestAggregatorFactory() { + return new StatsAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index c636508bbe1..641696722c9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.SearchScript; @@ -365,7 +364,7 @@ public class SumIT extends AbstractNumericTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, true)); } } @@ -487,7 +486,7 @@ public class SumIT extends AbstractNumericTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true)); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java index a6d9f0bd270..edc6d4edef0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -public class SumTests extends AbstractNumericMetricTestCase<SumAggregatorBuilder> { +public class SumTests extends AbstractNumericMetricTestCase<SumAggregationBuilder>
{ @Override - protected SumAggregatorBuilder doCreateTestAggregatorFactory() { - return new SumAggregatorBuilder("foo"); + protected SumAggregationBuilder doCreateTestAggregatorFactory() { + return new SumAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index 00bb04dde9f..c79ab04e492 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; @@ -38,11 +38,11 @@ import java.util.List; import static org.hamcrest.Matchers.containsString; -public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregatorBuilder> { +public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuilder> { @Override - protected final TopHitsAggregatorBuilder createTestAggregatorBuilder() { - TopHitsAggregatorBuilder factory = new TopHitsAggregatorBuilder("foo"); + protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { + TopHitsAggregationBuilder factory = new TopHitsAggregationBuilder("foo"); if (randomBoolean()) { factory.from(randomIntBetween(0, 10000)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 86f531ce3e9..04bef292051 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.SearchScript; @@ -224,7 +223,7 @@ public class ValueCountIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true)); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java index c9b601c4e8b..99d4d41839c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java @@ -21,13 +21,13 @@ package
org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; -public class ValueCountTests extends BaseAggregationTestCase<ValueCountAggregatorBuilder> { +public class ValueCountTests extends BaseAggregationTestCase<ValueCountAggregationBuilder> { @Override - protected final ValueCountAggregatorBuilder createTestAggregatorBuilder() { - ValueCountAggregatorBuilder factory = new ValueCountAggregatorBuilder("foo", null); + protected final ValueCountAggregationBuilder createTestAggregatorBuilder() { + ValueCountAggregationBuilder factory = new ValueCountAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java index a769a71b8e5..ab0377c6331 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java @@ -22,11 +22,11 @@ package org.elasticsearch.search.aggregations.metrics.cardinality; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -public class CardinalityTests extends BaseAggregationTestCase<CardinalityAggregatorBuilder> { +public class CardinalityTests extends BaseAggregationTestCase<CardinalityAggregationBuilder> { @Override - protected final CardinalityAggregatorBuilder createTestAggregatorBuilder() { - CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder("foo", null); + protected final CardinalityAggregationBuilder createTestAggregatorBuilder() { + CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index 43b3b4d357c..ce9394692de 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -20,11 +20,11 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import
org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -109,27 +109,27 @@ public class PipelineAggregationHelperTests extends ESTestCase { * @param values Array of values to compute metric for * @param metric A metric builder which defines what kind of metric should be returned for the values */ - public static double calculateMetric(double[] values, ValuesSourceAggregatorBuilder metric) { + public static double calculateMetric(double[] values, ValuesSourceAggregationBuilder metric) { - if (metric instanceof MinAggregatorBuilder) { + if (metric instanceof MinAggregationBuilder) { double accumulator = Double.POSITIVE_INFINITY; for (double value : values) { accumulator = Math.min(accumulator, value); } return accumulator; - } else if (metric instanceof MaxAggregatorBuilder) { + } else if (metric instanceof MaxAggregationBuilder) { double accumulator = Double.NEGATIVE_INFINITY; for (double value : values) { accumulator = Math.max(accumulator, value); } return accumulator; - } else if (metric instanceof SumAggregatorBuilder) { + } else if (metric instanceof SumAggregationBuilder) { double accumulator = 0; for (double value : values) { accumulator += value; } return accumulator; - } else if (metric instanceof AvgAggregatorBuilder) { + } else if (metric instanceof AvgAggregationBuilder) { double accumulator = 0; for (double value : values) { accumulator += value; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 94ac6fc10ab..c16d8e8062e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -79,7 +79,7 @@ public class MovAvgIT extends ESIntegTestCase { static int period; static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceAggregatorBuilder> metric; + static ValuesSourceAggregationBuilder> metric; static List mockHisto; static Map> testValues; @@ -1289,8 +1289,8 @@ public class MovAvgIT extends ESIntegTestCase { } } - private ValuesSourceAggregatorBuilder> randomMetric(String name, - String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, + String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java index 66961c2fcbc..1b263d1af09 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java @@ -29,7 +29,7 @@ import 
org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -61,7 +61,7 @@ public class SerialDiffIT extends ESIntegTestCase { static int numBuckets; static int lag; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceAggregatorBuilder> metric; + static ValuesSourceAggregationBuilder> metric; static List mockHisto; static Map> testValues; @@ -81,7 +81,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private ValuesSourceAggregatorBuilder> randomMetric(String name, String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 077d978a4ce..661643b0e8b 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -62,7 +62,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; @@ -155,7 +154,7 @@ public class SearchSourceBuilderTests extends ESTestCase { ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, - ScriptMode.ON))); + true))); bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts); bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry); diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 1a258abc9cc..90f5c65c066 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -1321,29 +1321,6 @@ public class ChildQuerySearchIT extends ESIntegTestCase { } } - public void testAddParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder() - .put(indexSettings()) - .put("index.refresh_interval", -1))); - ensureGreen(); - - String parentId = "p1"; - client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); - refresh(); - - try { - assertAcked(client().admin() - .indices() - .preparePutMapping("test") - .setType("child") - .setSource("_parent", "type=parent")); - fail("Shouldn't be able the add the _parent field pointing to an already existing parent type"); - } catch (IllegalArgumentException 
e) { - assertThat(e.getMessage(), equalTo("can't add a _parent field that points to an already existing type")); - } - } - public void testParentChildCaching() throws Exception { assertAcked(prepareCreate("test") .setSettings( diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index d5c6bf98835..9ebbb5b42e0 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -289,7 +289,7 @@ public class HighlightBuilderTests extends ESTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, - null, null, null, null) { + null, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java index 0065e46875a..067d84d5e00 100644 --- a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java @@ -161,8 +161,8 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { client().prepareIndex("test", "queries", "3") .setSource(jsonBuilder().startObject().field("query", spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true) ).endObject()) .get(); @@ -172,12 +172,12 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) - .clause(spanTermQuery("field1", "lazy")) - .clause(spanTermQuery("field1", "dog")) + .addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(2) ).endObject()) .get(); @@ -187,12 +187,12 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) - .clause(spanTermQuery("field1", "lazy")) - .clause(spanTermQuery("field1", "dog")) + .addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(3) ).endObject()) .get(); diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 
9dd32e091cd..882226afe63 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1436,7 +1436,7 @@ public class SearchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test").setQuery( spanNearQuery(spanTermQuery("description", "foo"), 3) - .clause(spanTermQuery("description", "other"))).get(); + .addClause(spanTermQuery("description", "other"))).get(); assertHitCount(searchResponse, 3L); } @@ -1481,17 +1481,17 @@ public class SearchQueryIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get(); assertHitCount(searchResponse, 1L); } diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index d0f2c0492da..5cc59046433 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -158,7 +158,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, - null, null, null, null) { + null, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 21b71508bbb..e964b975bb2 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -232,7 +232,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST } }); return new QueryShardContext(idxSettings, bitsetFilterCache, ifds, null, null, scriptService, - indicesQueriesRegistry, null, null, null, null) { + indicesQueriesRegistry, null, null, null) { @Override public MappedFieldType
fieldMapper(String name) { return provideMappedFieldType(name); diff --git a/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java index fceedff8e70..84c3a03f2c8 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java @@ -20,7 +20,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.test.ESBlobStoreRepositoryIntegTestCase; +import org.elasticsearch.repositories.ESBlobStoreRepositoryIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 5ed63b519a4..7ca30132a4a 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -640,7 +640,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.builder().put("location", repositoryLocation))); - createIndex("test-idx"); + prepareCreate("test-idx").setSettings(Settings.builder().put("index.allocation.max_retries", Integer.MAX_VALUE)).get(); ensureGreen(); logger.info("--> indexing some data"); diff --git a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java index e331678c1e9..2212f162eb6 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java @@ -59,7 +59,7 @@ public class ScalingThreadPoolTests extends ESThreadPoolTestCase { final int expectedSize; if (sizeBasedOnNumberOfProcessors < min || randomBoolean()) { - expectedSize = randomIntBetween(min, 16); + expectedSize = randomIntBetween(Math.max(1, min), 16); builder.put("threadpool." 
+ threadPoolName + ".size", expectedSize); } else { expectedSize = sizeBasedOnNumberOfProcessors; diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 65553a4a90e..0445da61096 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -43,7 +43,6 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; @@ -94,7 +93,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.NAME, true)); } } @@ -181,7 +180,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.NAME, true)); } } @@ -261,7 +260,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.NAME, true)); } } @@ -341,7 +340,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.NAME, true)); } } diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml index 515e4320fd2..548b186e46f 100644 --- a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml +++ b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml @@ -1,6 +1,7 @@ -# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console +# you can override using a command-line parameter +# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE) +logger.level: INFO +rootLogger: ${logger.level}, console logger: test: TRACE, console diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 244ab8caa25..ac5e9afec47 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -75,6 +75,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "mapper-murmur3", "mapper-size", "repository-azure", + 
"repository-gcs", "repository-hdfs", "repository-s3", "store-smb"] @@ -202,7 +203,7 @@ def smoke_test_release(release, files, expected_hash, plugins): headers = {} print(' Starting elasticsearch deamon from [%s]' % es_dir) try: - run('%s; %s -Ees.node.name=smoke_tester -Ees.cluster.name=prepare_release -Ees.script.inline=true -Ees.script.stored=true -Ees.repositories.url.allowed_urls=http://snapshot.test* %s -Ees.pidfile=%s -Ees.node.portsfile=true' + run('%s; %s -Enode.name=smoke_tester -Ecluster.name=prepare_release -Escript.inline=true -Escript.stored=true -Erepositories.url.allowed_urls=http://snapshot.test* %s -Epidfile=%s -Enode.portsfile=true' % (java_exe(), es_run_path, '-d', os.path.join(es_dir, 'es-smoke.pid'))) if not wait_for_node_startup(es_dir, header=headers): print("elasticsearch logs:") diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index a01643db2b3..f04008ba796 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -79,7 +79,7 @@ fi # Define other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR" export ES_JAVA_OPTS export JAVA_HOME @@ -116,15 +116,6 @@ case "$1" in exit 0 fi - # Prepare environment - # Check $DATA_DIR for a comma - if [ "${DATA_DIR#*,}" != "$DATA_DIR" ]; then - # $DATA_DIR contains a comma, so we should not mkdir it - mkdir -p "$LOG_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" - else - mkdir -p "$LOG_DIR" "$DATA_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" "$DATA_DIR" - fi - # Ensure that the PID_DIR exists (it is cleaned at OS startup time) if [ -n "$PID_DIR" ] && [ ! -e "$PID_DIR" ]; then mkdir -p "$PID_DIR" && chown "$ES_USER":"$ES_GROUP" "$PID_DIR" diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 7bcb5692a88..8f1d93dcbdc 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -114,7 +114,7 @@ start() { cd $ES_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR + daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR retval=$? 
echo [ $retval -eq 0 ] && touch $lockfile diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index ccbf4650a22..0c99464c4f6 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -21,9 +21,9 @@ ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -p ${PID_DIR}/elasticsearch.pid \ - -Ees.default.path.logs=${LOG_DIR} \ - -Ees.default.path.data=${DATA_DIR} \ - -Ees.default.path.conf=${CONF_DIR} + -Edefault.path.logs=${LOG_DIR} \ + -Edefault.path.data=${DATA_DIR} \ + -Edefault.path.conf=${CONF_DIR} StandardOutput=journal StandardError=inherit diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin index 8a3b6676a98..06f8c5b8c27 100755 --- a/distribution/src/main/resources/bin/elasticsearch-plugin +++ b/distribution/src/main/resources/bin/elasticsearch-plugin @@ -81,10 +81,10 @@ fi HOSTNAME=`hostname | cut -d. -f1` export HOSTNAME -declare -a properties=(-Delasticsearch -Des.path.home="$ES_HOME") +declare -a args=("$@") if [ -e "$CONF_DIR" ]; then - properties=("${properties[@]}" -Des.default.path.conf="$CONF_DIR") + args=("${args[@]}" -Edefault.path.conf="$CONF_DIR") fi -exec "$JAVA" $ES_JAVA_OPTS "${properties[@]}" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "$@" +exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "${args[@]}" diff --git a/distribution/src/main/resources/bin/service.bat b/distribution/src/main/resources/bin/service.bat index 3d73c37b823..81b6c8a5df5 100644 --- a/distribution/src/main/resources/bin/service.bat +++ b/distribution/src/main/resources/bin/service.bat @@ -163,7 +163,7 @@ set ES_JVM_OPTIONS="%ES_HOME%\config\jvm.options" if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;% @setlocal -for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%"`) do set JVM_OPTIONS=!JVM_OPTIONS!%%a; +for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%" ^| findstr /b /v "\-server \-client"`) do set JVM_OPTIONS=!JVM_OPTIONS!%%a; @endlocal & set ES_JAVA_OPTS=%JVM_OPTIONS%%ES_JAVA_OPTS% if "%ES_JAVA_OPTS:~-1%"==";" set ES_JAVA_OPTS=%ES_JAVA_OPTS:~0,-1% diff --git a/distribution/src/main/resources/config/logging.yml b/distribution/src/main/resources/config/logging.yml index 187e79cffa0..11cd181ebd0 100644 --- a/distribution/src/main/resources/config/logging.yml +++ b/distribution/src/main/resources/config/logging.yml @@ -1,6 +1,7 @@ -# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console, file +# you can override using a command-line parameter +# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE) +logger.level: INFO +rootLogger: ${logger.level}, console, file logger: # log action execution errors for easier debugging action: DEBUG diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 7cb7f396608..08ad129f22f 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -135,7 +135,7 @@ can do this as follows: [source,sh] --------------------- -sudo bin/elasticsearch-plugin -Ees.path.conf=/path/to/custom/config/dir install +sudo bin/elasticsearch-plugin 
-Epath.conf=/path/to/custom/config/dir install --------------------- You can also set the `CONF_DIR` environment variable to the custom config diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc new file mode 100644 index 00000000000..bed78b4cbbf --- /dev/null +++ b/docs/plugins/repository-gcs.asciidoc @@ -0,0 +1,216 @@ +[[repository-gcs]] +=== Google Cloud Storage Repository Plugin + +The GCS repository plugin adds support for using the https://cloud.google.com/storage/[Google Cloud Storage] +service as a repository for {ref}/modules-snapshots.html[Snapshot/Restore]. + +[[repository-gcs-install]] +[float] +==== Installation + +This plugin can be installed using the plugin manager: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin install repository-gcs +---------------------------------------------------------------- + +NOTE: The plugin requires additional permissions to be granted in order to work. + +The plugin must be installed on every node in the cluster, and each node must +be restarted after installation. + +[[repository-gcs-remove]] +[float] +==== Removal + +The plugin can be removed with the following command: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin remove repository-gcs +---------------------------------------------------------------- + +The node must be stopped before removing the plugin. + +[[repository-gcs-usage]] +==== Getting started + +The plugin uses the https://cloud.google.com/storage/docs/json_api/[Google Cloud Storage JSON API] (v1) +to connect to the Storage service. If this is the first time you use Google Cloud Storage, you first +need to connect to the https://console.cloud.google.com/[Google Cloud Platform Console] and create a new +project. Once your project is created, you must enable the Cloud Storage Service for your project. + +[[repository-gcs-creating-bucket]] +===== Creating a Bucket + +The Google Cloud Storage service uses the concept of a https://cloud.google.com/storage/docs/key-terms[Bucket] +as a container for all the data. Buckets are usually created using the +https://console.cloud.google.com/[Google Cloud Platform Console]. The plugin will not automatically +create buckets. + +To create a new bucket: + +1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] +2. Select your project +3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser] +4. Click the "Create Bucket" button +5. Enter the name of the new bucket +6. Select a storage class +7. Select a location +8. Click the "Create" button + +The bucket should now be created. + +[[repository-gcs-service-authentication]] +===== Service Authentication + +The plugin supports two authentication modes: + +* the built-in <<repository-gcs-using-compute-engine,Compute Engine authentication>>. This mode is +recommended if your Elasticsearch node is running on a Compute Engine virtual machine. + +* the <<repository-gcs-using-service-account,Service Account>> authentication mode. + +[[repository-gcs-using-compute-engine]] +===== Using Compute Engine +When running on Compute Engine, the plugin uses Google's built-in authentication mechanism to +authenticate to the Storage service. Compute Engine virtual machines are usually associated with a +default service account. This service account can be found in the VM instance details in the +https://console.cloud.google.com/compute/[Compute Engine console].
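+
+As a quick sanity check, you can ask the Compute Engine metadata server which
+service account the VM is running with. A minimal example, assuming `curl` is
+available on the instance (the endpoint below is the standard GCE metadata API):
+
+[source,sh]
+----------------------------------------------------------------
+# Prints the email of the default service account attached to this VM
+curl -H "Metadata-Flavor: Google" \
+     "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/email"
+----------------------------------------------------------------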
+ +To indicate that a repository should use the built-in authentication, +the repository `service_account` setting must be set to `_default_`: + +[source,json] +---- +PUT _snapshot/my_gcs_repository_on_compute_engine +{ + "type": "gcs", + "settings": { + "bucket": "my_bucket", + "service_account": "_default_" + } +} +---- +// CONSOLE + +NOTE: The Compute Engine VM must be allowed to use the Storage service. This can be done only at VM +creation time, when "Storage" access can be configured with "Read/Write" permission. Check your +instance details in the "Cloud API access scopes" section. + +[[repository-gcs-using-service-account]] +===== Using a Service Account +If your Elasticsearch node is not running on Compute Engine, or if you don't want to use Google's +built-in authentication mechanism, you can authenticate to the Storage service using a +https://cloud.google.com/iam/docs/overview#service_account[Service Account] file. + +To create a service account file: + +1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] +2. Select your project +3. Go to the https://console.cloud.google.com/permissions[Permission] tab +4. Select the https://console.cloud.google.com/permissions/serviceaccounts[Service Accounts] tab +5. Click on "Create service account" +6. Once created, select the new service account and download a JSON key file + +A service account file looks like this: + +[source,json] +---- +{ + "type": "service_account", + "project_id": "your-project-id", + "private_key_id": "...", + "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n", + "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com", + "client_id": "...", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "..." +} +---- + +This file must be copied into the `config` directory of the Elasticsearch installation on +every node of the cluster. + +To indicate that a repository should use a service account file: + +[source,json] +---- +PUT _snapshot/my_gcs_repository +{ + "type": "gcs", + "settings": { + "bucket": "my_bucket", + "service_account": "service_account.json" + } +} +---- +// CONSOLE + + +[[repository-gcs-bucket-permission]] +===== Set Bucket Permission + +The service account used to access the bucket must have "Writer" access to the bucket: + +1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] +2. Select your project +3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser] +4. Select the bucket and "Edit bucket permission" +5. The service account must be configured as a "User" with "Writer" access + + +[[repository-gcs-repository]] +==== Create a Repository + +Once everything is installed and every node is started, you can create a new repository that +uses Google Cloud Storage to store snapshots: + +[source,json] +---- +PUT _snapshot/my_gcs_repository +{ + "type": "gcs", + "settings": { + "bucket": "my_bucket", + "service_account": "service_account.json" + } +} +---- +// CONSOLE + +The following settings are supported: + +`bucket`:: + + The name of the bucket to be used for snapshots. (Mandatory) + +`service_account`:: + + The service account to use. It can be a relative path to a service account JSON file + or the value `_default_`, which indicates that the built-in Compute Engine service account should be used.
+ +`base_path`:: + + Specifies the path within the bucket to the repository data. Defaults to + the root of the bucket. + +`chunk_size`:: + + Big files can be broken down into chunks during snapshotting if needed. + The chunk size can be specified in bytes or by using size value notation, + e.g. `1g`, `10m`, `5k`. Defaults to `100m`. + +`compress`:: + + When set to `true`, metadata files are stored in compressed format. This + setting doesn't affect index files that are already compressed by default. + Defaults to `false`. + +`application_name`:: + + Name used by the plugin when it uses the Google Cloud JSON API. Setting + a custom name can be useful to identify your cluster when request + statistics are logged in the Google Cloud Platform. Defaults to `repository-gcs`. diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 08557b9e03e..9a4e90bebd7 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -22,6 +22,10 @@ The Azure repository plugin adds support for using Azure as a repository. The Hadoop HDFS Repository plugin adds support for using HDFS as a repository. +<<repository-gcs>>:: + +The GCS repository plugin adds support for using the Google Cloud Storage service as a repository. + [float] === Community contributed repository plugins @@ -37,3 +41,4 @@ include::repository-s3.asciidoc[] include::repository-hdfs.asciidoc[] +include::repository-gcs.asciidoc[] diff --git a/docs/reference/analysis/analyzers/configuring.asciidoc b/docs/reference/analysis/analyzers/configuring.asciidoc index c93d800afb9..2ce13702e00 100644 --- a/docs/reference/analysis/analyzers/configuring.asciidoc +++ b/docs/reference/analysis/analyzers/configuring.asciidoc @@ -64,3 +64,38 @@ POST my_index/_analyze English stop words will be removed.
The resulting terms are: `[ old, brown, cow ]` + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "old", + "start_offset": 4, + "end_offset": 7, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "brown", + "start_offset": 8, + "end_offset": 13, + "type": "<ALPHANUM>", + "position": 2 + }, + { + "token": "cow", + "start_offset": 14, + "end_offset": 17, + "type": "<ALPHANUM>", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index eccd16c23be..1707a9a399b 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -84,6 +84,48 @@ POST my_index/_analyze -------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "is", + "start_offset": 0, + "end_offset": 2, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "this", + "start_offset": 3, + "end_offset": 7, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "deja", + "start_offset": 11, + "end_offset": 15, + "type": "<ALPHANUM>", + "position": 2 + }, + { + "token": "vu", + "start_offset": 16, + "end_offset": 22, + "type": "<ALPHANUM>", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -119,13 +161,10 @@ PUT my_index "analyzer": { "my_custom_analyzer": { "type": "custom", - "char_filter": [ "emoticons" <1> ], - "tokenizer": "punctuation", <1> - "filter": [ "lowercase", "english_stop" <1> @@ -165,11 +204,54 @@ POST my_index/_analyze "text": "I'm a :) person, and you?" } -------------------------------------------------- +// CONSOLE <1> The `emoticon` character filter, `punctuation` tokenizer and `english_stop` token filter are custom implementations which are defined in the same index settings.
+///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "i'm", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "_happy_", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 2 + }, + { + "token": "person", + "start_offset": 9, + "end_offset": 15, + "type": "word", + "position": 3 + }, + { + "token": "you", + "start_offset": 21, + "end_offset": 24, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index b393c883441..24dc92380bb 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -36,6 +36,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "and consistent godel is said sentence this yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] @@ -58,16 +79,11 @@ The `fingerprint` analyzer accepts the following parameters: The maximum token size to emit. Defaults to `255`. Tokens larger than this size will be discarded. -`preserve_original`:: - - If `true`, emits two tokens: one with ASCII-folding of terms that contain - extended characters (if any) and one with the original characters. - Defaults to `false`. - `stopwords`:: A pre-defined stop words list like `_english_` or an array containing a list of stop words. Defaults to `_none_`. + `stopwords_path`:: The path to a file containing stop words. @@ -80,8 +96,7 @@ about stop word configuration. 
=== Example configuration In this example, we configure the `fingerprint` analyzer to use the -pre-defined list of English stop words, and to emit a second token in -the presence of non-ASCII characters: +pre-defined list of English stop words: [source,js] ---------------------------- @@ -92,8 +107,7 @@ PUT my_index "analyzer": { "my_fingerprint_analyzer": { "type": "fingerprint", - "stopwords": "_english_", - "preserve_original": true + "stopwords": "_english_" } } } @@ -110,9 +124,30 @@ POST my_index/_analyze ---------------------------- // CONSOLE -The above example produces the following two terms: +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "consistent godel said sentence yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: [source,text] --------------------------- -[ consistent godel said sentence yes, consistent gödel said sentence yes ] +[ consistent godel said sentence yes ] --------------------------- diff --git a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc index a0c1b1b0a6a..cc94f3b757e 100644 --- a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc @@ -25,6 +25,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone.", + "start_offset": 0, + "end_offset": 56, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 6a4ca274416..2d5741c2b9e 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -30,6 +30,104 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 8 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 9 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 10 + }, + { + "token": "bone", + 
"start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -110,6 +208,55 @@ POST my_index/_analyze <1> The backslashes in the pattern need to be escaped when specifying the pattern as a JSON string. +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "john", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "smith", + "start_offset": 5, + "end_offset": 10, + "type": "word", + "position": 1 + }, + { + "token": "foo", + "start_offset": 11, + "end_offset": 14, + "type": "word", + "position": 2 + }, + { + "token": "bar", + "start_offset": 15, + "end_offset": 18, + "type": "word", + "position": 3 + }, + { + "token": "com", + "start_offset": 19, + "end_offset": 22, + "type": "word", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -148,6 +295,62 @@ GET my_index/_analyze -------------------------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "moose", + "start_offset": 0, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "x", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ftp", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "class", + "start_offset": 11, + "end_offset": 16, + "type": "word", + "position": 3 + }, + { + "token": "2", + "start_offset": 16, + "end_offset": 17, + "type": "word", + "position": 4 + }, + { + "token": "beta", + "start_offset": 18, + "end_offset": 22, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc index 4c932bb5d3e..a57c30d8dd6 100644 --- a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc @@ -25,6 +25,97 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "word",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
 The above sentence would produce the following terms:
 
 [source,text]
diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
index 72292e1d40d..3b948892483 100644
--- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
+++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
@@ -33,6 +33,97 @@ POST _analyze
 ---------------------------
 // CONSOLE
 
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "the",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<NUM>",
+      "position": 1
+    },
+    {
+      "token": "quick",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 8
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<ALPHANUM>",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
 The above sentence would produce the following terms:
 
 [source,text]
@@ -98,6 +189,89 @@ POST my_index/_analyze
 ----------------------------
 // CONSOLE
 
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<NUM>",
+      "position": 1
+    },
+    {
+      "token": "quick",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "jumpe",
+      "start_offset": 24,
+      "end_offset": 29,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "d",
+      "start_offset": 29,
+      "end_offset": 30,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 9
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<ALPHANUM>",
+      "position": 10
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 11
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
 The above example produces the following terms:
 
 [source,text]
diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc
index ada9022a287..e40436342d7 100644
--- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc
+++
b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -29,6 +29,83 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -87,6 +164,76 @@ POST my_index/_analyze ---------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc index 0dce8db1c99..f95e5c6e4ab 100644 --- a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc @@ -25,6 +25,90 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "Brown-Foxes", + "start_offset": 12, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + 
"token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "word", + "position": 8 + }, + { + "token": "bone.", + "start_offset": 51, + "end_offset": 56, + "type": "word", + "position": 9 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] diff --git a/docs/reference/analysis/charfilters.asciidoc b/docs/reference/analysis/charfilters.asciidoc index c9f5805284c..cd24f5bf571 100644 --- a/docs/reference/analysis/charfilters.asciidoc +++ b/docs/reference/analysis/charfilters.asciidoc @@ -1,16 +1,36 @@ [[analysis-charfilters]] == Character Filters -Character filters are used to preprocess the string of -characters before it is passed to the <>. -A character filter may be used to strip out HTML markup, or to convert -`"&"` characters to the word `"and"`. +_Character filters_ are used to preprocess the stream of characters before it +is passed to the <>. -Elasticsearch has built in characters filters which can be -used to build <>. +A character filter receives the original text as a stream of characters and +can transform the stream by adding, removing, or changing characters. For +instance, a character filter could be used to convert Arabic numerals +(٠‎١٢٣٤٥٦٧٨‎٩‎) into their Latin equivalents (0123456789), or to strip HTML +elements like `` from the stream. -include::charfilters/mapping-charfilter.asciidoc[] + +Elasticsearch has a number of built in character filters which can be used to build +<>. + +<>:: + +The `html_strip` character filter strips out HTML elements like `` and +decodes HTML entities like `&`. + +<>:: + +The `mapping` character filter replaces any occurrences of the specified +strings with the specified replacements. + +<>:: + +The `pattern_replace` character filter replaces any characters matching a +regular expression with the specified replacement. include::charfilters/htmlstrip-charfilter.asciidoc[] +include::charfilters/mapping-charfilter.asciidoc[] + include::charfilters/pattern-replace-charfilter.asciidoc[] diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index f12238a36ad..3d8b187d772 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -1,5 +1,135 @@ [[analysis-htmlstrip-charfilter]] === HTML Strip Char Filter -A char filter of type `html_strip` stripping out HTML elements from an -analyzed text. +The `html_strip` character filter strips HTML elements from the text and +replaces HTML entities with their decoded value (e.g. replacing `&` with +`&`). + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", <1> + "char_filter": [ "html_strip" ], + "text": "
" +} +--------------------------- +// CONSOLE +<1> The <> returns a single term. + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "\nI'm so happy!\n", + "start_offset": 0, + "end_offset": 32, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example returns the term: + +[source,js] +--------------------------- +[ \nI'm so happy!\n ] +--------------------------- + +The same example with the `standard` tokenizer would return the following terms: + +[source,js] +--------------------------- +[ I'm, so, happy ] +--------------------------- + +[float] +=== Configuration + +The `html_strip` character filter accepts the following parameter: + +[horizontal] +`escaped_tags`:: + + An array of HTML tags which should not be stripped from the original text. + +[float] +=== Example configuration + +In this example, we configure the `html_strip` character filter to leave `` +tags in place: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "keyword", + "char_filter": ["my_char_filter"] + } + }, + "char_filter": { + "my_char_filter": { + "type": "html_strip", + "escaped_tags": ["b"] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "
" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "\nI'm so happy!\n", + "start_offset": 0, + "end_offset": 32, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: + +[source,text] +--------------------------- +[ \nI'm so happy!\n ] +--------------------------- + + + diff --git a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc index 14c316dcac5..ed90e9f6ab6 100644 --- a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc @@ -1,42 +1,202 @@ [[analysis-mapping-charfilter]] === Mapping Char Filter -A char filter of type `mapping` replacing characters of an analyzed text -with given mapping. +The `mapping` character filter accepts a map of keys and values. Whenever it +encounters a string of characters that is the same as a key, it replaces them +with the value associated with that key. + +Matching is greedy; the longest pattern matching at a given point wins. +Replacements are allowed to be the empty string. + +[float] +=== Configuration + +The `mapping` character filter accepts the following parameters: [horizontal] `mappings`:: - A list of mappings to use. + A array of mappings, with each element having the form `key => value`. `mappings_path`:: - A path, relative to the `config` directory, to a mappings file - configuration. + A path, either absolute or relative to the `config` directory, to a UTF-8 + encoded text mappings file containing a `key => value` mapping per line. -Here is a sample configuration: +Either the `mappings` or `mappings_path` parameter must be provided. + +[float] +=== Example configuration + +In this example, we configure the `mapping` character filter to replace Arabic +numerals with their Latin equivalents: [source,js] --------------------------------------------------- +---------------------------- +PUT my_index { - "index" : { - "analysis" : { - "char_filter" : { - "my_mapping" : { - "type" : "mapping", - "mappings" : [ - "ph => f", - "qu => k" - ] - } - }, - "analyzer" : { - "custom_with_char_filter" : { - "tokenizer" : "standard", - "char_filter" : ["my_mapping"] - } - } + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "keyword", + "char_filter": [ + "my_char_filter" + ] } + }, + "char_filter": { + "my_char_filter": { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2", + "٣ => 3", + "٤ => 4", + "٥ => 5", + "٦ => 6", + "٧ => 7", + "٨ => 8", + "٩ => 9" + ] + } + } } + } } --------------------------------------------------- + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "My license plate is ٢٥٠١٥" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "My license plate is 25015", + "start_offset": 0, + "end_offset": 25, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: + +[source,text] +--------------------------- +[ My license plate is 25015 ] +--------------------------- + +Keys and values can be strings with multiple characters. 
+example replaces the `:)` and `:(` emoticons with a text equivalent:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "standard",
+          "char_filter": [
+            "my_char_filter"
+          ]
+        }
+      },
+      "char_filter": {
+        "my_char_filter": {
+          "type": "mapping",
+          "mappings": [
+            ":) => _happy_",
+            ":( => _sad_"
+          ]
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "I'm delighted about it :("
+}
+----------------------------
+// CONSOLE
+
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "I'm",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "delighted",
+      "start_offset": 4,
+      "end_offset": 13,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "about",
+      "start_offset": 14,
+      "end_offset": 19,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "it",
+      "start_offset": 20,
+      "end_offset": 22,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "_sad_",
+      "start_offset": 23,
+      "end_offset": 25,
+      "type": "<ALPHANUM>",
+      "position": 4
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ I'm, delighted, about, it, _sad_ ]
+---------------------------
diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc
index e3b85fd7bd1..72adefa5aec 100644
--- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc
+++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc
@@ -1,37 +1,249 @@
 [[analysis-pattern-replace-charfilter]]
 === Pattern Replace Char Filter
 
-The `pattern_replace` char filter allows the use of a regex to
-manipulate the characters in a string before analysis. The regular
-expression is defined using the `pattern` parameter, and the replacement
-string can be provided using the `replacement` parameter (supporting
-referencing the original text, as explained
-http://docs.oracle.com/javase/6/docs/api/java/util/regex/Matcher.html#appendReplacement(java.lang.StringBuffer,%20java.lang.String)[here]).
-For more information check the
-http://lucene.apache.org/core/4_3_1/analyzers-common/org/apache/lucene/analysis/pattern/PatternReplaceCharFilter.html[lucene
-documentation]
+The `pattern_replace` character filter uses a regular expression to match
+characters which should be replaced with the specified replacement string.
+The replacement string can refer to capture groups in the regular expression.
 
-Here is a sample configuration:
+[float]
+=== Configuration
+
+The `pattern_replace` character filter accepts the following parameters:
+
+[horizontal]
+`pattern`::
+
+    A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression]. Required.
+
+`replacement`::
+
+    The replacement string, which can reference capture groups using the
+    `$1`..`$9` syntax, as explained
+    http://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#appendReplacement-java.lang.StringBuffer-java.lang.String-[here].
+
+[float]
+=== Example configuration
+
+In this example, we configure the `pattern_replace` character filter to
+replace any embedded dashes in numbers with underscores, i.e. `123-456-789` ->
+`123_456_789`:
 
 [source,js]
--------------------------------------------------
+----------------------------
+PUT my_index
 {
-    "index" : {
-        "analysis" : {
-            "char_filter" : {
-                "my_pattern":{
-                    "type":"pattern_replace",
-                    "pattern":"sample(.*)",
-                    "replacement":"replacedSample $1"
-                }
-            },
-            "analyzer" : {
-                "custom_with_char_filter" : {
-                    "tokenizer" : "standard",
-                    "char_filter" : ["my_pattern"]
-                }
-            }
-        }
-    }
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "standard",
+          "char_filter": [
+            "my_char_filter"
+          ]
+        }
+      },
+      "char_filter": {
+        "my_char_filter": {
+          "type": "pattern_replace",
+          "pattern": "(\\d+)-(?=\\d)",
+          "replacement": "$1_"
+        }
+      }
+    }
+  }
 }
--------------------------------------------------
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "My credit card is 123-456-789"
+}
+----------------------------
+// CONSOLE
+// TEST[skip:Test interprets $1 as a stashed variable]
+
+The above example produces the following term:
+
+[source,text]
+---------------------------
+[ My, credit, card, is, 123_456_789 ]
+---------------------------
+
+
+WARNING: Using a replacement string that changes the length of the original
+text will work for search purposes, but will result in incorrect highlighting,
+as can be seen in the following example.
+
+This example inserts a space whenever it encounters a lower-case letter
+followed by an upper-case letter (i.e. `fooBarBaz` -> `foo Bar Baz`), allowing
+camelCase words to be queried individually:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "standard",
+          "char_filter": [
+            "my_char_filter"
+          ],
+          "filter": [
+            "lowercase"
+          ]
+        }
+      },
+      "char_filter": {
+        "my_char_filter": {
+          "type": "pattern_replace",
+          "pattern": "(?<=\\p{Lower})(?=\\p{Upper})",
+          "replacement": " "
+        }
+      }
+    }
+  },
+  "mappings": {
+    "my_type": {
+      "properties": {
+        "text": {
+          "type": "text",
+          "analyzer": "my_analyzer"
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "The fooBarBaz method"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "the",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "foo",
+      "start_offset": 4,
+      "end_offset": 6,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "bar",
+      "start_offset": 7,
+      "end_offset": 9,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "baz",
+      "start_offset": 10,
+      "end_offset": 13,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "method",
+      "start_offset": 14,
+      "end_offset": 20,
+      "type": "<ALPHANUM>",
+      "position": 4
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+The above returns the following terms:
+
+[source,text]
+----------------------------
+[ the, foo, bar, baz, method ]
+----------------------------
+
+Querying for `bar` will find the document correctly, but highlighting on the
+result will produce incorrect highlights, because our character filter changed
+the length of the original text:
+
+[source,js]
+----------------------------
+PUT my_index/my_doc/1?refresh
+{
+  "text": "The fooBarBaz method"
fooBarBaz method" +} + +GET my_index/_search +{ + "query": { + "match": { + "text": "bar" + } + }, + "highlight": { + "fields": { + "text": {} + } + } +} +---------------------------- +// CONSOLE +// TEST[continued] + +The output from the above is: + +[source,js] +---------------------------- +{ + "timed_out": false, + "took": $body.took, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.4375, + "hits": [ + { + "_index": "my_index", + "_type": "my_doc", + "_id": "1", + "_score": 0.4375, + "_source": { + "text": "The fooBarBaz method" + }, + "highlight": { + "text": [ + "The fooBarBaz method" <1> + ] + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +<1> Note the incorrect highlight. diff --git a/docs/reference/analysis/tokenizers.asciidoc b/docs/reference/analysis/tokenizers.asciidoc index 46c02f9a4fc..b30822b6a0b 100644 --- a/docs/reference/analysis/tokenizers.asciidoc +++ b/docs/reference/analysis/tokenizers.asciidoc @@ -1,34 +1,136 @@ [[analysis-tokenizers]] == Tokenizers -Tokenizers are used to break a string down into a stream of terms -or tokens. A simple tokenizer might split the string up into terms -wherever it encounters whitespace or punctuation. +A _tokenizer_ receives a stream of characters, breaks it up into individual +_tokens_ (usually individual words), and outputs a stream of _tokens_. For +instance, a <> tokenizer breaks +text into tokens whenever it sees any whitespace. It would convert the text +`"Quick brown fox!"` into the terms `[Quick, brown, fox!]`. + +The tokenizer is also responsible for recording the order or _position_ of +each term (used for phrase and word proximity queries) and the start and end +_character offsets_ of the original word which the term represents (used for +highlighting search snippets). + +Elasticsearch has a number of built in tokenizers which can be used to build +<>. + +[float] +=== Word Oriented Tokenizers + +The following tokenizers are usually used for tokenizing full text into +individual words: + +<>:: + +The `standard` tokenizer divides text into terms on word boundaries, as +defined by the Unicode Text Segmentation algorithm. It removes most +punctuation symbols. It is the best choice for most languages. + +<>:: + +The `letter` tokenizer divides text into terms whenever it encounters a +character which is not a letter. + +<>:: + +The `lowercase` tokenizer, like the `letter` tokenizer, divides text into +terms whenever it encounters a character which is not a letter, but it also +lowercases all terms. + +<>:: + +The `whitespace` tokenizer divides text into terms whenever it encounters any +whitespace character. + +<>:: + +The `uax_url_email` tokenizer is like the `standard` tokenizer except that it +recognises URLs and email addresses as single tokens. + +<>:: + +The `classic` tokenizer is a grammar based tokenizer for the English Language. + +<>:: + +The `thai` tokenizer segments Thai text into words. + +[float] +=== Partial Word Tokenizers + +These tokenizers break up text or words into small fragments, for partial word +matching: + +<>:: + +The `ngram` tokenizer can break up text into words when it encounters any of +a list of specified characters (e.g. whitespace or punctuation), then it returns +n-grams of each word: a sliding window of continuous letters, e.g. `quick` -> +`[qu, ui, ic, ck]`. 
+
+<<analysis-edgengram-tokenizer,Edge NGram Tokenizer>>::
+
+The `edge_ngram` tokenizer can break up text into words when it encounters any of
+a list of specified characters (e.g. whitespace or punctuation), then it returns
+n-grams of each word which are anchored to the start of the word, e.g. `quick` ->
+`[q, qu, qui, quic, quick]`.
+
+
+[float]
+=== Structured Text Tokenizers
+
+The following tokenizers are usually used with structured text like
+identifiers, email addresses, zip codes, and paths, rather than with full
+text:
+
+<<analysis-keyword-tokenizer,Keyword Tokenizer>>::
+
+The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it
+is given and outputs the exact same text as a single term. It can be combined
+with token filters like <<analysis-lowercase-tokenfilter,`lowercase`>> to
+normalise the analysed terms.
+
+<<analysis-pattern-tokenizer,Pattern Tokenizer>>::
+
+The `pattern` tokenizer uses a regular expression to either split text into
+terms whenever it matches a word separator, or to capture matching text as
+terms.
+
+<<analysis-pathhierarchy-tokenizer,Path Hierarchy Tokenizer>>::
+
+The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem
+path, splits on the path separator, and emits a term for each component in the
+tree, e.g. `/foo/bar/baz` -> `[/foo, /foo/bar, /foo/bar/baz ]`.
+
+
+
-Elasticsearch has a number of built in tokenizers which can be
-used to build <>.
 
 include::tokenizers/standard-tokenizer.asciidoc[]
 
-include::tokenizers/edgengram-tokenizer.asciidoc[]
-
-include::tokenizers/keyword-tokenizer.asciidoc[]
-
 include::tokenizers/letter-tokenizer.asciidoc[]
 
 include::tokenizers/lowercase-tokenizer.asciidoc[]
 
-include::tokenizers/ngram-tokenizer.asciidoc[]
-
 include::tokenizers/whitespace-tokenizer.asciidoc[]
 
-include::tokenizers/pattern-tokenizer.asciidoc[]
-
 include::tokenizers/uaxurlemail-tokenizer.asciidoc[]
 
-include::tokenizers/pathhierarchy-tokenizer.asciidoc[]
-
 include::tokenizers/classic-tokenizer.asciidoc[]
 
 include::tokenizers/thai-tokenizer.asciidoc[]
+
+include::tokenizers/ngram-tokenizer.asciidoc[]
+
+include::tokenizers/edgengram-tokenizer.asciidoc[]
+
+
+include::tokenizers/keyword-tokenizer.asciidoc[]
+
+include::tokenizers/pattern-tokenizer.asciidoc[]
+
+include::tokenizers/pathhierarchy-tokenizer.asciidoc[]
+
+
diff --git a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc
index 9b6315cec96..45d4ad41526 100644
--- a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc
@@ -1,19 +1,269 @@
 [[analysis-classic-tokenizer]]
 === Classic Tokenizer
 
-A tokenizer of type `classic` providing grammar based tokenizer that is
-a good tokenizer for English language documents. This tokenizer has
-heuristics for special treatment of acronyms, company names, email addresses,
-and internet host names. However, these rules don't always work, and
-the tokenizer doesn't work well for most languages other than English.
+The `classic` tokenizer is a grammar based tokenizer that is good for English
+language documents. This tokenizer has heuristics for special treatment of
+acronyms, company names, email addresses, and internet host names. However,
+these rules don't always work, and the tokenizer doesn't work well for most
+languages other than English:
+
+* It splits words at most punctuation characters, removing punctuation. However, a
+  dot that's not followed by whitespace is considered part of a token.
+
+* It splits words at hyphens, unless there's a number in the token, in which case
+  the whole token is interpreted as a product number and is not split.
+
+* It recognizes email addresses and internet hostnames as one token.
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "classic",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 8
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<APOSTROPHE>",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `classic` tokenizer accepts the following parameters:
+
+[horizontal]
+`max_token_length`::
+
+    The maximum token length. If a token is seen that exceeds this length then
+    it is split at `max_token_length` intervals. Defaults to `255`.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `classic` tokenizer to have a
+`max_token_length` of 5 (for demonstration purposes):
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "classic",
+          "max_token_length": 5
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 8
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<APOSTROPHE>",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ]
+---------------------------
 
-The following are settings that can be set for a `classic` tokenizer
-type:
 
-[cols="<,<",options="header",]
-|=======================================================================
-|Setting |Description
-|`max_token_length` |The maximum token length. If a token is seen that
-exceeds this length then it is discarded. Defaults to `255`.
-|=======================================================================
diff --git a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc
index 41cc2337940..2328354998e 100644
--- a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc
@@ -1,80 +1,323 @@
 [[analysis-edgengram-tokenizer]]
 === Edge NGram Tokenizer
 
-A tokenizer of type `edgeNGram`.
+The `edge_ngram` tokenizer first breaks text down into words whenever it
+encounters one of a list of specified characters, then it emits
+https://en.wikipedia.org/wiki/N-gram[N-grams] of each word where the start of
+the N-gram is anchored to the beginning of the word.
 
-This tokenizer is very similar to `nGram` but only keeps n-grams which
-start at the beginning of a token.
+Edge N-Grams are useful for _search-as-you-type_ queries.
 
-The following are settings that can be set for a `edgeNGram` tokenizer
-type:
+TIP: When you need _search-as-you-type_ for text which has a widely known
+order, such as movie or song titles, the
+<<search-suggesters-completion,completion suggester>> is a much more efficient
+choice than edge N-grams. Edge N-grams have the advantage when trying to
+autocomplete words that can appear in any order.
 
-[cols="<,<,<",options="header",]
-|=======================================================================
-|Setting |Description |Default value
-|`min_gram` |Minimum size in codepoints of a single n-gram |`1`.
+[float]
+=== Example output
 
-|`max_gram` |Maximum size in codepoints of a single n-gram |`2`.
+With the default settings, the `edge_ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: -|`token_chars` | Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "edge_ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// -`token_chars` accepts the following character classes: +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu ] +--------------------------- + +NOTE: These default gram lengths are almost entirely useless. You need to +configure the `edge_ngram` before using it. + +[float] +=== Configuration + +The `edge_ngram` tokenizer accepts the following parameters: [horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). ++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` [float] -==== Example +=== Example configuration + +In this example, we configure the `edge_ngram` tokenizer to treat letters and +digits as tokens, and to produce grams with minimum length `2` and maximum +length `10`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_edge_ngram_analyzer" : { - "tokenizer" : "my_edge_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_edge_ngram_tokenizer" : { - "type" : "edgeNGram", - "min_gram" : "2", - "max_gram" : "5", - "token_chars": [ "letter", "digit" ] - } - } - } +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" } - }' + }, + "tokenizer": { + "my_tokenizer": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_edge_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, Scha, Schal, 04 --------------------------------------------------- +GET _cluster/health?wait_for_status=yellow -[float] -==== `side` deprecated +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." 
+} +---------------------------- +// CONSOLE -There used to be a `side` parameter up to `0.90.1` but it is now deprecated. In -order to emulate the behavior of `"side" : "BACK"` a -<> should be used together -with the <>. The -`edgeNGram` filter must be enclosed in `reverse` filters like this: +///////////////////// [source,js] --------------------------------------------------- - "filter" : ["reverse", "edgeNGram", "reverse"] --------------------------------------------------- +---------------------------- +{ + "tokens": [ + { + "token": "Qu", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "Quic", + "start_offset": 2, + "end_offset": 6, + "type": "word", + "position": 2 + }, + { + "token": "Quick", + "start_offset": 2, + "end_offset": 7, + "type": "word", + "position": 3 + }, + { + "token": "Fo", + "start_offset": 8, + "end_offset": 10, + "type": "word", + "position": 4 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 5 + }, + { + "token": "Foxe", + "start_offset": 8, + "end_offset": 12, + "type": "word", + "position": 6 + }, + { + "token": "Foxes", + "start_offset": 8, + "end_offset": 13, + "type": "word", + "position": 7 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qu, Qui, Quic, Quick, Fo, Fox, Foxe, Foxes ] +--------------------------- + +Usually we recommend using the same `analyzer` at index time and at search +time. In the case of the `edge_ngram` tokenizer, the advice is different. It +only makes sense to use the `edge_ngram` tokenizer at index time, to ensure +that partial words are available for matching in the index. At search time, +just search for the terms the user has typed in, for instance: `Quick Fo`. + +Below is an example of how to set up a field for _search-as-you-type_: + +[source,js] +----------------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "autocomplete": { + "tokenizer": "autocomplete", + "filter": [ + "lowercase" + ] + }, + "autocomplete_search": { + "tokenizer": "lowercase" + } + }, + "tokenizer": { + "autocomplete": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter" + ] + } + } + } + }, + "mappings": { + "doc": { + "properties": { + "title": { + "type": "text", + "analyzer": "autocomplete", + "search_analyzer": "autocomplete_search" + } + } + } + } +} + +PUT my_index/doc/1 +{ + "title": "Quick Foxes" <1> +} + +POST my_index/_refresh + +GET my_index/_search +{ + "query": { + "match": { + "title": { + "query": "Quick Fo", <2> + "operator": "and" + } + } + } +} +----------------------------------- +// CONSOLE + +<1> The `autocomplete` analyzer indexes the terms `[qu, qui, quic, quick, fo, fox, foxe, foxes]`. +<2> The `autocomplete_search` analyzer searches for the terms `[quick, fo]`, both of which appear in the index. 
+ +///////////////////// + +[source,js] +---------------------------- +{ + "took": $body.took, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.44194174, + "hits": [ + { + "_index": "my_index", + "_type": "doc", + "_id": "1", + "_score": 0.44194174, + "_source": { + "title": "Quick Foxes" + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +///////////////////// -which essentially reverses the token, builds front `EdgeNGrams` and reverses -the ngram again. This has the same effect as the previous `"side" : "BACK"` setting. diff --git a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc index ad1652466be..27515516fe5 100644 --- a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc @@ -1,15 +1,60 @@ [[analysis-keyword-tokenizer]] === Keyword Tokenizer -A tokenizer of type `keyword` that emits the entire input as a single -output. +The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it +is given and outputs the exact same text as a single term. It can be combined +with token filters to normalise output, e.g. lower-casing email addresses. -The following are settings that can be set for a `keyword` tokenizer -type: +[float] +=== Example output -[cols="<,<",options="header",] -|======================================================= -|Setting |Description -|`buffer_size` |The term buffer size. Defaults to `256`. -|======================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", + "text": "New York" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "New York", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following term: + +[source,text] +--------------------------- +[ New York ] +--------------------------- + +[float] +=== Configuration + +The `keyword` tokenizer accepts the following parameters: + +[horizontal] +`buffer_size`:: + + The number of characters read into the term buffer in a single pass. + Defaults to `256`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. diff --git a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc index 03025ccd303..7423a68732d 100644 --- a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc @@ -1,7 +1,123 @@ [[analysis-letter-tokenizer]] === Letter Tokenizer -A tokenizer of type `letter` that divides text at non-letters. That's to -say, it defines tokens as maximal strings of adjacent letters. Note, -this does a decent job for most European languages, but does a terrible -job for some Asian languages, where words are not separated by spaces. +The `letter` tokenizer breaks text into terms whenever it encounters a +character which is not a letter. It does a reasonable job for most European +languages, but does a terrible job for some Asian languages, where words are +not separated by spaces. 
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "letter",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "word",
+      "position": 7
+    },
+    {
+      "token": "dog",
+      "start_offset": 45,
+      "end_offset": 48,
+      "type": "word",
+      "position": 8
+    },
+    {
+      "token": "s",
+      "start_offset": 49,
+      "end_offset": 50,
+      "type": "word",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "word",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, QUICK, Brown, Foxes, jumped, over, the, lazy, dog, s, bone ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `letter` tokenizer is not configurable.
diff --git a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
index 0cdbbc387a4..5aad28b4394 100644
--- a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
@@ -1,15 +1,128 @@
 [[analysis-lowercase-tokenizer]]
 === Lowercase Tokenizer
 
-A tokenizer of type `lowercase` that performs the function of
-<> and
-<> together. It divides text at non-letters and converts
-them to lower case. While it is functionally equivalent to the
-combination of
-<> and
-<>, there is a performance advantage to doing the two
-tasks at once, hence this (redundant) implementation.
+
+The `lowercase` tokenizer, like the
+<<analysis-letter-tokenizer,`letter` tokenizer>>, breaks text into terms
+whenever it encounters a character which is not a letter, but it also
+lowercases all terms. It is functionally equivalent to the
+<<analysis-letter-tokenizer,`letter` tokenizer>> combined with the
+<<analysis-lowercase-tokenfilter,`lowercase` token filter>>, but is more
+efficient as it performs both steps in a single pass.
+
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "lowercase",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ the, quick, brown, foxes, jumped, over, the, lazy, dog, s, bone ] +--------------------------- + +[float] +=== Configuration + +The `lowercase` tokenizer is not configurable. diff --git a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc index 23e6bc52dda..cf45da0627e 100644 --- a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc @@ -1,57 +1,306 @@ [[analysis-ngram-tokenizer]] === NGram Tokenizer -A tokenizer of type `nGram`. +The `ngram` tokenizer first breaks text down into words whenever it encounters +one of a list of specified characters, then it emits +https://en.wikipedia.org/wiki/N-gram[N-grams] of each word of the specified +length. -The following are settings that can be set for a `nGram` tokenizer type: - -[cols="<,<,<",options="header",] -|======================================================================= -|Setting |Description |Default value -|`min_gram` |Minimum size in codepoints of a single n-gram |`1`. - -|`max_gram` |Maximum size in codepoints of a single n-gram |`2`. - -|`token_chars` |Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= - -`token_chars` accepts the following character classes: - -[horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +N-grams are like a sliding window that moves across the word - a continuous +sequence of characters of the specified length. They are useful for querying +languages that don't use spaces or that have long compound words, like German. 
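+
+For instance, every trigram of the German word `Fußball` also occurs in the
+longer compound `Fußballspieler` (an illustration assuming grams of length
+`3`, not analyzer output from a specific request), which is why a
+trigram-analyzed query for `Fußball` can match documents that only contain
+the compound:
+
+[source,text]
+---------------------------
+Fußball        -> [ Fuß, ußb, ßba, bal, all ]
+Fußballspieler -> [ Fuß, ußb, ßba, bal, all, lls, lsp, spi, pie, iel, ele, ler ]
+---------------------------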
[float] -==== Example +=== Example output + +With the default settings, the `ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_ngram_analyzer" : { - "tokenizer" : "my_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_ngram_tokenizer" : { - "type" : "nGram", - "min_gram" : "2", - "max_gram" : "3", - "token_chars": [ "letter", "digit" ] - } - } - } - } - }' +--------------------------- +POST _analyze +{ + "tokenizer": "ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + }, + { + "token": "u", + "start_offset": 1, + "end_offset": 2, + "type": "word", + "position": 2 + }, + { + "token": "ui", + "start_offset": 1, + "end_offset": 3, + "type": "word", + "position": 3 + }, + { + "token": "i", + "start_offset": 2, + "end_offset": 3, + "type": "word", + "position": 4 + }, + { + "token": "ic", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 5 + }, + { + "token": "c", + "start_offset": 3, + "end_offset": 4, + "type": "word", + "position": 6 + }, + { + "token": "ck", + "start_offset": 3, + "end_offset": 5, + "type": "word", + "position": 7 + }, + { + "token": "k", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 8 + }, + { + "token": "k ", + "start_offset": 4, + "end_offset": 6, + "type": "word", + "position": 9 + }, + { + "token": " ", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 10 + }, + { + "token": " F", + "start_offset": 5, + "end_offset": 7, + "type": "word", + "position": 11 + }, + { + "token": "F", + "start_offset": 6, + "end_offset": 7, + "type": "word", + "position": 12 + }, + { + "token": "Fo", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 13 + }, + { + "token": "o", + "start_offset": 7, + "end_offset": 8, + "type": "word", + "position": 14 + }, + { + "token": "ox", + "start_offset": 7, + "end_offset": 9, + "type": "word", + "position": 15 + }, + { + "token": "x", + "start_offset": 8, + "end_offset": 9, + "type": "word", + "position": 16 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu, u, ui, i, ic, c, ck, k, "k ", " ", " F", F, Fo, o, ox, x ] +--------------------------- + +[float] +=== Configuration + +The `ngram` tokenizer accepts the following parameters: + +[horizontal] +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). 
++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` + +TIP: It usually makes sense to set `min_gram` and `max_gram` to the same +value. The smaller the length, the more documents will match but the lower +the quality of the matches. The longer the length, the more specific the +matches. A tri-gram (length `3`) is a good place to start. + +[float] +=== Example configuration + +In this example, we configure the `ngram` tokenizer to treat letters and +digits as tokens, and to produce tri-grams (grams of length `3`): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "ngram", + "min_gram": 3, + "max_gram": 3, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "uic", + "start_offset": 3, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ick", + "start_offset": 4, + "end_offset": 7, + "type": "word", + "position": 2 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 3 + }, + { + "token": "oxe", + "start_offset": 9, + "end_offset": 12, + "type": "word", + "position": 4 + }, + { + "token": "xes", + "start_offset": 10, + "end_offset": 13, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qui, uic, ick, Fox, oxe, xes ] +--------------------------- + - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, ch, cha, ha, hal, al, alk, lk, lke, ke, 04 --------------------------------------------------- diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index e6876f55bc6..b656e67eaec 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -1,32 +1,175 @@ [[analysis-pathhierarchy-tokenizer]] === Path Hierarchy Tokenizer -The `path_hierarchy` tokenizer takes something like this: +The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem +path, splits on the path separator, and emits a term for each component in the +tree. 
-------------------------- -/something/something/else -------------------------- +[float] +=== Example output -And produces tokens: +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "path_hierarchy", + "text": "/one/two/three" +} +--------------------------- +// CONSOLE -------------------------- -/something -/something/something -/something/something/else -------------------------- +///////////////////// -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`delimiter` |The character delimiter to use, defaults to `/`. +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/one", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "/one/two", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + }, + { + "token": "/one/two/three", + "start_offset": 0, + "end_offset": 14, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE -|`replacement` |An optional replacement character to use. Defaults to -the `delimiter`. +///////////////////// -|`buffer_size` |The buffer size to use, defaults to `1024`. -|`reverse` |Generates tokens in reverse order, defaults to `false`. -|`skip` |Controls initial tokens to skip, defaults to `0`. -|======================================================================= +The above text would produce the following terms: + +[source,text] +--------------------------- +[ /one, /one/two, /one/two/three ] +--------------------------- + +[float] +=== Configuration + +The `path_hierarchy` tokenizer accepts the following parameters: + +[horizontal] +`delimiter`:: + The character to use as the path separator. Defaults to `/`. + +`replacement`:: + An optional replacement character to use for the delimiter. + Defaults to the `delimiter`. + +`buffer_size`:: + The number of characters read into the term buffer in a single pass. + Defaults to `1024`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. + +`reverse`:: + If set to `true`, emits the tokens in reverse order. Defaults to `false`. + +`skip`:: + The number of initial tokens to skip. Defaults to `0`. + +[float] +=== Example configuration + +In this example, we configure the `path_hierarchy` tokenizer to split on `-` +characters, and to replace them with `/`. 
The first two tokens are skipped: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "path_hierarchy", + "delimiter": "-", + "replacement": "/", + "skip": 2 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "one-two-three-four-five" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/three", + "start_offset": 7, + "end_offset": 13, + "type": "word", + "position": 0 + }, + { + "token": "/three/four", + "start_offset": 7, + "end_offset": 18, + "type": "word", + "position": 0 + }, + { + "token": "/three/four/five", + "start_offset": 7, + "end_offset": 23, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ /three, /three/four, /three/four/five ] +--------------------------- + +If we were to set `reverse` to `true`, it would produce the following: + +[source,text] +--------------------------- +[ one/two/three/, two/three/, three/ ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc index 9a148456195..ca902a4e5f2 100644 --- a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc @@ -1,38 +1,268 @@ [[analysis-pattern-tokenizer]] === Pattern Tokenizer -A tokenizer of type `pattern` that can flexibly separate text into terms -via a regular expression. Accepts the following settings: +The `pattern` tokenizer uses a regular expression to either split text into +terms whenever it matches a word separator, or to capture matching text as +terms. -[cols="<,<",options="header",] -|====================================================================== -|Setting |Description -|`pattern` |The regular expression pattern, defaults to `\W+`. -|`flags` |The regular expression flags. -|`group` |Which group to extract into tokens. Defaults to `-1` (split). -|====================================================================== +The default pattern is `\W+`, which splits text whenever it encounters +non-word characters. -*IMPORTANT*: The regular expression should match the *token separators*, -not the tokens themselves. +[float] +=== Example output -********************************************* -Note that you may need to escape `pattern` string literal according to -your client language rules. For example, in many programming languages -a string literal for `\W+` pattern is written as `"\\W+"`. -There is nothing special about `pattern` (you may have to escape other -string literals as well); escaping `pattern` is common just because it -often contains characters that should be escaped. -********************************************* +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "pattern", + "text": "The foo_bar_size's default is 5." +} +--------------------------- +// CONSOLE -`group` set to `-1` (the default) is equivalent to "split". Using group ->= 0 selects the matching group as the token. 
For example, if you have:
+/////////////////////
-------------------------
-pattern = '([^']+)'
-group = 0
-input = aaa 'bbb' 'ccc'
-------------------------
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "foo_bar_size",
+      "start_offset": 4,
+      "end_offset": 16,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "s",
+      "start_offset": 17,
+      "end_offset": 18,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "default",
+      "start_offset": 19,
+      "end_offset": 26,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "is",
+      "start_offset": 27,
+      "end_offset": 29,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "5",
+      "start_offset": 30,
+      "end_offset": 31,
+      "type": "word",
+      "position": 5
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
-the output will be two tokens: `'bbb'` and `'ccc'` (including the `'`
-marks). With the same input but using group=1, the output would be:
-`bbb` and `ccc` (no `'` marks).
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, foo_bar_size, s, default, is, 5 ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `pattern` tokenizer accepts the following parameters:
+
+[horizontal]
+`pattern`::
+
+    A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression], defaults to `\W+`.
+
+`flags`::
+
+    Java regular expression http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#field.summary[flags].
+    Flags should be pipe-separated, eg `"CASE_INSENSITIVE|COMMENTS"`.
+
+`group`::
+
+    Which capture group to extract as tokens. Defaults to `-1` (split).
+
+[float]
+=== Example configuration
+
+In this example, we configure the `pattern` tokenizer to break text into
+tokens when it encounters commas:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "pattern",
+          "pattern": ","
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "comma,separated,values"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "comma",
+      "start_offset": 0,
+      "end_offset": 5,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "separated",
+      "start_offset": 6,
+      "end_offset": 15,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "values",
+      "start_offset": 16,
+      "end_offset": 22,
+      "type": "word",
+      "position": 2
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ comma, separated, values ]
+---------------------------
+
+In the next example, we configure the `pattern` tokenizer to capture values
+enclosed in double quotes (ignoring embedded escaped quotes `\"`).
The regex
+itself looks like this:
+
+    "((?:\\"|[^"]|\\")+)"
+
+And reads as follows:
+
+* A literal `"`
+* Start capturing:
+** A literal `\"` OR any character except `"`
+** Repeat until no more characters match
+* A literal closing `"`
+
+When the pattern is specified in JSON, the `"` and `\` characters need to be
+escaped, so the pattern ends up looking like:
+
+    \"((?:\\\\\"|[^\"]|\\\\\")+)\"
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "pattern",
+          "pattern": "\"((?:\\\\\"|[^\"]|\\\\\")+)\"",
+          "group": 1
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "\"value\", \"value with embedded \\\" quote\""
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "value",
+      "start_offset": 1,
+      "end_offset": 6,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "value with embedded \\\" quote",
+      "start_offset": 10,
+      "end_offset": 38,
+      "type": "word",
+      "position": 1
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+The above example produces the following two terms:
+
+[source,text]
+---------------------------
+[ value, value with embedded \" quote ]
+---------------------------
diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc
index 42dbe5a864a..ee052529b43 100644
--- a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc
@@ -1,18 +1,274 @@
 [[analysis-standard-tokenizer]]
 === Standard Tokenizer
-A tokenizer of type `standard` providing grammar based tokenizer that is
-a good tokenizer for most European language documents. The tokenizer
-implements the Unicode Text Segmentation algorithm, as specified in
-http://unicode.org/reports/tr29/[Unicode Standard Annex #29].
+The `standard` tokenizer provides grammar based tokenization (based on the
+Unicode Text Segmentation algorithm, as specified in
+http://unicode.org/reports/tr29/[Unicode Standard Annex #29]) and works well
+for most languages.
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "standard",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<NUM>",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 8
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<ALPHANUM>",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `standard` tokenizer accepts the following parameters:
+
+[horizontal]
+`max_token_length`::
+
+    The maximum token length. If a token is seen that exceeds this length then
+    it is split at `max_token_length` intervals. Defaults to `255`.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `standard` tokenizer to have a
+`max_token_length` of 5 (for demonstration purposes):
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "standard",
+          "max_token_length": 5
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "<NUM>",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "jumpe",
+      "start_offset": 24,
+      "end_offset": 29,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "d",
+      "start_offset": 29,
+      "end_offset": 30,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 7
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "<ALPHANUM>",
+      "position": 8
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "<ALPHANUM>",
+      "position": 9
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "<ALPHANUM>",
+      "position": 10
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "<ALPHANUM>",
+      "position": 11
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ]
+---------------------------
-The following are settings that can be set for a `standard` tokenizer
-type:
-[cols="<,<",options="header",]
-|=======================================================================
-|Setting |Description
-|`max_token_length` |The maximum token length. If a token is seen that
-exceeds this length then it is split at `max_token_length` intervals. Defaults to `255`.
-|=======================================================================
diff --git a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc
index 06f0b6892e7..3e9904d116e 100644
--- a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc
@@ -1,7 +1,106 @@
 [[analysis-thai-tokenizer]]
 === Thai Tokenizer
-A tokenizer of type `thai` that segments Thai text into words. This tokenizer
-uses the built-in Thai segmentation algorithm included with Java to divide
-up Thai text. Text in other languages in general will be treated the same
-as `standard`.
+The `thai` tokenizer segments Thai text into words, using the Thai
+segmentation algorithm included with Java. Text in other languages in general
+will be treated the same as the
+<<analysis-standard-tokenizer,`standard` tokenizer>>.
+
+WARNING: This tokenizer may not be supported by all JREs. It is known to work
+with Sun/Oracle and OpenJDK. If your application needs to be fully portable,
+consider using the {plugins}/analysis-icu-tokenizer.html[ICU Tokenizer] instead.
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "thai",
+  "text": "การที่ได้ต้องแสดงว่างานดี"
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "การ",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "ที่",
+      "start_offset": 3,
+      "end_offset": 6,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "ได้",
+      "start_offset": 6,
+      "end_offset": 9,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "ต้อง",
+      "start_offset": 9,
+      "end_offset": 13,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "แสดง",
+      "start_offset": 13,
+      "end_offset": 17,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "ว่า",
+      "start_offset": 17,
+      "end_offset": 20,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "งาน",
+      "start_offset": 20,
+      "end_offset": 23,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "ดี",
+      "start_offset": 23,
+      "end_offset": 25,
+      "type": "word",
+      "position": 7
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ การ, ที่, ได้, ต้อง, แสดง, ว่า, งาน, ดี ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `thai` tokenizer is not configurable.
diff --git a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
index 9ed28e60b91..500a5e191f1 100644
--- a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
@@ -1,16 +1,199 @@
 [[analysis-uaxurlemail-tokenizer]]
-=== UAX Email URL Tokenizer
+=== UAX URL Email Tokenizer
-A tokenizer of type `uax_url_email` which works exactly like the
-`standard` tokenizer, but tokenizes emails and urls as single tokens.
+The `uax_url_email` tokenizer is like the <<analysis-standard-tokenizer,`standard` tokenizer>> except that it
+recognises URLs and email addresses as single tokens.
-The following are settings that can be set for a `uax_url_email`
-tokenizer type:
+[float]
+=== Example output
-[cols="<,<",options="header",]
-|=======================================================================
-|Setting |Description
-|`max_token_length` |The maximum token length. If a token is seen that
-exceeds this length then it is discarded. Defaults to `255`.
-|=======================================================================
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "uax_url_email",
+  "text": "Email me at john.smith@global-international.com"
+}
+---------------------------
+// CONSOLE
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "Email",
+      "start_offset": 0,
+      "end_offset": 5,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "me",
+      "start_offset": 6,
+      "end_offset": 8,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "at",
+      "start_offset": 9,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "john.smith@global-international.com",
+      "start_offset": 12,
+      "end_offset": 47,
+      "type": "<EMAIL>",
+      "position": 3
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ Email, me, at, john.smith@global-international.com ]
+---------------------------
+
+while the `standard` tokenizer would produce:
+
+[source,text]
+---------------------------
+[ Email, me, at, john.smith, global, international.com ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `uax_url_email` tokenizer accepts the following parameters:
+
+[horizontal]
+`max_token_length`::
+
+    The maximum token length. If a token is seen that exceeds this length then
+    it is split at `max_token_length` intervals. Defaults to `255`.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `uax_url_email` tokenizer to have a
+`max_token_length` of 5 (for demonstration purposes):
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "uax_url_email",
+          "max_token_length": 5
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "john.smith@global-international.com"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "john",
+      "start_offset": 0,
+      "end_offset": 4,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "smith",
+      "start_offset": 5,
+      "end_offset": 10,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "globa",
+      "start_offset": 11,
+      "end_offset": 16,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "l",
+      "start_offset": 16,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "inter",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "natio",
+      "start_offset": 23,
+      "end_offset": 28,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "nal.c",
+      "start_offset": 28,
+      "end_offset": 33,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "om",
+      "start_offset": 33,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 7
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ john, smith, globa, l, inter, natio, nal.c, om ]
+---------------------------
diff --git a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
index f0e1ce28a12..9d06ea28d55 100644
--- a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
+++
b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
@@ -1,4 +1,114 @@
 [[analysis-whitespace-tokenizer]]
-=== Whitespace Tokenizer
+=== Whitespace Tokenizer
-A tokenizer of type `whitespace` that divides text at whitespace.
+The `whitespace` tokenizer breaks text into terms whenever it encounters a
+whitespace character.
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "whitespace",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "Brown-Foxes",
+      "start_offset": 12,
+      "end_offset": 23,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "word",
+      "position": 7
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "word",
+      "position": 8
+    },
+    {
+      "token": "bone.",
+      "start_offset": 51,
+      "end_offset": 56,
+      "type": "word",
+      "position": 9
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown-Foxes, jumped, over, the, lazy, dog's, bone. ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `whitespace` tokenizer is not configurable.
diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc
index 99e754df529..bb48a00fbe5 100644
--- a/docs/reference/cluster/reroute.asciidoc
+++ b/docs/reference/cluster/reroute.asciidoc
@@ -103,3 +103,16 @@ are available:
 To ensure that these implications are well-understood,
 this command requires the special field `accept_data_loss` to be
 explicitly set to `true` for it to work.
+
+[float]
+=== Retry failed shards
+
+The cluster will attempt to allocate a shard a maximum of
+`index.allocation.max_retries` times in a row (defaults to `5`), before giving
+up and leaving the shard unallocated. This scenario can be caused by
+structural problems such as having an analyzer which refers to a stopwords
+file which doesn't exist on all nodes.
+
+Once the problem has been corrected, allocation can be manually retried by
+calling the <<cluster-reroute,reroute>> API with `?retry_failed`, which
+will attempt a single retry round for these shards.
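+
+For example, a retry round can be requested with an otherwise empty reroute
+call (a minimal sketch; no allocation commands are required for a pure retry):
+
+[source,js]
+--------------------------------------------------
+POST /_cluster/reroute?retry_failed
+--------------------------------------------------
+// CONSOLE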
\ No newline at end of file
diff --git a/docs/reference/docs.asciidoc b/docs/reference/docs.asciidoc
index 465d2e60c77..f3b30e7f0c3 100644
--- a/docs/reference/docs.asciidoc
+++ b/docs/reference/docs.asciidoc
@@ -27,6 +27,8 @@ include::docs/get.asciidoc[]
 include::docs/delete.asciidoc[]
+include::docs/delete-by-query.asciidoc[]
+
 include::docs/update.asciidoc[]
 include::docs/update-by-query.asciidoc[]
diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc
new file mode 100644
index 00000000000..1562d8c515d
--- /dev/null
+++ b/docs/reference/docs/delete-by-query.asciidoc
@@ -0,0 +1,318 @@
+[[docs-delete-by-query]]
+== Delete By Query API
+
+experimental[The delete-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible]
+
+The simplest usage of `_delete_by_query` just performs a deletion on every
+document that matches a query. Here is the API:
+
+[source,js]
+--------------------------------------------------
+POST twitter/_delete_by_query
+{
+  "query": { <1>
+    "match": {
+      "message": "some message"
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:big_twitter]
+
+<1> The query must be passed as a value to the `query` key, in the same
+way as the <<search-request-body,Search API>>. You can also use the `q`
+parameter in the same way as the search api.
+
+That will return something like this:
+
+[source,js]
+--------------------------------------------------
+{
+  "took" : 147,
+  "timed_out": false,
+  "deleted": 119,
+  "batches": 1,
+  "version_conflicts": 0,
+  "noops": 0,
+  "retries": {
+    "bulk": 0,
+    "search": 0
+  },
+  "throttled_millis": 0,
+  "requests_per_second": "unlimited",
+  "throttled_until_millis": 0,
+  "total": 119,
+  "failures" : [ ]
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/]
+
+`_delete_by_query` gets a snapshot of the index when it starts and deletes what
+it finds using `internal` versioning. That means that you'll get a version
+conflict if the document changes between the time when the snapshot was taken
+and when the delete request is processed. When the versions match, the document
+is deleted.
+
+During the `_delete_by_query` execution, multiple search requests are sequentially
+executed in order to find all the matching documents to delete. Every time a batch
+of documents is found, a corresponding bulk request is executed to delete all
+these documents. If a search or bulk request is rejected, `_delete_by_query`
+relies on a default policy to retry rejected requests (up to 10 times, with
+exponential back off). Reaching the maximum retries limit causes the `_delete_by_query`
+to abort, and all failures are returned in the `failures` element of the response.
+The deletions that have been performed still stick. In other words, the process
+is not rolled back, only aborted. While the first failure causes the abort, all
+failures that are returned by the failing bulk request are returned in the `failures`
+element, so it's possible for there to be quite a few.
+
+If you'd like to count version conflicts rather than cause them to abort then
+set `conflicts=proceed` on the url or `"conflicts": "proceed"` in the request body.
+
+Back to the API format, you can limit `_delete_by_query` to a single type.
This
+will only delete `tweet` documents from the `twitter` index:
+
+[source,js]
+--------------------------------------------------
+POST twitter/tweet/_delete_by_query?conflicts=proceed
+{
+  "query": {
+    "match_all": {}
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+It's also possible to delete documents of multiple indices and multiple
+types at once, just like the search API:
+
+[source,js]
+--------------------------------------------------
+POST twitter,blog/tweet,post/_delete_by_query
+{
+  "query": {
+    "match_all": {}
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT twitter\nPUT blog\nGET _cluster\/health?wait_for_status=yellow\n/]
+
+If you provide `routing` then the routing is copied to the scroll query,
+limiting the process to the shards that match that routing value:
+
+[source,js]
+--------------------------------------------------
+POST twitter/_delete_by_query?routing=1
+{
+  "query": {
+    "range" : {
+        "age" : {
+           "gte" : 10
+        }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+By default `_delete_by_query` uses scroll batches of 1000. You can change the
+batch size with the `scroll_size` URL parameter:
+
+[source,js]
+--------------------------------------------------
+POST twitter/_delete_by_query?scroll_size=5000
+{
+  "query": {
+    "term": {
+      "user": "kimchy"
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+
+[float]
+=== URL Parameters
+
+In addition to the standard parameters like `pretty`, the Delete By Query API
+also supports `refresh`, `wait_for_completion`, `consistency`, and `timeout`.
+
+Sending the `refresh` parameter will refresh all shards involved in the delete by query
+once the request completes. This is different from the Delete API's `refresh`
+parameter, which causes just the shard that received the delete request
+to be refreshed.
+
+If the request contains `wait_for_completion=false` then Elasticsearch will
+perform some preflight checks, launch the request, and then return a `task`
+which can be used with <<tasks,Tasks APIs>> to cancel
+or get the status of the task. For now, once the request is finished the task
+is gone and the only place to look for the ultimate result of the task is in
+the Elasticsearch log file. This will be fixed soon.
+
+`consistency` controls how many copies of a shard must respond to each write
+request. `timeout` controls how long each write request waits for unavailable
+shards to become available. Both work exactly how they work in the
+<<docs-bulk,Bulk API>>.
+
+`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc)
+and throttles the number of requests per second that the delete by query issues.
+The throttling is done by waiting between bulk batches so that it can manipulate
+the scroll timeout. The wait time is the difference between the time it took the
+batch to complete and the time `requests_per_second * requests_in_the_batch`.
+Since the batch isn't broken into multiple bulk requests, large batch sizes will
+cause Elasticsearch to create many requests and then wait for a while before
+starting the next set. This is "bursty" instead of "smooth". The default is
+`unlimited`, which is also the only non-number value that it accepts.
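+
+For example, the following sketch (the rate `10` and the query are
+illustrative) would throttle the delete by query to roughly ten requests per
+second:
+
+[source,js]
+--------------------------------------------------
+POST twitter/_delete_by_query?requests_per_second=10
+{
+  "query": {
+    "term": {
+      "user": "kimchy"
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]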
+
+[float]
+=== Response body
+
+The JSON response looks like this:
+
+[source,js]
+--------------------------------------------------
+{
+  "took" : 639,
+  "deleted": 0,
+  "batches": 1,
+  "version_conflicts": 2,
+  "retries": 0,
+  "throttled_millis": 0,
+  "failures" : [ ]
+}
+--------------------------------------------------
+
+`took`::
+
+The number of milliseconds from start to end of the whole operation.
+
+`deleted`::
+
+The number of documents that were successfully deleted.
+
+`batches`::
+
+The number of scroll responses pulled back by the delete by query.
+
+`version_conflicts`::
+
+The number of version conflicts that the delete by query hit.
+
+`retries`::
+
+The number of retries that the delete by query did in response to a full queue.
+
+`throttled_millis`::
+
+Number of milliseconds the request slept to conform to `requests_per_second`.
+
+`failures`::
+
+Array of all indexing failures. If this is non-empty then the request aborted
+because of those failures. See `conflicts` for how to prevent version conflicts
+from aborting the operation.
+
+
+[float]
+[[docs-delete-by-query-task-api]]
+=== Works with the Task API
+
+While Delete By Query is running you can fetch its status using the
+<<tasks,Task API>>:
+
+[source,js]
+--------------------------------------------------
+GET _tasks?detailed=true&action=*/delete/byquery
+--------------------------------------------------
+// CONSOLE
+
+The response looks like:
+
+[source,js]
+--------------------------------------------------
+{
+  "nodes" : {
+    "r1A2WoRbTwKZ516z6NEs5A" : {
+      "name" : "Tyrannus",
+      "transport_address" : "127.0.0.1:9300",
+      "host" : "127.0.0.1",
+      "ip" : "127.0.0.1:9300",
+      "attributes" : {
+        "testattr" : "test",
+        "portsfile" : "true"
+      },
+      "tasks" : {
+        "r1A2WoRbTwKZ516z6NEs5A:36619" : {
+          "node" : "r1A2WoRbTwKZ516z6NEs5A",
+          "id" : 36619,
+          "type" : "transport",
+          "action" : "indices:data/write/delete/byquery",
+          "status" : { <1>
+            "total" : 6154,
+            "updated" : 0,
+            "created" : 0,
+            "deleted" : 3500,
+            "batches" : 36,
+            "version_conflicts" : 0,
+            "noops" : 0,
+            "retries": 0,
+            "throttled_millis": 0
+          },
+          "description" : ""
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+
+<1> This object contains the actual status. It is just like the response json
+with the important addition of the `total` field. `total` is the total number
+of operations that the delete by query expects to perform. You can estimate the
+progress by adding the `updated`, `created`, and `deleted` fields. The request
+will finish when their sum is equal to the `total` field.
+
+
+[float]
+[[docs-delete-by-query-cancel-task-api]]
+=== Works with the Cancel Task API
+
+Any Delete By Query can be canceled using the <<tasks,Task Cancel API>>:
+
+[source,js]
+--------------------------------------------------
+POST _tasks/taskid:1/_cancel
+--------------------------------------------------
+// CONSOLE
+
+The `task_id` can be found using the tasks API above.
+
+Cancellation should happen quickly but might take a few seconds. The task status
+API above will continue to list the task until it wakes to cancel itself.
+
+
+[float]
+[[docs-delete-by-query-rethrottle]]
+=== Rethrottling
+
+The value of `requests_per_second` can be changed on a running delete by query
+using the `_rethrottle` API:
+
+[source,js]
+--------------------------------------------------
+POST _delete_by_query/taskid:1/_rethrottle?requests_per_second=unlimited
+--------------------------------------------------
+// CONSOLE
+
+The `task_id` can be found using the tasks API above.
+
+Just like when setting it on the `_delete_by_query` API, `requests_per_second`
+can be either `unlimited` to disable throttling or any decimal number like `1.7`
+or `12` to throttle to that level. Rethrottling that speeds up the query takes
+effect immediately, but rethrottling that slows down the query will take effect
+after completing the current batch. This prevents scroll timeouts.
diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc
index e5a62ddf33a..ac4da4251be 100644
--- a/docs/reference/docs/update-by-query.asciidoc
+++ b/docs/reference/docs/update-by-query.asciidoc
@@ -61,7 +61,7 @@ and the time when it attempted to update the document. This is fine because
 that update will have picked up the online mapping update.
 Back to the API format, you can limit `_update_by_query` to a single type. This
-will only update `tweet`s from the `twitter` index:
+will only update `tweet` documents from the `twitter` index:
 [source,js]
 --------------------------------------------------
@@ -119,7 +119,7 @@ Just as in <> you can set `ctx.op = "noop"` if your script decides
 that it doesn't have to make any changes. That will cause `_update_by_query` to
 omit that document from its updates. Setting `ctx.op` to anything else is an
 error. If you want to delete by a query you can use the
-{plugins}/plugins-delete-by-query.html[Delete by Query plugin] instead. Setting any
+<<docs-delete-by-query,Delete By Query API>> instead. Setting any
 other field in `ctx` is an error.
 Note that we stopped specifying `conflicts=proceed`. In this case we want a
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index 977cb4e5a1d..132b287bd46 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -163,7 +163,7 @@ As mentioned previously, we can override either the cluster or node name. This c
 [source,sh]
 --------------------------------------------------
-./elasticsearch -Ees.cluster.name=my_cluster_name -Ees.node.name=my_node_name
+./elasticsearch -Ecluster.name=my_cluster_name -Enode.name=my_node_name
 --------------------------------------------------
 Also note the line marked http with information about the HTTP address (`192.168.8.112`) and port (`9200`) that our node is reachable from. By default, Elasticsearch uses port `9200` to provide access to its REST API. This port is configurable if necessary.
diff --git a/docs/reference/index-modules/allocation/filtering.asciidoc b/docs/reference/index-modules/allocation/filtering.asciidoc
index be45cd2a1ac..05007b46188 100644
--- a/docs/reference/index-modules/allocation/filtering.asciidoc
+++ b/docs/reference/index-modules/allocation/filtering.asciidoc
@@ -14,7 +14,7 @@ attribute as follows:
 [source,sh]
 ------------------------
-bin/elasticsearch -Ees.node.attr.rack=rack1 -Ees.node.attr.size=big <1>
+bin/elasticsearch -Enode.attr.rack=rack1 -Enode.attr.size=big <1>
 ------------------------
 <1> These attribute settings can also be specified in the `elasticsearch.yml` config file.
diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc
index 07591dc277b..8173b98d505 100644
--- a/docs/reference/index-modules/similarity.asciidoc
+++ b/docs/reference/index-modules/similarity.asciidoc
@@ -118,6 +118,8 @@ This similarity has the following options:
 [horizontal]
 `independence_measure`:: Possible values `standardized`, `saturated`, `chisquared`.
+Type name: `DFI`
+
 [float]
 [[ib]]
 ==== IB similarity.
diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc index cdc471d19f2..da97d360b43 100644 --- a/docs/reference/migration/migrate_5_0/java.asciidoc +++ b/docs/reference/migration/migrate_5_0/java.asciidoc @@ -84,12 +84,14 @@ static factory methods in QueryBuilders accordingly. Making sure that query contains at least one clause by making initial clause mandatory in constructor. +Renaming method to add clauses from `clause(SpanQueryBuilder)` to `addClause(SpanQueryBuilder)`. ===== SpanNearQueryBuilder Removed setter for mandatory slop parameter, needs to be set in constructor now. Also making sure that query contains at least one clause by making initial clause mandatory in constructor. Updated the static factory methods in QueryBuilders accordingly. +Renaming method to add clauses from `clause(SpanQueryBuilder)` to `addClause(SpanQueryBuilder)`. ===== SpanNotQueryBuilder diff --git a/docs/reference/migration/migrate_5_0/packaging.asciidoc b/docs/reference/migration/migrate_5_0/packaging.asciidoc index 5911b964b6b..977e20a76b1 100644 --- a/docs/reference/migration/migrate_5_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_5_0/packaging.asciidoc @@ -43,3 +43,15 @@ Previously, the scripts used to start Elasticsearch and run plugin commands only required a Bourne-compatible shell. Starting in Elasticsearch 5.0.0, the bash shell is now required and `/bin/bash` is a hard-dependency for the RPM and Debian packages. + +==== Environmental Settings + +Previously, Elasticsearch could be configured via environment variables +in two ways: first by using the placeholder syntax +`${env.ENV_VAR_NAME}` and the second by using the same syntax without +the `env` prefix: `${ENV_VAR_NAME}`. The first method has been removed +from Elasticsearch. + +Additionally, it was previously possible to set any setting in +Elasticsearch via JVM system properties. This has been removed from +Elasticsearch. diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc index 7a3fa97fe92..d3b1ccb97a3 100644 --- a/docs/reference/migration/migrate_5_0/percolator.asciidoc +++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc @@ -46,4 +46,4 @@ the existing document. ==== Percolate Stats -Percolate stats have been replaced with `percolate` query cache stats in nodes stats and cluster stats APIs. \ No newline at end of file +The percolate stats have been removed. This is because the percolator no longer caches the percolator queries. \ No newline at end of file diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index e916ec6b24c..0fa7d42e874 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -202,19 +202,14 @@ the cache implementation used for the request cache and the field data cache. ==== Using system properties to configure Elasticsearch -Elasticsearch can be configured by setting system properties on the -command line via `-Des.name.of.property=value.of.property`. This will be -removed in a future version of Elasticsearch. Instead, use -`-E es.name.of.setting=value.of.setting`. Note that in all cases the -name of the setting must be prefixed with `es.`. +Elasticsearch can no longer be configured by setting system properties. +Instead, use `-Ename.of.setting=value.of.setting`. 
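+
+For example (the setting values are illustrative):
+
+[source,sh]
+--------------------------------------------------
+./bin/elasticsearch -Ecluster.name=my_cluster -Enode.name=node_1
+--------------------------------------------------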
==== Removed using double-dashes to configure Elasticsearch
 Elasticsearch could previously be configured on the command line by
 setting settings via `--name.of.setting value.of.setting`. This feature
-has been removed. Instead, use
-`-Ees.name.of.setting=value.of.setting`. Note that in all cases the
-name of the setting must be prefixed with `es.`.
+has been removed. Instead, use `-Ename.of.setting=value.of.setting`.
 ==== Discovery Settings
@@ -258,3 +253,9 @@ Previously script mode settings (e.g., "script.inline: true",
 Prior to 5.0 a third option could be specified for the `script.inline` and
 `script.stored` settings ("sandbox"). This has been removed. You can now only
 set `script.inline: true` or `script.stored: true`.
+
+==== Search settings
+
+The setting `index.query.bool.max_clause_count` has been removed. In order to
+set the maximum number of boolean clauses, `indices.query.bool.max_clause_count`
+should be used instead.
diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc
index 383252e23b3..f4e61fb0da1 100644
--- a/docs/reference/modules/cluster/allocation_awareness.asciidoc
+++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc
@@ -21,7 +21,7 @@ attribute called `rack_id` -- we could use any attribute name. For example:
 [source,sh]
 ----------------------
-./bin/elasticsearch -Ees.node.attr.rack_id=rack_one <1>
+./bin/elasticsearch -Enode.attr.rack_id=rack_one <1>
 ----------------------
 <1> This setting could also be specified in the `elasticsearch.yml` config file.
diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc
index 124d68f1d6d..2f1caa42ad8 100644
--- a/docs/reference/modules/node.asciidoc
+++ b/docs/reference/modules/node.asciidoc
@@ -265,7 +265,7 @@ Like all node settings, it can also be specified on the command line as:
 [source,sh]
 -----------------------
-./bin/elasticsearch -Ees.path.data=/var/elasticsearch/data
+./bin/elasticsearch -Epath.data=/var/elasticsearch/data
 -----------------------
 TIP: When using the `.zip` or `.tar.gz` distributions, the `path.data` setting
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index 6c5245ce646..0f87744d317 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -162,6 +162,7 @@ Other repository backends are available in these official plugins:
 * {plugins}/repository-s3.html[repository-s3] for S3 repository support
 * {plugins}/repository-hdfs.html[repository-hdfs] for HDFS repository support in Hadoop environments
 * {plugins}/repository-azure.html[repository-azure] for Azure storage repositories
+* {plugins}/repository-gcs.html[repository-gcs] for Google Cloud Storage repositories
 [float]
 ===== Repository Verification
diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc
index 267647c9dbb..48ba500c98d 100644
--- a/docs/reference/query-dsl/percolate-query.asciidoc
+++ b/docs/reference/query-dsl/percolate-query.asciidoc
@@ -327,13 +327,10 @@ the document defined in the `percolate` query.
 ==== How it Works Under the Hood
 When indexing a document into an index that has the <> mapping configured, the query
-part of the documents gets parsed into a Lucene query and is kept in memory until that percolator document is removed.
-So, all the active percolator queries are kept in memory.
+part of the document gets parsed into a Lucene query and is stored in the Lucene index. A binary representation
+of the query is stored, and the query's terms are also analyzed and stored in an indexed field.
 At search time, the document specified in the request gets parsed into a Lucene document and is stored in a in-memory
-temporary Lucene index. This in-memory index can just hold this one document and it is optimized for that. Then all the queries
-that are registered to the index that the search request is targeted for, are going to be executed on this single document
-in-memory index. This happens on each shard the search request needs to execute.
-
-By using `routing` or additional queries the amount of percolator queries that need to be executed can be reduced and thus
-the time the search API needs to run can be decreased.
\ No newline at end of file
+temporary Lucene index. This in-memory index can just hold this one document and it is optimized for that. After this,
+a special query is built from the terms in the in-memory index that selects candidate percolator queries based on
+their indexed query terms. These candidate queries are then evaluated against the in-memory index to check whether they actually match.
\ No newline at end of file
diff --git a/docs/reference/search/count.asciidoc b/docs/reference/search/count.asciidoc
index 9be219f5e74..859455e89b7 100644
--- a/docs/reference/search/count.asciidoc
+++ b/docs/reference/search/count.asciidoc
@@ -10,15 +10,21 @@ body. Here is an example:
 [source,js]
 --------------------------------------------------
-$ curl -XGET 'http://localhost:9200/twitter/tweet/_count?q=user:kimchy'
+PUT /twitter/tweet/1?refresh
+{
+  "user": "kimchy"
+}
-$ curl -XGET 'http://localhost:9200/twitter/tweet/_count' -d '
+GET /twitter/tweet/_count?q=user:kimchy
+
+GET /twitter/tweet/_count
 {
   "query" : {
     "term" : { "user" : "kimchy" }
   }
-}'
+}
 --------------------------------------------------
+//CONSOLE
 NOTE: The query being sent in the body must be nested in a `query` key, same
 as the <<search-search,search api>> works
@@ -37,6 +43,7 @@ tweets from the twitter index for a certain user. The result is:
   }
 }
 --------------------------------------------------
+// TESTRESPONSE
 The query is optional, and when not provided, it will use `match_all` to
 count all the docs.
diff --git a/docs/reference/search/request/explain.asciidoc b/docs/reference/search/request/explain.asciidoc
index 81dc110c263..9bcaecb4840 100644
--- a/docs/reference/search/request/explain.asciidoc
+++ b/docs/reference/search/request/explain.asciidoc
@@ -5,6 +5,7 @@ Enables explanation for each hit on how its score was computed.
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "explain": true,
   "query" : {
@@ -12,3 +13,4 @@ Enables explanation for each hit on how its score was computed.
   }
 }
 --------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/search/request/fielddata-fields.asciidoc b/docs/reference/search/request/fielddata-fields.asciidoc
index aaaa606980e..f3a3508b144 100644
--- a/docs/reference/search/request/fielddata-fields.asciidoc
+++ b/docs/reference/search/request/fielddata-fields.asciidoc
@@ -6,13 +6,15 @@ example:
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "query" : {
-    ...
+    "match_all": {}
   },
   "fielddata_fields" : ["test1", "test2"]
 }
 --------------------------------------------------
+// CONSOLE
 Field data fields can work on fields that are not stored.
diff --git a/docs/reference/search/request/fields.asciidoc b/docs/reference/search/request/fields.asciidoc
index e929928d427..3483d470ee2 100644
--- a/docs/reference/search/request/fields.asciidoc
+++ b/docs/reference/search/request/fields.asciidoc
@@ -11,6 +11,7 @@ by a search hit.
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "fields" : ["user", "postDate"],
   "query" : {
@@ -18,6 +19,7 @@ by a search hit.
   }
 }
 --------------------------------------------------
+// CONSOLE
 `*` can be used to load all stored fields from the document.
@@ -26,6 +28,7 @@ returned, for example:
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "fields" : [],
   "query" : {
@@ -33,6 +36,7 @@ returned, for example:
   }
 }
 --------------------------------------------------
+// CONSOLE
 For backwards compatibility, if the fields parameter specifies fields which are not stored (`store` mapping set to
diff --git a/docs/reference/search/request/index-boost.asciidoc b/docs/reference/search/request/index-boost.asciidoc
index 29d1da3885c..bf766ce8a8c 100644
--- a/docs/reference/search/request/index-boost.asciidoc
+++ b/docs/reference/search/request/index-boost.asciidoc
@@ -8,6 +8,7 @@ graph where each user has an index).
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "indices_boost" : {
     "index1" : 1.4,
@@ -15,3 +16,4 @@ graph where each user has an index).
   }
 }
 --------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/search/request/min-score.asciidoc b/docs/reference/search/request/min-score.asciidoc
index f5a212ebf8e..d9dbef99ddf 100644
--- a/docs/reference/search/request/min-score.asciidoc
+++ b/docs/reference/search/request/min-score.asciidoc
@@ -6,6 +6,7 @@ in `min_score`:
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "min_score": 0.5,
   "query" : {
@@ -13,6 +14,7 @@ in `min_score`:
   }
 }
 --------------------------------------------------
+// CONSOLE
 Note, most times, this does not make much sense, but is provided for
 advanced use cases.
diff --git a/docs/reference/search/request/named-queries-and-filters.asciidoc b/docs/reference/search/request/named-queries-and-filters.asciidoc
index 96d7c1357a9..0fb60253938 100644
--- a/docs/reference/search/request/named-queries-and-filters.asciidoc
+++ b/docs/reference/search/request/named-queries-and-filters.asciidoc
@@ -5,21 +5,25 @@ Each filter and query can accept a `_name` in its top level definition.
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-  "bool" : {
-    "should" : [
-      {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }},
-      {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }}
-    ],
-    "filter" : {
-      "terms" : {
-        "name.last" : ["banon", "kimchy"],
-        "_name" : "test"
+  "query": {
+    "bool" : {
+      "should" : [
+        {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }},
+        {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }}
+      ],
+      "filter" : {
+        "terms" : {
+          "name.last" : ["banon", "kimchy"],
+          "_name" : "test"
+        }
       }
     }
   }
 }
--------------------------------------------------
+// CONSOLE
 The search response will include for each hit the `matched_queries` it matched on.
 The tagging of queries and filters only makes sense for the `bool` query.
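+
+For illustration, a hit that matched the clauses named above might carry an
+entry like the following (abridged, hypothetical response):
+
+[source,js]
+--------------------------------------------------
+{
+  ...
+  "hits": {
+    "hits": [
+      {
+        ...
+        "matched_queries": [ "first", "test" ]
+      }
+    ]
+  }
+}
+--------------------------------------------------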
diff --git a/docs/reference/search/request/post-filter.asciidoc b/docs/reference/search/request/post-filter.asciidoc
index 7bd95400312..493b4261c82 100644
--- a/docs/reference/search/request/post-filter.asciidoc
+++ b/docs/reference/search/request/post-filter.asciidoc
@@ -5,14 +5,43 @@ The `post_filter` is applied to the search `hits` at the very end of a search
 request, after aggregations have already been calculated. Its purpose is
 best explained by example:
-Imagine that you are selling shirts, and the user has specified two filters:
+Imagine that you are selling shirts that have the following properties:
+
+[source,js]
+-------------------------------------------------
+PUT /shirts
+{
+  "mappings": {
+    "item": {
+      "properties": {
+        "brand": { "type": "keyword"},
+        "color": { "type": "keyword"},
+        "model": { "type": "keyword"}
+      }
+    }
+  }
+}
+
+PUT /shirts/item/1?refresh
+{
+  "brand": "gucci",
+  "color": "red",
+  "model": "slim"
+}
+------------------------------------------------
+// CONSOLE
+// TESTSETUP
+
+
+Imagine a user has specified two filters:
+
 `color:red` and `brand:gucci`. You only want to show them red shirts made by
 Gucci in the search results. Normally you would do this with a
 <<query-dsl-bool-query,`bool` query>>:
 [source,js]
 --------------------------------------------------
-curl -XGET localhost:9200/shirts/_search -d '
+GET /shirts/_search
 {
   "query": {
     "bool": {
@@ -23,8 +52,8 @@ curl -XGET localhost:9200/shirts/_search -d '
       }
     }
   }
 }
-'
 --------------------------------------------------
+// CONSOLE
 However, you would also like to use _faceted navigation_ to display a list of
 other options that the user could click on. Perhaps you have a `model` field
@@ -36,7 +65,7 @@ This can be done with a
 [source,js]
 --------------------------------------------------
-curl -XGET localhost:9200/shirts/_search -d '
+GET /shirts/_search
 {
   "query": {
     "bool": {
@@ -52,8 +81,8 @@ curl -XGET localhost:9200/shirts/_search -d '
       }
     }
   }
 }
-'
 --------------------------------------------------
+// CONSOLE
 <1> Returns the most popular models of red shirts by Gucci.
 But perhaps you would also like to tell the user how many Gucci shirts are
@@ -67,12 +96,12 @@ the `post_filter`:
 [source,js]
 --------------------------------------------------
-curl -XGET localhost:9200/shirts/_search -d '
+GET /shirts/_search
 {
   "query": {
     "bool": {
       "filter": {
-        { "term": { "brand": "gucci" }} <1>
+        "term": { "brand": "gucci" } <1>
       }
     }
   },
@@ -95,8 +124,8 @@ curl -XGET localhost:9200/shirts/_search -d '
       "term": { "color": "red" }
     }
   }
 }
-'
 --------------------------------------------------
+// CONSOLE
 <1> The main query now finds all shirts by Gucci, regardless of color.
 <2> The `colors` agg returns popular colors for shirts by Gucci.
<3> The `color_red` agg limits the `models` sub-aggregation
diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc
index 0d07f29475e..3d6c6b40cb9 100644
--- a/docs/reference/search/request/preference.asciidoc
+++ b/docs/reference/search/request/preference.asciidoc
@@ -56,7 +56,7 @@ for the user:
 [source,js]
 ------------------------------------------------
-curl localhost:9200/_search?preference=xyzabc123 -d '
+GET /_search?preference=xyzabc123
 {
   "query": {
     "match": {
@@ -64,7 +64,6 @@ curl localhost:9200/_search?preference=xyzabc123 -d '
     }
   }
 }
-'
 ------------------------------------------------
-
+// CONSOLE
diff --git a/docs/reference/search/request/query.asciidoc b/docs/reference/search/request/query.asciidoc
index e496320bd97..fa06d0d9bb4 100644
--- a/docs/reference/search/request/query.asciidoc
+++ b/docs/reference/search/request/query.asciidoc
@@ -6,9 +6,11 @@ query using the <<query-dsl,Query DSL>>.
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "query" : {
     "term" : { "user" : "kimchy" }
   }
 }
 --------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc
index 596aba31d82..6e054f02e1c 100644
--- a/docs/reference/search/request/script-fields.asciidoc
+++ b/docs/reference/search/request/script-fields.asciidoc
@@ -6,9 +6,10 @@ evaluation>> (based on different fields) for each hit, for example:
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "query" : {
-    ...
+    "match_all": {}
   },
   "script_fields" : {
     "test1" : {
@@ -25,6 +26,8 @@ evaluation>> (based on different fields) for each hit, for example:
   }
 }
 --------------------------------------------------
+// CONSOLE
+
 Script fields can work on fields that are not stored (`my_field_name` in
 the above case), and allow custom values to be returned (the
@@ -36,9 +39,10 @@ type). Here is an example:
 [source,js]
 --------------------------------------------------
+GET /_search
 {
   "query" : {
-    ...
+    "match_all": {}
   },
   "script_fields" : {
     "test1" : {
@@ -47,6 +51,7 @@ type). Here is an example:
   }
 }
 --------------------------------------------------
+// CONSOLE
 Note the `_source` keyword here to navigate the json-like model.
diff --git a/docs/reference/search/request/source-filtering.asciidoc b/docs/reference/search/request/source-filtering.asciidoc index 8458d37806c..08625751eec 100644 --- a/docs/reference/search/request/source-filtering.asciidoc +++ b/docs/reference/search/request/source-filtering.asciidoc @@ -13,6 +13,7 @@ To disable `_source` retrieval set to `false`: [source,js] -------------------------------------------------- +GET /_search { "_source": false, "query" : { @@ -20,6 +21,7 @@ To disable `_source` retrieval set to `false`: } } -------------------------------------------------- +// CONSOLE The `_source` also accepts one or more wildcard patterns to control what parts of the `_source` should be returned: @@ -27,6 +29,7 @@ For example: [source,js] -------------------------------------------------- +GET /_search { "_source": "obj.*", "query" : { @@ -34,11 +37,13 @@ For example: } } -------------------------------------------------- +// CONSOLE Or [source,js] -------------------------------------------------- +GET /_search { "_source": [ "obj1.*", "obj2.*" ], "query" : { @@ -46,11 +51,13 @@ Or } } -------------------------------------------------- +// CONSOLE Finally, for complete control, you can specify both include and exclude patterns: [source,js] -------------------------------------------------- +GET /_search { "_source": { "include": [ "obj1.*", "obj2.*" ], @@ -61,3 +68,4 @@ Finally, for complete control, you can specify both include and exclude patterns } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/search/request/version.asciidoc b/docs/reference/search/request/version.asciidoc index 3b2329a828a..57c6ce27feb 100644 --- a/docs/reference/search/request/version.asciidoc +++ b/docs/reference/search/request/version.asciidoc @@ -5,6 +5,7 @@ Returns a version for each search hit. [source,js] -------------------------------------------------- +GET /_search { "version": true, "query" : { @@ -12,3 +13,4 @@ Returns a version for each search hit. } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index ceb3d8c38d3..68f73fc96b8 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -26,7 +26,7 @@ setting, as follows: [source,sh] ------------------------------- -./bin/elasticsearch -E es.path.conf=/path/to/my/config/ +./bin/elasticsearch -Epath.conf=/path/to/my/config/ ------------------------------- [float] @@ -93,15 +93,14 @@ is used in the settings and the process is run as a service or in the background === Setting default settings New default settings may be specified on the command line using the -`es.default.` prefix instead of the `es.` prefix. This will specify a value -that will be used by default unless another value is specified in the config -file. +`default.` prefix. This will specify a value that will be used by +default unless another value is specified in the config file. 
For instance, if Elasticsearch is started as follows: [source,sh] --------------------------- -./bin/elasticsearch -E es.default.node.name=My_Node +./bin/elasticsearch -Edefault.node.name=My_Node --------------------------- the value for `node.name` will be `My_Node`, unless it is overwritten on the diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index e3c7622d064..0d2e8bf04f6 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -45,15 +45,14 @@ file by default. The format of this config file is explained in <>. Any settings that can be specified in the config file can also be specified on -the command line, using the `-E` syntax, and prepending `es.` to the setting -name, as follows: +the command line, using the `-E` syntax as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- -NOTE: Values that contain spaces must be surrounded with quotes. For instance `-E es.path.logs="C:\My Logs\logs"`. +NOTE: Values that contain spaces must be surrounded with quotes. For instance `-Epath.logs="C:\My Logs\logs"`. TIP: Typically, any cluster-wide settings (like `cluster.name`) should be added to the `elasticsearch.yml` config file, while any node-specific settings diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/zip-targz.asciidoc index 0ed67cb9ce9..7fc41a0f3f8 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/zip-targz.asciidoc @@ -93,7 +93,7 @@ name, as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -d -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -d -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- TIP: Typically, any cluster-wide settings (like `cluster.name`) should be diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java index 1cb21887944..cd95f13f63a 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.expression; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; public class ExpressionPlugin extends Plugin { @@ -38,6 +37,6 @@ public class ExpressionPlugin extends Plugin { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExpressionScriptEngineService.class, - ExpressionScriptEngineService.NAME, ScriptMode.ON)); + ExpressionScriptEngineService.NAME, true)); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 6f83746d4ce..deed43abf12 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -122,7 +122,7 @@ public class EquivalenceTests extends ESIntegTestCase { } } - RangeAggregatorBuilder query = range("range").field("values"); + RangeAggregationBuilder query = range("range").field("values"); for (int i = 0; i < ranges.length; ++i) { String key = Integer.toString(i); if (ranges[i][0] == Double.NEGATIVE_INFINITY) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index 1c17c1966e5..662d4d2f30c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -113,17 +113,17 @@ public class MinDocCountTests extends AbstractTermsTestCase { private enum Script { NO { @Override - TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field) { + TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { return builder.field(field); } }, YES { @Override - TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field) { + TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values")); } }; - abstract TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field); + abstract TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field); } // check that terms2 is a subset of terms1 diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java index 2e59b798297..7e76b3f03eb 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import 
org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import java.util.Arrays; @@ -88,7 +88,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { return percents; } - private static PercentileRanksAggregatorBuilder randomCompression(PercentileRanksAggregatorBuilder builder) { + private static PercentileRanksAggregationBuilder randomCompression(PercentileRanksAggregationBuilder builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); } @@ -462,4 +462,4 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { } } -} \ No newline at end of file +} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java index 69d3c281ca8..712c9ebd951 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import java.util.Arrays; @@ -87,7 +87,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { return percentiles; } - private static PercentilesAggregatorBuilder randomCompression(PercentilesAggregatorBuilder builder) { + private static PercentilesAggregationBuilder randomCompression(PercentilesAggregationBuilder builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); } @@ -446,4 +446,4 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { } } -} \ No newline at end of file +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index a468517a512..5ef3ad766e6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; public class MustachePlugin extends Plugin { @@ -38,6 +37,6 @@ public class MustachePlugin extends Plugin { public void onModule(ScriptModule module) { module.addScriptEngine(new 
ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, - MustacheScriptEngineService.NAME, ScriptMode.ON)); + MustacheScriptEngineService.NAME, true)); } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 139d8dbf0b8..7097718c6fc 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -59,7 +59,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; @@ -107,7 +106,7 @@ public class TemplateQueryParserTests extends ESTestCase { ScriptModule scriptModule = new ScriptModule(); scriptModule.prepareSettings(settingsModule); // TODO: make this use a mock engine instead of mustache and it will no longer be messy! - scriptModule.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, ScriptMode.ON)); + scriptModule.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, true)); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), @@ -153,7 +152,7 @@ public class TemplateQueryParserTests extends ESTestCase { }); IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, - similarityService, scriptService, indicesQueriesRegistry, proxy, null, null, null); + similarityService, scriptService, indicesQueriesRegistry, proxy, null, null); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java index f215e39ad01..e81e828c875 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java @@ -26,10 +26,10 @@ import org.elasticsearch.painless.node.SSource; * Runs the analysis phase of compilation using the Painless AST. 
*/ final class Analyzer { - static Variables analyze(final CompilerSettings settings, final Definition definition, + static Variables analyze(final CompilerSettings settings, final Reserved shortcut, final SSource root) { - final Variables variables = new Variables(settings, definition, shortcut); - root.analyze(settings, definition, variables); + final Variables variables = new Variables(settings, shortcut); + root.analyze(variables); return variables; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 3228ff47e92..a16a9818ab3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -20,223 +20,770 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Transform; import org.elasticsearch.painless.Definition.Type; -import java.lang.reflect.InvocationTargetException; - /** * Used during the analysis phase to collect legal type casts and promotions * for type-checking and later to write necessary casts in the bytecode. */ public final class AnalyzerCaster { - public static Cast getLegalCast(final Definition definition, - final String location, final Type actual, final Type expected, final boolean explicit) { - final Cast cast = new Cast(actual, expected, explicit); - + public static Cast getLegalCast(String location, Type actual, Type expected, boolean explicit, boolean internal) { if (actual.equals(expected)) { return null; } - Cast transform = definition.transformsMap.get(cast); + switch (actual.sort) { + case BOOL: + switch (expected.sort) { + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case BOOL_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + } - if (transform == null && explicit) { - transform = definition.transformsMap.get(new Cast(actual, expected, false)); + break; + case BYTE: + switch (expected.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case BYTE_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case SHORT_OBJ: + if (internal) + return new Cast(actual,Definition.SHORT_TYPE, explicit, false, false, false, true); + + break; + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, explicit, false, false, false, true); + + break; + } + 
+ break; + case SHORT: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case SHORT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + } + + break; + case CHAR: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case CHAR_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case STRING: + return new Cast(actual, Definition.STRING_TYPE, explicit, false, false, false, false); + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + } + + break; + case INT: + switch (expected.sort) { + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case INT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, 
false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + } + + break; + case LONG: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + case INT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case LONG_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + } + + break; + case FLOAT: + switch (expected.sort) { + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + case INT: + case FLOAT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case FLOAT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + case LONG_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.LONG_TYPE, true, false, false, false, true); + + break; + } + + break; + case DOUBLE: + switch (expected.sort) { + case BYTE: + case SHORT: + case CHAR: + case INT: + case FLOAT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit 
&& internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + case LONG_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.LONG_TYPE, true, false, false, false, true); + + break; + case FLOAT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.FLOAT_TYPE, true, false, false, false, true); + + break; + } + + break; + case OBJECT: + case NUMBER: + switch (expected.sort) { + case BYTE: + if (internal && explicit) + return new Cast(actual, Definition.BYTE_OBJ_TYPE, true, false, true, false, false); + + break; + case SHORT: + if (internal && explicit) + return new Cast(actual, Definition.SHORT_OBJ_TYPE, true, false, true, false, false); + + break; + case CHAR: + if (internal && explicit) + return new Cast(actual, Definition.CHAR_OBJ_TYPE, true, false, true, false, false); + + break; + case INT: + if (internal && explicit) + return new Cast(actual, Definition.INT_OBJ_TYPE, true, false, true, false, false); + + break; + case LONG: + if (internal && explicit) + return new Cast(actual, Definition.LONG_OBJ_TYPE, true, false, true, false, false); + + break; + case FLOAT: + if (internal && explicit) + return new Cast(actual, Definition.FLOAT_OBJ_TYPE, true, false, true, false, false); + + break; + case DOUBLE: + if (internal && explicit) + return new Cast(actual, Definition.DOUBLE_OBJ_TYPE, true, false, true, false, false); + + break; + } + + break; + case BOOL_OBJ: + switch (expected.sort) { + case BOOL: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + } + + break; + case BYTE_OBJ: + switch (expected.sort) { + case BYTE: + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case SHORT_OBJ: + switch (expected.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case CHAR_OBJ: + switch (expected.sort) { + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case INT_OBJ: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case LONG_OBJ: + switch (expected.sort) { + case LONG: + case FLOAT: + 
case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case FLOAT_OBJ: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case DOUBLE_OBJ: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case DEF: + switch (expected.sort) { + case BOOL: + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit, true, false, false, false); + } + + break; + case STRING: + switch (expected.sort) { + case CHAR: + if (explicit) + return new Cast(actual, expected, true, false, false, false, false); + + break; + } + + break; } - if (transform != null) { - return transform; - } - - if (expected.clazz.isAssignableFrom(actual.clazz) || - ((explicit || expected.sort == Sort.DEF) && actual.clazz.isAssignableFrom(expected.clazz))) { - return cast; + if (actual.sort == Sort.DEF || expected.sort == Sort.DEF || + expected.clazz.isAssignableFrom(actual.clazz) || + explicit && actual.clazz.isAssignableFrom(expected.clazz)) { + return new Cast(actual, expected, explicit); } else { throw new ClassCastException("Error" + location + ": Cannot cast from [" + actual.name + "] to [" + expected.name + "]."); } } public static Object constCast(final String location, final Object constant, final Cast cast) { - if (cast instanceof Transform) { - final Transform transform = (Transform)cast; - return invokeTransform(location, transform, constant); + final Sort fsort = cast.from.sort; + final Sort tsort = cast.to.sort; + + if (fsort == tsort) { + return constant; + } else if (fsort == Sort.STRING && tsort == Sort.CHAR) { + return Utility.StringTochar((String)constant); + } else if (fsort == Sort.CHAR && tsort == Sort.STRING) { + return Utility.charToString((char)constant); + } else if (fsort.numeric && tsort.numeric) { + final Number number; + + if (fsort == Sort.CHAR) { + number = (int)(char)constant; + } else { + number = (Number)constant; + } + + switch (tsort) { + case BYTE: return number.byteValue(); + case SHORT: return number.shortValue(); + case CHAR: return (char)number.intValue(); + case INT: return number.intValue(); + case LONG: return number.longValue(); + case FLOAT: return number.floatValue(); + case DOUBLE: return number.doubleValue(); + default: + throw new IllegalStateException("Error" + location + ": Cannot cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); + } } else { - final Sort fsort = cast.from.sort; - final Sort tsort = cast.to.sort; - - if (fsort == tsort) { - return constant; - } else if (fsort.numeric && tsort.numeric) { - Number number; - - if (fsort == Sort.CHAR) { - number = (int)(char)constant; - } else { - number = (Number)constant; - } - - switch 
(tsort) { - case BYTE: return number.byteValue(); - case SHORT: return number.shortValue(); - case CHAR: return (char)number.intValue(); - case INT: return number.intValue(); - case LONG: return number.longValue(); - case FLOAT: return number.floatValue(); - case DOUBLE: return number.doubleValue(); - default: - throw new IllegalStateException("Error" + location + ": Cannot cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); - } - } else { - throw new IllegalStateException("Error" + location + ": Cannot cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); - } + throw new IllegalStateException("Error" + location + ": Cannot cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); } } - private static Object invokeTransform(final String location, final Transform transform, final Object object) { - final Method method = transform.method; - final java.lang.reflect.Method jmethod = method.reflect; - final int modifiers = jmethod.getModifiers(); - - try { - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - return jmethod.invoke(null, object); - } else { - return jmethod.invoke(object); - } - } catch (final IllegalAccessException | IllegalArgumentException | - InvocationTargetException | NullPointerException | ExceptionInInitializerError exception) { - throw new ClassCastException( - "Error" + location + ": Cannot cast from [" + transform.from.name + "] to [" + transform.to.name + "]."); - } - } - - public static Type promoteNumeric(final Definition definition, final Type from, final boolean decimal, final boolean primitive) { + public static Type promoteNumeric(Type from, boolean decimal) { final Sort sort = from.sort; if (sort == Sort.DEF) { - return definition.defType; - } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ) && decimal) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort == Sort.INT || sort == Sort.INT_OBJ || - sort == Sort.CHAR || sort == Sort.CHAR_OBJ || - sort == Sort.SHORT || sort == Sort.SHORT_OBJ || - sort == Sort.BYTE || sort == Sort.BYTE_OBJ) { - return primitive ? definition.intType : definition.intobjType; + return Definition.DEF_TYPE; + } else if ((sort == Sort.DOUBLE) && decimal) { + return Definition.DOUBLE_TYPE; + } else if ((sort == Sort.FLOAT) && decimal) { + return Definition.FLOAT_TYPE; + } else if (sort == Sort.LONG) { + return Definition.LONG_TYPE; + } else if (sort == Sort.INT || sort == Sort.CHAR || sort == Sort.SHORT || sort == Sort.BYTE) { + return Definition.INT_TYPE; } return null; } - public static Type promoteNumeric(final Definition definition, - final Type from0, final Type from1, final boolean decimal, final boolean primitive) { + public static Type promoteNumeric(Type from0, Type from1, boolean decimal) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } if (decimal) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || - sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { - return primitive ? 
definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? definition.floatType : definition.floatobjType; + if (sort0 == Sort.DOUBLE || sort1 == Sort.DOUBLE) { + return Definition.DOUBLE_TYPE; + } else if (sort0 == Sort.FLOAT || sort1 == Sort.FLOAT) { + return Definition.FLOAT_TYPE; } } - if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || - sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ || - sort1 == Sort.INT || sort1 == Sort.INT_OBJ || - sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ || - sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ || - sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ || - sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ || - sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ || - sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? definition.intType : definition.intobjType; + if (sort0 == Sort.LONG || sort1 == Sort.LONG) { + return Definition.LONG_TYPE; + } else if (sort0 == Sort.INT || sort1 == Sort.INT || + sort0 == Sort.CHAR || sort1 == Sort.CHAR || + sort0 == Sort.SHORT || sort1 == Sort.SHORT || + sort0 == Sort.BYTE || sort1 == Sort.BYTE) { + return Definition.INT_TYPE; } return null; } - public static Type promoteAdd(final Definition definition, final Type from0, final Type from1) { + public static Type promoteAdd(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.STRING || sort1 == Sort.STRING) { - return definition.stringType; + return Definition.STRING_TYPE; } - return promoteNumeric(definition, from0, from1, true, true); + return promoteNumeric(from0, from1, true); } - public static Type promoteXor(final Definition definition, final Type from0, final Type from1) { + public static Type promoteXor(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0.bool || sort1.bool) { - return definition.booleanType; + return Definition.BOOLEAN_TYPE; } - return promoteNumeric(definition, from0, from1, false, true); + return promoteNumeric(from0, from1, false); } - public static Type promoteEquality(final Definition definition, final Type from0, final Type from1) { + public static Type promoteEquality(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - final boolean primitive = sort0.primitive && sort1.primitive; - - if (sort0.bool && sort1.bool) { - return primitive ? 
definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - return promoteNumeric(definition, from0, from1, true, primitive); - } - - return definition.objectType; - } - - public static Type promoteReference(final Definition definition, final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } if (sort0.primitive && sort1.primitive) { if (sort0.bool && sort1.bool) { - return definition.booleanType; + return Definition.BOOLEAN_TYPE; } if (sort0.numeric && sort1.numeric) { - return promoteNumeric(definition, from0, from1, true, true); + return promoteNumeric(from0, from1, true); } } - return definition.objectType; + return Definition.OBJECT_TYPE; } - public static Type promoteConditional(final Definition definition, - final Type from0, final Type from1, final Object const0, final Object const1) { + public static Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) { if (from0.equals(from1)) { return from0; } @@ -245,126 +792,124 @@ public final class AnalyzerCaster { final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } - final boolean primitive = sort0.primitive && sort1.primitive; + if (sort0.primitive && sort1.primitive) { + if (sort0.bool && sort1.bool) { + return Definition.BOOLEAN_TYPE; + } - if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { - return sort0.primitive && sort1.primitive ? definition.longType : definition.longobjType; + if (sort0 == Sort.DOUBLE || sort1 == Sort.DOUBLE) { + return Definition.DOUBLE_TYPE; + } else if (sort0 == Sort.FLOAT || sort1 == Sort.FLOAT) { + return Definition.FLOAT_TYPE; + } else if (sort0 == Sort.LONG || sort1 == Sort.LONG) { + return Definition.LONG_TYPE; } else { - if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? definition.byteType : definition.byteobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (sort0 == Sort.BYTE) { + if (sort1 == Sort.BYTE) { + return Definition.BYTE_TYPE; + } else if (sort1 == Sort.SHORT) { if (const1 != null) { final short constant = (short)const1; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? 
definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + } else if (sort0 == Sort.SHORT) { + if (sort1 == Sort.BYTE) { if (const0 != null) { final short constant = (short)const0; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.SHORT) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.shortType : definition.shortobjType; + return Definition.SHORT_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.charType : definition.charobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + } else if (sort0 == Sort.CHAR) { + if (sort1 == Sort.BYTE) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.SHORT) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.CHAR_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + } else if (sort0 == Sort.INT) { + if (sort1 == Sort.BYTE) { if (const0 != null) { final int constant = (int)const0; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? 
definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.SHORT) { if (const0 != null) { final int constant = (int)const0; if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.CHAR) { if (const0 != null) { final int constant = (int)const0; if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { + return Definition.INT_TYPE; } } } @@ -374,7 +919,7 @@ public final class AnalyzerCaster { // to calculate the highest upper bound for the two types and return that. // However, for now we just return objectType that may require an extra cast. - return definition.objectType; + return Definition.OBJECT_TYPE; } private AnalyzerCaster() {} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index c487dddba71..16ad355177f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -112,10 +112,10 @@ final class Compiler { } final Reserved reserved = new Reserved(); - final SSource root = Walker.buildPainlessTree(source, reserved); - final Variables variables = Analyzer.analyze(settings, Definition.INSTANCE, reserved, root); + final SSource root = Walker.buildPainlessTree(source, reserved, settings); + final Variables variables = Analyzer.analyze(settings, reserved, root); - return Writer.write(settings, Definition.INSTANCE, name, source, variables, root); + return Writer.write(settings, name, source, variables, root); } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java index 9ec26bf345a..4cafe32bb56 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java @@ -24,47 +24,16 @@ package org.elasticsearch.painless; */ public final class CompilerSettings { - /** - * Constant to be used when specifying numeric overflow when compiling a script. - */ - public static final String NUMERIC_OVERFLOW = "numeric_overflow"; - /** * Constant to be used when specifying the maximum loop counter when compiling a script. */ public static final String MAX_LOOP_COUNTER = "max_loop_counter"; - /** - * Whether or not to allow numeric values to overflow without exception. - */ - private boolean numericOverflow = true; - /** * The maximum number of statements allowed to be run in a loop. */ private int maxLoopCounter = 10000; - /** - * Returns {@code true} if numeric operations should overflow, {@code false} - * if they should signal an exception. - *
<p>
- * If this value is {@code true} (default), then things behave like java: - * overflow for integer types can result in unexpected values / unexpected - * signs, and overflow for floating point types can result in infinite or - * {@code NaN} values. - */ - public final boolean getNumericOverflow() { - return numericOverflow; - } - - /** - * Set {@code true} for numerics to overflow, false to deliver exceptions. - * @see #getNumericOverflow - */ - public final void setNumericOverflow(boolean allow) { - this.numericOverflow = allow; - } - /** * Returns the value for the cumulative total number of statements that can be made in all loops * in a script before an exception is thrown. This attempts to prevent infinite loops. Note if diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 9226fc3f098..bd8e09f504b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -52,9 +52,10 @@ public final class Def { // TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it: /** Helper class for isolating MethodHandles and methods to get the length of arrays - * (to emulate a "arraystore" byteoode using MethodHandles). + * (to emulate an "arraylength" bytecode using MethodHandles). * This should really be a method in {@link MethodHandles} class! */ + @SuppressWarnings("unused") // getArrayLength() methods are actually used, javac just does not know :) private static final class ArrayLengthHelper { private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); @@ -134,17 +135,16 @@ public final class Def { * @param receiverClass Class of the object to invoke the method on. * @param name Name of the method. * @param type Callsite signature. Need not match exactly, except the number of parameters. - * @param definition Whitelist to check. * @return pointer to matching method to invoke. never returns null. * @throws IllegalArgumentException if no matching whitelisted method was found. */ - static MethodHandle lookupMethod(Class receiverClass, String name, MethodType type, Definition definition) { + static MethodHandle lookupMethod(Class receiverClass, String name, MethodType type) { // we don't consider receiver an argument/counting towards arity type = type.dropParameterTypes(0, 1); Definition.MethodKey key = new Definition.MethodKey(name, type.parameterCount()); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { Method method = struct.methods.get(key); @@ -154,7 +154,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { Method method = struct.methods.get(key); @@ -192,14 +192,13 @@ public final class Def { *
<p>
* @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. - * @param definition Whitelist to check. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupGetter(Class receiverClass, String name, Definition definition) { + static MethodHandle lookupGetter(Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -209,7 +208,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -263,14 +262,13 @@ public final class Def { *
<p>
* @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. - * @param definition Whitelist to check. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupSetter(Class receiverClass, String name, Definition definition) { + static MethodHandle lookupSetter(Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -280,7 +278,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -971,6 +969,10 @@ public final class Def { // Conversion methods for Def to primitive types. + public static boolean DefToboolean(final Object value) { + return (boolean)value; + } + public static byte DefTobyteImplicit(final Object value) { return (byte)value; } @@ -1051,79 +1053,6 @@ public final class Def { } } - public static Byte DefToByteImplicit(final Object value) { - return (Byte)value; - } - - public static Short DefToShortImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte) { - return ((Byte)value).shortValue(); - } else { - return (Short)value; - } - } - - public static Character DefToCharacterImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte) { - return (char)(byte)value; - } else { - return (Character)value; - } - } - - public static Integer DefToIntegerImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short) { - return ((Number)value).intValue(); - } else if (value instanceof Character) { - return (int)(char)value; - } else { - return (Integer)value; - } - } - - public static Long DefToLongImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || value instanceof Integer) { - return ((Number)value).longValue(); - } else if (value instanceof Character) { - return (long)(char)value; - } else { - return (Long)value; - } - } - - public static Float DefToFloatImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || value instanceof Integer || value instanceof Long) { - return ((Number)value).floatValue(); - } else if (value instanceof Character) { - return (float)(char)value; - } else { - return (Float)value; - } - } - - public static Double DefToDoubleImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || - value instanceof Integer || value instanceof Long || value instanceof Float) { - return ((Number)value).doubleValue(); - } else if (value instanceof Character) { - return (double)(char)value; - } else { - return (Double)value; - } - } - public static byte DefTobyteExplicit(final Object value) { if (value instanceof Character) { return (byte)(char)value; @@ -1179,74 +1108,4 @@ public final class Def { return ((Number)value).doubleValue(); } } - - public static Byte DefToByteExplicit(final 
Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (byte)(char)value; - } else { - return ((Number)value).byteValue(); - } - } - - public static Short DefToShortExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (short)(char)value; - } else { - return ((Number)value).shortValue(); - } - } - - public static Character DefToCharacterExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return ((Character)value); - } else { - return (char)((Number)value).intValue(); - } - } - - public static Integer DefToIntegerExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (int)(char)value; - } else { - return ((Number)value).intValue(); - } - } - - public static Long DefToLongExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (long)(char)value; - } else { - return ((Number)value).longValue(); - } - } - - public static Float DefToFloatExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (float)(char)value; - } else { - return ((Number)value).floatValue(); - } - } - - public static Double DefToDoubleExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (double)(char)value; - } else { - return ((Number)value).doubleValue(); - } - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 380f5455ab3..40b9cc6cbe8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -94,11 +94,11 @@ public final class DefBootstrap { private static MethodHandle lookup(int flavor, Class clazz, String name, MethodType type) { switch(flavor) { case METHOD_CALL: - return Def.lookupMethod(clazz, name, type, Definition.INSTANCE); + return Def.lookupMethod(clazz, name, type); case LOAD: - return Def.lookupGetter(clazz, name, Definition.INSTANCE); + return Def.lookupGetter(clazz, name); case STORE: - return Def.lookupSetter(clazz, name, Definition.INSTANCE); + return Def.lookupSetter(clazz, name); case ARRAY_LOAD: return Def.lookupArrayLoad(clazz); case ARRAY_STORE: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index f44db5cb504..05c12c30239 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -19,34 +19,51 @@ package org.elasticsearch.painless; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.ScriptDocValues; - +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.LineNumberReader; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.util.ArrayList; +import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; import java.util.List; 
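Taken together, the Def.java and DefBootstrap.java hunks above drop the Definition parameter that used to be threaded through every dynamic lookup and read the whitelist through a static accessor instead. Below is a minimal sketch of the shared lookup pattern, paraphrased from those hunks; RuntimeClass, getters/setters, and Definition.getRuntimeClass are the names this diff actually uses, while the consolidated helper itself is hypothetical.

--------------------------------
import java.lang.invoke.MethodHandle;

// Hypothetical consolidation of lookupGetter/lookupSetter from the hunks above:
// walk the receiver's superclass chain, consulting the whitelist for the class
// itself and then for each interface it directly implements.
static MethodHandle lookupFieldHandle(Class<?> receiverClass, String name, boolean setter) {
    for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
        Definition.RuntimeClass struct = Definition.getRuntimeClass(clazz);
        if (struct != null) {
            MethodHandle handle = (setter ? struct.setters : struct.getters).get(name);
            if (handle != null) {
                return handle; // first whitelisted match wins
            }
        }
        for (Class<?> iface : clazz.getInterfaces()) {
            struct = Definition.getRuntimeClass(iface);
            if (struct != null) {
                MethodHandle handle = (setter ? struct.setters : struct.getters).get(name);
                if (handle != null) {
                    return handle;
                }
            }
        }
    }
    // the real methods document this contract: never return null, throw instead
    throw new IllegalArgumentException(
            "no whitelisted field [" + name + "] for class [" + receiverClass.getName() + "]");
}
--------------------------------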
import java.util.Map; import java.util.Objects; -import java.util.Set; /** * The entire API for Painless. Also used as a whitelist for checking for legal * methods and fields during at both compile-time and runtime. */ public final class Definition { + + private static final String DEFINITION_FILE = "definition.txt"; - /** - * The default language API to be used with Painless. The second construction is used - * to finalize all the variables, so there is no mistake of modification afterwards. - */ - static Definition INSTANCE = new Definition(new Definition()); + private static final Definition INSTANCE = new Definition(); + + /** Some native types as constants: */ + public static final Type VOID_TYPE = getType("void"); + public static final Type BOOLEAN_TYPE = getType("boolean"); + public static final Type BOOLEAN_OBJ_TYPE = getType("Boolean"); + public static final Type BYTE_TYPE = getType("byte"); + public static final Type BYTE_OBJ_TYPE = getType("Byte"); + public static final Type SHORT_TYPE = getType("short"); + public static final Type SHORT_OBJ_TYPE = getType("Short"); + public static final Type INT_TYPE = getType("int"); + public static final Type INT_OBJ_TYPE = getType("Integer"); + public static final Type LONG_TYPE = getType("long"); + public static final Type LONG_OBJ_TYPE = getType("Long"); + public static final Type FLOAT_TYPE = getType("float"); + public static final Type FLOAT_OBJ_TYPE = getType("Float"); + public static final Type DOUBLE_TYPE = getType("double"); + public static final Type DOUBLE_OBJ_TYPE = getType("Double"); + public static final Type CHAR_TYPE = getType("char"); + public static final Type CHAR_OBJ_TYPE = getType("Character"); + public static final Type OBJECT_TYPE = getType("Object"); + public static final Type DEF_TYPE = getType("def"); + public static final Type STRING_TYPE = getType("String"); + public static final Type EXCEPTION_TYPE = getType("Exception"); public enum Sort { VOID( void.class , 0 , true , false , false , false ), @@ -178,17 +195,15 @@ public final class Definition { public static final class Field { public final String name; public final Struct owner; - public final Type generic; public final Type type; public final java.lang.reflect.Field reflect; public final MethodHandle getter; public final MethodHandle setter; - private Field(final String name, final Struct owner, final Type generic, final Type type, + private Field(final String name, final Struct owner, final Type type, final java.lang.reflect.Field reflect, final MethodHandle getter, final MethodHandle setter) { this.name = name; this.owner = owner; - this.generic = generic; this.type = type; this.reflect = reflect; this.getter = getter; @@ -275,7 +290,7 @@ public final class Definition { staticMembers = new HashMap<>(); members = new HashMap<>(); } - + private Struct(final Struct struct) { name = struct.name; clazz = struct.clazz; @@ -289,6 +304,10 @@ public final class Definition { members = Collections.unmodifiableMap(struct.members); } + private Struct freeze() { + return new Struct(this); + } + @Override public boolean equals(Object object) { if (this == object) { @@ -314,50 +333,32 @@ public final class Definition { public final Type from; public final Type to; public final boolean explicit; + public final boolean unboxFrom; + public final boolean unboxTo; + public final boolean boxFrom; + public final boolean boxTo; public Cast(final Type from, final Type to, final boolean explicit) { this.from = from; this.to = to; this.explicit = explicit; + this.unboxFrom = false; + 
this.unboxTo = false; + this.boxFrom = false; + this.boxTo = false; } - @Override - public boolean equals(final Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - final Cast cast = (Cast)object; - - return from.equals(cast.from) && to.equals(cast.to) && explicit == cast.explicit; + public Cast(final Type from, final Type to, final boolean explicit, + final boolean unboxFrom, final boolean unboxTo, final boolean boxFrom, final boolean boxTo) { + this.from = from; + this.to = to; + this.explicit = explicit; + this.unboxFrom = unboxFrom; + this.unboxTo = unboxTo; + this.boxFrom = boxFrom; + this.boxTo = boxTo; } - @Override - public int hashCode() { - int result = from.hashCode(); - result = 31 * result + to.hashCode(); - result = 31 * result + (explicit ? 1 : 0); - - return result; - } - } - - public static final class Transform extends Cast { - public final Method method; - public final Type upcast; - public final Type downcast; - - public Transform(final Cast cast, Method method, final Type upcast, final Type downcast) { - super(cast.from, cast.to, cast.explicit); - - this.method = method; - this.upcast = upcast; - this.downcast = downcast; - } } public static final class RuntimeClass { @@ -367,1224 +368,150 @@ public final class Definition { private RuntimeClass(final Map methods, final Map getters, final Map setters) { - this.methods = methods; - this.getters = getters; - this.setters = setters; + this.methods = Collections.unmodifiableMap(methods); + this.getters = Collections.unmodifiableMap(getters); + this.setters = Collections.unmodifiableMap(setters); } } - public final Map structsMap; - public final Map transformsMap; - public final Map, RuntimeClass> runtimeMap; + /** Gets the type given by its name */ + public static Type getType(final String name) { + return INSTANCE.getTypeInternal(name); + } - public final Type voidType; - public final Type booleanType; - public final Type byteType; - public final Type shortType; - public final Type charType; - public final Type intType; - public final Type longType; - public final Type floatType; - public final Type doubleType; - - public final Type voidobjType; - public final Type booleanobjType; - public final Type byteobjType; - public final Type shortobjType; - public final Type charobjType; - public final Type intobjType; - public final Type longobjType; - public final Type floatobjType; - public final Type doubleobjType; - - public final Type objectType; - public final Type defType; - public final Type numberType; - public final Type charseqType; - public final Type stringType; - public final Type mathType; - public final Type utilityType; - public final Type defobjType; - - public final Type itrType; - public final Type oitrType; - public final Type sitrType; - - public final Type collectionType; - public final Type ocollectionType; - public final Type scollectionType; - - public final Type listType; - public final Type arraylistType; - public final Type olistType; - public final Type oarraylistType; - public final Type slistType; - public final Type sarraylistType; - - public final Type setType; - public final Type hashsetType; - public final Type osetType; - public final Type ohashsetType; - public final Type ssetType; - public final Type shashsetType; - - public final Type mapType; - public final Type hashmapType; - public final Type oomapType; - public final Type oohashmapType; - public final Type smapType; - public final Type 
shashmapType; - public final Type somapType; - public final Type sohashmapType; - - public final Type execType; - - public final Type exceptionType; - public final Type arithexcepType; - public final Type iargexcepType; - public final Type istateexceptType; - public final Type nfexcepType; - - // docvalues accessors - public final Type geoPointType; - public final Type stringsType; - // TODO: add ReadableDateTime? or don't expose the joda stuff? - public final Type longsType; - public final Type doublesType; - public final Type geoPointsType; - - // for testing features not currently "used" by the whitelist (we should not rush the API for that!) - public final Type featureTestType; + /** Creates an array type from the given Struct. */ + public static Type getType(final Struct struct, final int dimensions) { + return INSTANCE.getTypeInternal(struct, dimensions); + } + + public static RuntimeClass getRuntimeClass(Class clazz) { + return INSTANCE.runtimeMap.get(clazz); + } + + // INTERNAL IMPLEMENTATION: + + private final Map, RuntimeClass> runtimeMap; + private final Map structsMap; + private final Map simpleTypesMap; private Definition() { structsMap = new HashMap<>(); - transformsMap = new HashMap<>(); + simpleTypesMap = new HashMap<>(); runtimeMap = new HashMap<>(); - addStructs(); - - voidType = getType("void"); - booleanType = getType("boolean"); - byteType = getType("byte"); - shortType = getType("short"); - charType = getType("char"); - intType = getType("int"); - longType = getType("long"); - floatType = getType("float"); - doubleType = getType("double"); - - voidobjType = getType("Void"); - booleanobjType = getType("Boolean"); - byteobjType = getType("Byte"); - shortobjType = getType("Short"); - charobjType = getType("Character"); - intobjType = getType("Integer"); - longobjType = getType("Long"); - floatobjType = getType("Float"); - doubleobjType = getType("Double"); - - objectType = getType("Object"); - defType = getType("def"); - numberType = getType("Number"); - charseqType = getType("CharSequence"); - stringType = getType("String"); - mathType = getType("Math"); - utilityType = getType("Utility"); - defobjType = getType("Def"); - - itrType = getType("Iterator"); - oitrType = getType("Iterator"); - sitrType = getType("Iterator"); - - collectionType = getType("Collection"); - ocollectionType = getType("Collection"); - scollectionType = getType("Collection"); - - listType = getType("List"); - arraylistType = getType("ArrayList"); - olistType = getType("List"); - oarraylistType = getType("ArrayList"); - slistType = getType("List"); - sarraylistType = getType("ArrayList"); - - setType = getType("Set"); - hashsetType = getType("HashSet"); - osetType = getType("Set"); - ohashsetType = getType("HashSet"); - ssetType = getType("Set"); - shashsetType = getType("HashSet"); - - mapType = getType("Map"); - hashmapType = getType("HashMap"); - oomapType = getType("Map"); - oohashmapType = getType("HashMap"); - smapType = getType("Map"); - shashmapType = getType("HashMap"); - somapType = getType("Map"); - sohashmapType = getType("HashMap"); - - execType = getType("Executable"); - - exceptionType = getType("Exception"); - arithexcepType = getType("ArithmeticException"); - iargexcepType = getType("IllegalArgumentException"); - istateexceptType = getType("IllegalStateException"); - nfexcepType = getType("NumberFormatException"); - - geoPointType = getType("GeoPoint"); - stringsType = getType("Strings"); - longsType = getType("Longs"); - doublesType = getType("Doubles"); - geoPointsType = 
getType("GeoPoints"); - - featureTestType = getType("FeatureTest"); - + // parse the classes and return hierarchy (map of class name -> superclasses/interfaces) + Map> hierarchy = addStructs(); + // add every method for each class addElements(); - copyStructs(); - addTransforms(); - addRuntimeClasses(); - } - - private Definition(final Definition definition) { - final Map structs = new HashMap<>(); - - for (final Struct struct : definition.structsMap.values()) { - structs.put(struct.name, new Struct(struct)); + // apply hierarchy: this means e.g. copying Object's methods into String (thats how subclasses work) + for (Map.Entry> clazz : hierarchy.entrySet()) { + copyStruct(clazz.getKey(), clazz.getValue()); + } + // precompute runtime classes + for (Struct struct : structsMap.values()) { + addRuntimeClass(struct); + } + // copy all structs to make them unmodifiable for outside users: + for (final Map.Entry entry : structsMap.entrySet()) { + entry.setValue(entry.getValue().freeze()); } - - this.structsMap = Collections.unmodifiableMap(structs); - this.transformsMap = Collections.unmodifiableMap(definition.transformsMap); - this.runtimeMap = Collections.unmodifiableMap(definition.runtimeMap); - - this.voidType = definition.voidType; - this.booleanType = definition.booleanType; - this.byteType = definition.byteType; - this.shortType = definition.shortType; - this.charType = definition.charType; - this.intType = definition.intType; - this.longType = definition.longType; - this.floatType = definition.floatType; - this.doubleType = definition.doubleType; - - this.voidobjType = definition.voidobjType; - this.booleanobjType = definition.booleanobjType; - this.byteobjType = definition.byteobjType; - this.shortobjType = definition.shortobjType; - this.charobjType = definition.charobjType; - this.intobjType = definition.intobjType; - this.longobjType = definition.longobjType; - this.floatobjType = definition.floatobjType; - this.doubleobjType = definition.doubleobjType; - - this.objectType = definition.objectType; - this.defType = definition.defType; - this.numberType = definition.numberType; - this.charseqType = definition.charseqType; - this.stringType = definition.stringType; - this.mathType = definition.mathType; - this.utilityType = definition.utilityType; - this.defobjType = definition.defobjType; - - this.itrType = definition.itrType; - this.oitrType = definition.oitrType; - this.sitrType = definition.sitrType; - - this.collectionType = definition.collectionType; - this.ocollectionType = definition.ocollectionType; - this.scollectionType = definition.scollectionType; - - this.listType = definition.listType; - this.arraylistType = definition.arraylistType; - this.olistType = definition.olistType; - this.oarraylistType = definition.oarraylistType; - this.slistType = definition.slistType; - this.sarraylistType = definition.sarraylistType; - - this.setType = definition.setType; - this.hashsetType = definition.hashsetType; - this.osetType = definition.osetType; - this.ohashsetType = definition.ohashsetType; - this.ssetType = definition.ssetType; - this.shashsetType = definition.shashsetType; - - this.mapType = definition.mapType; - this.hashmapType = definition.hashmapType; - this.oomapType = definition.oomapType; - this.oohashmapType = definition.oohashmapType; - this.smapType = definition.smapType; - this.shashmapType = definition.shashmapType; - this.somapType = definition.somapType; - this.sohashmapType = definition.sohashmapType; - - this.execType = definition.execType; - - this.exceptionType = 
definition.exceptionType; - this.arithexcepType = definition.arithexcepType; - this.iargexcepType = definition.iargexcepType; - this.istateexceptType = definition.istateexceptType; - this.nfexcepType = definition.nfexcepType; - - this.geoPointType = definition.geoPointType; - this.stringsType = definition.stringsType; - this.longsType = definition.longsType; - this.doublesType = definition.doublesType; - this.geoPointsType = definition.geoPointsType; - - this.featureTestType = definition.featureTestType; } - private void addStructs() { - addStruct( "void" , void.class ); - addStruct( "boolean" , boolean.class ); - addStruct( "byte" , byte.class ); - addStruct( "short" , short.class ); - addStruct( "char" , char.class ); - addStruct( "int" , int.class ); - addStruct( "long" , long.class ); - addStruct( "float" , float.class ); - addStruct( "double" , double.class ); - - addStruct( "Void" , Void.class ); - addStruct( "Boolean" , Boolean.class ); - addStruct( "Byte" , Byte.class ); - addStruct( "Short" , Short.class ); - addStruct( "Character" , Character.class ); - addStruct( "Integer" , Integer.class ); - addStruct( "Long" , Long.class ); - addStruct( "Float" , Float.class ); - addStruct( "Double" , Double.class ); - - addStruct( "Object" , Object.class ); - addStruct( "def" , Object.class ); - addStruct( "Number" , Number.class ); - addStruct( "CharSequence" , CharSequence.class ); - addStruct( "String" , String.class ); - addStruct( "Math" , Math.class ); - addStruct( "Utility" , Utility.class ); - addStruct( "Def" , Def.class ); - - addStruct( "Iterator" , Iterator.class ); - addStruct( "Iterator" , Iterator.class ); - addStruct( "Iterator" , Iterator.class ); - - addStruct( "Collection" , Collection.class ); - addStruct( "Collection" , Collection.class ); - addStruct( "Collection" , Collection.class ); - - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - - addStruct( "Executable" , Executable.class ); - - addStruct( "Exception" , Exception.class); - addStruct( "ArithmeticException" , ArithmeticException.class); - addStruct( "IllegalArgumentException" , IllegalArgumentException.class); - addStruct( "IllegalStateException" , IllegalStateException.class); - addStruct( "NumberFormatException" , NumberFormatException.class); - - addStruct( "GeoPoint" , GeoPoint.class); - addStruct( "Strings" , ScriptDocValues.Strings.class); - addStruct( "Longs" , ScriptDocValues.Longs.class); - addStruct( "Doubles" , ScriptDocValues.Doubles.class); - addStruct( "GeoPoints" , ScriptDocValues.GeoPoints.class); - - addStruct( "FeatureTest", FeatureTest.class); + /** adds classes from definition. 
returns hierarchy */ + private Map> addStructs() { + final Map> hierarchy = new HashMap<>(); + int currentLine = -1; + try { + try (InputStream stream = Definition.class.getResourceAsStream(DEFINITION_FILE); + LineNumberReader reader = new LineNumberReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { + String line = null; + while ((line = reader.readLine()) != null) { + currentLine = reader.getLineNumber(); + line = line.trim(); + if (line.length() == 0 || line.charAt(0) == '#') { + continue; + } + if (line.startsWith("class ")) { + String elements[] = line.split("\u0020"); + assert elements[2].equals("->"); + if (elements.length == 7) { + hierarchy.put(elements[1], Arrays.asList(elements[5].split(","))); + } else { + assert elements.length == 5; + } + String className = elements[1]; + String javaPeer = elements[3]; + final Class javaClazz; + switch (javaPeer) { + case "void": + javaClazz = void.class; + break; + case "boolean": + javaClazz = boolean.class; + break; + case "byte": + javaClazz = byte.class; + break; + case "short": + javaClazz = short.class; + break; + case "char": + javaClazz = char.class; + break; + case "int": + javaClazz = int.class; + break; + case "long": + javaClazz = long.class; + break; + case "float": + javaClazz = float.class; + break; + case "double": + javaClazz = double.class; + break; + default: + javaClazz = Class.forName(javaPeer); + break; + } + addStruct(className, javaClazz); + } + } + } + } catch (Exception e) { + throw new RuntimeException("syntax error in definition line: " + currentLine, e); + } + return hierarchy; } + /** adds class methods/fields/ctors */ private void addElements() { - addMethod("Object", "equals", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Object", "hashCode", null, false, intType, new Type[] {}, null, null); - addMethod("Object", "toString", null, false, stringType, new Type[] {}, null, null); - - addMethod("def", "equals", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("def", "hashCode", null, false, intType, new Type[] {}, null, null); - addMethod("def", "toString", null, false, stringType, new Type[] {}, null, null); - - addConstructor("Boolean", "new", new Type[] {booleanType}, null); - addMethod("Boolean", "booleanValue", null, false, booleanType, new Type[] {}, null, null); - addMethod("Boolean", "compare", null, true, intType, new Type[] {booleanType,booleanType}, null, null); - addMethod("Boolean", "compareTo", null, false, intType, new Type[] {booleanobjType}, null, null); - addMethod("Boolean", "parseBoolean", null, true, booleanType, new Type[] {stringType}, null, null); - addMethod("Boolean", "valueOf", null, true, booleanobjType, new Type[] {booleanType}, null, null); - addField("Boolean", "FALSE", null, true, booleanobjType, null); - addField("Boolean", "TRUE", null, true, booleanobjType, null); - - addConstructor("Byte", "new", new Type[] {byteType}, null); - addMethod("Byte", "compare", null, true, intType, new Type[] {byteType,byteType}, null, null); - addMethod("Byte", "compareTo", null, false, intType, new Type[] {byteobjType}, null, null); - addMethod("Byte", "parseByte", null, true, byteType, new Type[] {stringType}, null, null); - addMethod("Byte", "valueOf", null, true, byteobjType, new Type[] {byteType}, null, null); - addField("Byte", "MIN_VALUE", null, true, byteType, null); - addField("Byte", "MAX_VALUE", null, true, byteType, null); - - addConstructor("Short", "new", new Type[] {shortType}, null); - addMethod("Short", 
"compare", null, true, intType, new Type[] {shortType,shortType}, null, null); - addMethod("Short", "compareTo", null, false, intType, new Type[] {shortobjType}, null, null); - addMethod("Short", "parseShort", null, true, shortType, new Type[] {stringType}, null, null); - addMethod("Short", "valueOf", null, true, shortobjType, new Type[] {shortType}, null, null); - addField("Short", "MIN_VALUE", null, true, shortType, null); - addField("Short", "MAX_VALUE", null, true, shortType, null); - - addConstructor("Character", "new", new Type[] {charType}, null); - addMethod("Character", "charCount", null, true, intType, new Type[] {intType}, null, null); - addMethod("Character", "charValue", null, false, charType, new Type[] {}, null, null); - addMethod("Character", "compare", null, true, intType, new Type[] {charType,charType}, null, null); - addMethod("Character", "compareTo", null, false, intType, new Type[] {charobjType}, null, null); - addMethod("Character", "digit", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Character", "forDigit", null, true, charType, new Type[] {intType,intType}, null, null); - addMethod("Character", "getName", null, true, stringType, new Type[] {intType}, null, null); - addMethod("Character", "getNumericValue", null, true, intType, new Type[] {intType}, null, null); - addMethod("Character", "isAlphabetic", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isDefined", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isDigit", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isIdeographic", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLetter", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLetterOrDigit", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLowerCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isMirrored", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isSpaceChar", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isTitleCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isUpperCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isWhitespace", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "valueOf", null, true, charobjType, new Type[] {charType}, null, null); - addField("Character", "MIN_VALUE", null, true, charType, null); - addField("Character", "MAX_VALUE", null, true, charType, null); - - addConstructor("Integer", "new", new Type[] {intType}, null); - addMethod("Integer", "compare", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "compareTo", null, false, intType, new Type[] {intobjType}, null, null); - addMethod("Integer", "min", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "max", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "parseInt", null, true, intType, new Type[] {stringType}, null, null); - addMethod("Integer", "signum", null, true, intType, new Type[] {intType}, null, null); - addMethod("Integer", "toHexString", null, true, stringType, new Type[] {intType}, null, null); - addMethod("Integer", "valueOf", null, true, intobjType, 
new Type[] {intType}, null, null); - addField("Integer", "MIN_VALUE", null, true, intType, null); - addField("Integer", "MAX_VALUE", null, true, intType, null); - - addConstructor("Long", "new", new Type[] {longType}, null); - addMethod("Long", "compare", null, true, intType, new Type[] {longType,longType}, null, null); - addMethod("Long", "compareTo", null, false, intType, new Type[] {longobjType}, null, null); - addMethod("Long", "min", null, true, longType, new Type[] {longType,longType}, null, null); - addMethod("Long", "max", null, true, longType, new Type[] {longType,longType}, null, null); - addMethod("Long", "parseLong", null, true, longType, new Type[] {stringType}, null, null); - addMethod("Long", "signum", null, true, intType, new Type[] {longType}, null, null); - addMethod("Long", "toHexString", null, true, stringType, new Type[] {longType}, null, null); - addMethod("Long", "valueOf", null, true, longobjType, new Type[] {longType}, null, null); - addField("Long", "MIN_VALUE", null, true, longType, null); - addField("Long", "MAX_VALUE", null, true, longType, null); - - addConstructor("Float", "new", new Type[] {floatType}, null); - addMethod("Float", "compare", null, true, intType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "compareTo", null, false, intType, new Type[] {floatobjType}, null, null); - addMethod("Float", "min", null, true, floatType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "max", null, true, floatType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "parseFloat", null, true, floatType, new Type[] {stringType}, null, null); - addMethod("Float", "toHexString", null, true, stringType, new Type[] {floatType}, null, null); - addMethod("Float", "valueOf", null, true, floatobjType, new Type[] {floatType}, null, null); - addField("Float", "MIN_VALUE", null, true, floatType, null); - addField("Float", "MAX_VALUE", null, true, floatType, null); - - addConstructor("Double", "new", new Type[] {doubleType}, null); - addMethod("Double", "compare", null, true, intType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "compareTo", null, false, intType, new Type[] {doubleobjType}, null, null); - addMethod("Double", "min", null, true, doubleType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "max", null, true, doubleType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "parseDouble", null, true, doubleType, new Type[] {stringType}, null, null); - addMethod("Double", "toHexString", null, true, stringType, new Type[] {doubleType}, null, null); - addMethod("Double", "valueOf", null, true, doubleobjType, new Type[] {doubleType}, null, null); - addField("Double", "MIN_VALUE", null, true, doubleType, null); - addField("Double", "MAX_VALUE", null, true, doubleType, null); - - addMethod("Number", "byteValue", null, false, byteType, new Type[] {}, null, null); - addMethod("Number", "shortValue", null, false, shortType, new Type[] {}, null, null); - addMethod("Number", "intValue", null, false, intType, new Type[] {}, null, null); - addMethod("Number", "longValue", null, false, longType, new Type[] {}, null, null); - addMethod("Number", "floatValue", null, false, floatType, new Type[] {}, null, null); - addMethod("Number", "doubleValue", null, false, doubleType, new Type[] {}, null, null); - - addMethod("CharSequence", "charAt", null, false, charType, new Type[] {intType}, null, null); - addMethod("CharSequence", "length", null, false, intType, new 
Type[] {}, null, null); - - addConstructor("String", "new", new Type[] {}, null); - addMethod("String", "codePointAt", null, false, intType, new Type[] {intType}, null, null); - addMethod("String", "compareTo", null, false, intType, new Type[] {stringType}, null, null); - addMethod("String", "concat", null, false, stringType, new Type[] {stringType}, null, null); - addMethod("String", "endsWith", null, false, booleanType, new Type[] {stringType}, null, null); - addMethod("String", "indexOf", null, false, intType, new Type[] {stringType}, null, null); - addMethod("String", "indexOf", null, false, intType, new Type[] {stringType, intType}, null, null); - addMethod("String", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("String", "replace", null, false, stringType, new Type[] {charseqType, charseqType}, null, null); - addMethod("String", "startsWith", null, false, booleanType, new Type[] {stringType}, null, null); - addMethod("String", "substring", null, false, stringType, new Type[] {intType, intType}, null, null); - addMethod("String", "toCharArray", null, false, getType(charType.struct, 1), new Type[] {}, null, null); - addMethod("String", "trim", null, false, stringType, new Type[] {}, null, null); - - addMethod("Utility", "NumberToboolean", null, true, booleanType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberTochar", null, true, charType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToBoolean", null, true, booleanobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToByte", null, true, byteobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToShort", null, true, shortobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToCharacter", null, true, charobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToInteger", null, true, intobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToLong", null, true, longobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToFloat", null, true, floatobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToDouble", null, true, doubleobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "booleanTobyte", null, true, byteType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToshort", null, true, shortType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTochar", null, true, charType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToint", null, true, intType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTolong", null, true, longType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTofloat", null, true, floatType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTodouble", null, true, doubleType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToInteger", null, true, intobjType, new Type[] {booleanType}, null, null); - addMethod("Utility", "BooleanTobyte", null, true, byteType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToshort", null, true, shortType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTochar", null, true, charType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToint", null, true, intType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", 
"BooleanTolong", null, true, longType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTofloat", null, true, floatType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTodouble", null, true, doubleType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToByte", null, true, byteobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToShort", null, true, shortobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToCharacter", null, true, charobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToInteger", null, true, intobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToLong", null, true, longobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToFloat", null, true, floatobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToDouble", null, true, doubleobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "byteToboolean", null, true, booleanType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToShort", null, true, shortobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToCharacter", null, true, charobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToInteger", null, true, intobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToLong", null, true, longobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToFloat", null, true, floatobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToDouble", null, true, doubleobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "ByteToboolean", null, true, booleanType, new Type[] {byteobjType}, null, null); - addMethod("Utility", "ByteTochar", null, true, charType, new Type[] {byteobjType}, null, null); - addMethod("Utility", "shortToboolean", null, true, booleanType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToByte", null, true, byteobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToCharacter", null, true, charobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToInteger", null, true, intobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToLong", null, true, longobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToFloat", null, true, floatobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToDouble", null, true, doubleobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "ShortToboolean", null, true, booleanType, new Type[] {shortobjType}, null, null); - addMethod("Utility", "ShortTochar", null, true, charType, new Type[] {shortobjType}, null, null); - addMethod("Utility", "charToboolean", null, true, booleanType, new Type[] {charType}, null, null); - addMethod("Utility", "charToByte", null, true, byteobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToShort", null, true, shortobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToInteger", null, true, intobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToLong", null, true, longobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToFloat", null, true, floatobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToDouble", null, true, doubleobjType, new 
Type[] {charType}, null, null); - addMethod("Utility", "charToString", null, true, stringType, new Type[] {charType}, null, null); - addMethod("Utility", "CharacterToboolean", null, true, booleanType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTobyte", null, true, byteType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToshort", null, true, shortType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToint", null, true, intType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTolong", null, true, longType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTofloat", null, true, floatType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTodouble", null, true, doubleType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToBoolean", null, true, booleanobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToByte", null, true, byteobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToShort", null, true, shortobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToInteger", null, true, intobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToLong", null, true, longobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToFloat", null, true, floatobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToDouble", null, true, doubleobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToString", null, true, stringType, new Type[] {charobjType}, null, null); - addMethod("Utility", "intToboolean", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Utility", "intToByte", null, true, byteobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToShort", null, true, shortobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToCharacter", null, true, charobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToLong", null, true, longobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToFloat", null, true, floatobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToDouble", null, true, doubleobjType, new Type[] {intType}, null, null); - addMethod("Utility", "IntegerToboolean", null, true, booleanType, new Type[] {intobjType}, null, null); - addMethod("Utility", "IntegerTochar", null, true, charType, new Type[] {intobjType}, null, null); - addMethod("Utility", "longToboolean", null, true, booleanType, new Type[] {longType}, null, null); - addMethod("Utility", "longToByte", null, true, byteobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToShort", null, true, shortobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToCharacter", null, true, charobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToInteger", null, true, intobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToFloat", null, true, floatobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToDouble", null, true, doubleobjType, new Type[] {longType}, null, null); - addMethod("Utility", "LongToboolean", null, true, booleanType, new Type[] {longobjType}, null, null); - addMethod("Utility", "LongTochar", null, true, charType, new Type[] {longobjType}, null, null); - addMethod("Utility", 
"floatToboolean", null, true, booleanType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToByte", null, true, byteobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToShort", null, true, shortobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToCharacter", null, true, charobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToInteger", null, true, intobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToLong", null, true, longobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToDouble", null, true, doubleobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "FloatToboolean", null, true, booleanType, new Type[] {floatobjType}, null, null); - addMethod("Utility", "FloatTochar", null, true, charType, new Type[] {floatobjType}, null, null); - addMethod("Utility", "doubleToboolean", null, true, booleanType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToByte", null, true, byteobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToShort", null, true, shortobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToCharacter", null, true, charobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToInteger", null, true, intobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToLong", null, true, longobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToFloat", null, true, floatobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); - addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); - addMethod("Utility", "StringTochar", null, true, charType, new Type[] {stringType}, null, null); - addMethod("Utility", "StringToCharacter", null, true, charobjType, new Type[] {stringType}, null, null); - - addMethod("Math", "abs", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "acos", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "asin", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "atan", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "atan2", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "cbrt", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "ceil", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "cos", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "cosh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "exp", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "expm1", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "floor", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "hypot", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "log", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "log10", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "log1p", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "max", null, true, 
doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "min", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "pow", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "random", null, true, doubleType, new Type[] {}, null, null); - addMethod("Math", "rint", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "round", null, true, longType, new Type[] {doubleType}, null, null); - addMethod("Math", "sin", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "sinh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "sqrt", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "tan", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "tanh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "toDegrees", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "toRadians", null, true, doubleType, new Type[] {doubleType}, null, null); - addField("Math", "E", null, true, doubleType, null); - addField("Math", "PI", null, true, doubleType, null); - - addMethod("Def", "DefTobyteImplicit", null, true, byteType, new Type[] {defType}, null, null); - addMethod("Def", "DefToshortImplicit", null, true, shortType, new Type[] {defType}, null, null); - addMethod("Def", "DefTocharImplicit", null, true, charType, new Type[] {defType}, null, null); - addMethod("Def", "DefTointImplicit", null, true, intType, new Type[] {defType}, null, null); - addMethod("Def", "DefTolongImplicit", null, true, longType, new Type[] {defType}, null, null); - addMethod("Def", "DefTofloatImplicit", null, true, floatType, new Type[] {defType}, null, null); - addMethod("Def", "DefTodoubleImplicit", null, true, doubleType, new Type[] {defType}, null, null); - addMethod("Def", "DefToByteImplicit", null, true, byteobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToShortImplicit", null, true, shortobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToCharacterImplicit", null, true, charobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToIntegerImplicit", null, true, intobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToLongImplicit", null, true, longobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToFloatImplicit", null, true, floatobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToDoubleImplicit", null, true, doubleobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefTobyteExplicit", null, true, byteType, new Type[] {defType}, null, null); - addMethod("Def", "DefToshortExplicit", null, true, shortType, new Type[] {defType}, null, null); - addMethod("Def", "DefTocharExplicit", null, true, charType, new Type[] {defType}, null, null); - addMethod("Def", "DefTointExplicit", null, true, intType, new Type[] {defType}, null, null); - addMethod("Def", "DefTolongExplicit", null, true, longType, new Type[] {defType}, null, null); - addMethod("Def", "DefTofloatExplicit", null, true, floatType, new Type[] {defType}, null, null); - addMethod("Def", "DefTodoubleExplicit", null, true, doubleType, new Type[] {defType}, null, null); - addMethod("Def", "DefToByteExplicit", null, true, byteobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToShortExplicit", null, true, shortobjType, new Type[] {defType}, 
null, null); - addMethod("Def", "DefToCharacterExplicit", null, true, charobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToIntegerExplicit", null, true, intobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToLongExplicit", null, true, longobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToFloatExplicit", null, true, floatobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToDoubleExplicit", null, true, doubleobjType, new Type[] {defType}, null, null); - - addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); - addMethod("Iterator", "next", null, false, objectType, new Type[] {}, defType, null); - addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); - - addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); - addMethod("Iterator", "next", null, false, objectType, new Type[] {}, null, null); - addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); - - addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); - addMethod("Iterator", "next", null, false, objectType, new Type[] {}, stringType, null); - addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); - - addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); - addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Collection", "iterator", null, false, itrType, new Type[] {}, null, null); - addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); - - addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); - addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Collection", "iterator", null, false, oitrType, new Type[] {}, null, null); - addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); - - addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); - addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Collection", "iterator", null, false, sitrType, new Type[] {}, null, null); - addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); - - addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, defType, new Type[] {intType, defType}); - addMethod("List", "get", null, false, 
objectType, new Type[] {intType}, defType, null); - addMethod("List", "remove", null, false, objectType, new Type[] {intType}, defType, null); - addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null); - - addConstructor("ArrayList", "new", new Type[] {}, null); - - addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, null, null); - addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null); - - addConstructor("ArrayList", "new", new Type[] {}, null); - - addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, stringType, - new Type[] {intType, stringType}); - addMethod("List", "get", null, false, objectType, new Type[] {intType}, stringType, null); - addMethod("List", "remove", null, false, objectType, new Type[] {intType}, stringType, null); - addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null); - - addConstructor("ArrayList", "new", new Type[] {}, null); - - addConstructor("HashSet", "new", new Type[] {}, null); - - addConstructor("HashSet", "new", new Type[] {}, null); - - addConstructor("HashSet", "new", new Type[] {}, null); - - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {defType, defType}); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {defType}); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Map", "keySet", null, false, osetType, new Type[] {}, setType, null); - addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, null); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Map", "keySet", null, false, osetType, new Type[] {}, null, null); - addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, - new Type[] {stringType, defType}); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); - 
addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); - addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, - new Type[] {stringType, objectType}); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); - addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); - addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("Exception", "getMessage", null, false, stringType, new Type[] {}, null, null); - - addConstructor("ArithmeticException", "new", new Type[] {stringType}, null); - - addConstructor("IllegalArgumentException", "new", new Type[] {stringType}, null); - - addConstructor("IllegalStateException", "new", new Type[] {stringType}, null); - - addConstructor("NumberFormatException", "new", new Type[] {stringType}, null); - - addMethod("GeoPoint", "getLat", null, false, doubleType, new Type[] {}, null, null); - addMethod("GeoPoint", "getLon", null, false, doubleType, new Type[] {}, null, null); - addMethod("Strings", "getValue", null, false, stringType, new Type[] {}, null, null); - addMethod("Strings", "getValues", null, false, slistType, new Type[] {}, null, null); - addMethod("Longs", "getValue", null, false, longType, new Type[] {}, null, null); - addMethod("Longs", "getValues", null, false, olistType, new Type[] {}, null, null); - // TODO: add better date support for Longs here? (carefully?) - addMethod("Doubles", "getValue", null, false, doubleType, new Type[] {}, null, null); - addMethod("Doubles", "getValues", null, false, olistType, new Type[] {}, null, null); - addMethod("GeoPoints", "getValue", null, false, geoPointType, new Type[] {}, null, null); - addMethod("GeoPoints", "getValues", null, false, olistType, new Type[] {}, null, null); - addMethod("GeoPoints", "getLat", null, false, doubleType, new Type[] {}, null, null); - addMethod("GeoPoints", "getLon", null, false, doubleType, new Type[] {}, null, null); - addMethod("GeoPoints", "getLats", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null); - addMethod("GeoPoints", "getLons", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null); - // geo distance functions... so many... 
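All of these hand-maintained addMethod/addField/addConstructor registrations (including the geo-distance batch that follows) are deleted in favor of the definition.txt resource parsed by the new addStructs() shown earlier in this file. The diff does not include definition.txt itself; judging only from the parser (token 2 must be "->", and a seven-token class line carries a comma-separated superclass/interface list in token 5), class entries presumably look like the hypothetical sketch below. The "extends" keyword and braces are guesses about the syntax; only token positions 1, 3, and 5 matter to the parser.

--------------------------------
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Hypothetical definition.txt class lines, inferred from addStructs() above.
public class DefinitionFileSketch {
    public static void main(String[] args) {
        String[] samples = {
            "class Object -> java.lang.Object {",                              // 5 tokens
            "class String -> java.lang.String extends CharSequence,Object {",  // 7 tokens
        };
        for (String line : samples) {
            String[] elements = line.split(" ");
            assert elements[2].equals("->");
            List<String> superTypes = elements.length == 7
                    ? Arrays.asList(elements[5].split(","))
                    : Collections.emptyList();
            System.out.println(elements[1] + " maps to " + elements[3]
                    + ", copies members from " + superTypes);
        }
    }
}
--------------------------------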
- addMethod("GeoPoints", "factorDistance", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "factorDistanceWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "factorDistance02", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "factorDistance13", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistance", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistanceWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistanceInKm", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistanceInKmWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistanceInMiles", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "arcDistanceInMilesWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distance", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distanceWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distanceInKm", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distanceInKmWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distanceInMiles", null, false, doubleType, - new Type[] { doubleType, doubleType }, null, null); - addMethod("GeoPoints", "distanceInMilesWithDefault", null, false, doubleType, - new Type[] { doubleType, doubleType, doubleType }, null, null); - addMethod("GeoPoints", "geohashDistance", null, false, doubleType, - new Type[] { stringType }, null, null); - addMethod("GeoPoints", "geohashDistanceInKm", null, false, doubleType, - new Type[] { stringType }, null, null); - addMethod("GeoPoints", "geohashDistanceInMiles", null, false, doubleType, - new Type[] { stringType }, null, null); - - // currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods - addConstructor("FeatureTest", "new", new Type[] {}, null); - addConstructor("FeatureTest", "new", new Type[] {intType, intType}, null); - addMethod("FeatureTest", "getX", null, false, intType, new Type[] {}, null, null); - addMethod("FeatureTest", "getY", null, false, intType, new Type[] {}, null, null); - addMethod("FeatureTest", "setX", null, false, voidType, new Type[] {intType}, null, null); - addMethod("FeatureTest", "setY", null, false, voidType, new Type[] {intType}, null, null); - addMethod("FeatureTest", "overloadedStatic", null, true, booleanType, new Type[] {}, null, null); - addMethod("FeatureTest", "overloadedStatic", null, true, booleanType, new Type[] {booleanType}, null, null); - } - - private void copyStructs() { - copyStruct("Void", "Object"); - copyStruct("Boolean", "Object"); - copyStruct("Byte", "Number", "Object"); - copyStruct("Short", "Number", "Object"); - copyStruct("Character", "Object"); - copyStruct("Integer", "Number", "Object"); - 
copyStruct("Long", "Number", "Object"); - copyStruct("Float", "Number", "Object"); - copyStruct("Double", "Number", "Object"); - - copyStruct("Number", "Object"); - copyStruct("CharSequence", "Object"); - copyStruct("String", "CharSequence", "Object"); - - copyStruct("List", "Collection", "Object"); - copyStruct("ArrayList", "List", "Collection", "Object"); - copyStruct("List", "Collection", "Object"); - copyStruct("ArrayList", "List", "Collection", "Object"); - copyStruct("List", "Collection", "Object"); - copyStruct("ArrayList", "List", "Collection", "Object"); - - copyStruct("Set", "Collection", "Object"); - copyStruct("HashSet", "Set", "Collection", "Object"); - copyStruct("Set", "Collection", "Object"); - copyStruct("HashSet", "Set", "Collection", "Object"); - copyStruct("Set", "Collection", "Object"); - copyStruct("HashSet", "Set", "Collection", "Object"); - - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - - copyStruct("Executable", "Object"); - - copyStruct("Exception", "Object"); - copyStruct("ArithmeticException", "Exception", "Object"); - copyStruct("IllegalArgumentException", "Exception", "Object"); - copyStruct("IllegalStateException", "Exception", "Object"); - copyStruct("NumberFormatException", "Exception", "Object"); - - copyStruct("GeoPoint", "Object"); - copyStruct("Strings", "List", "Collection", "Object"); - copyStruct("Longs", "List", "Collection", "Object"); - copyStruct("Doubles", "List", "Collection", "Object"); - copyStruct("GeoPoints", "List", "Collection", "Object"); - - copyStruct("FeatureTest", "Object"); - } - - private void addTransforms() { - addTransform(booleanType, objectType, "Boolean", "valueOf", true, false); - addTransform(booleanType, defType, "Boolean", "valueOf", true, false); - addTransform(booleanType, booleanobjType, "Boolean", "valueOf", true, false); - - addTransform(byteType, shortType, false); - addTransform(byteType, charType, true); - addTransform(byteType, intType, false); - addTransform(byteType, longType, false); - addTransform(byteType, floatType, false); - addTransform(byteType, doubleType, false); - addTransform(byteType, objectType, "Byte", "valueOf", true, false); - addTransform(byteType, defType, "Byte", "valueOf", true, false); - addTransform(byteType, numberType, "Byte", "valueOf", true, false); - addTransform(byteType, byteobjType, "Byte", "valueOf", true, false); - addTransform(byteType, shortobjType, "Utility", "byteToShort", true, false); - addTransform(byteType, charobjType, "Utility", "byteToCharacter", true, true); - addTransform(byteType, intobjType, "Utility", "byteToInteger", true, false); - addTransform(byteType, longobjType, "Utility", "byteToLong", true, false); - addTransform(byteType, floatobjType, "Utility", "byteToFloat", true, false); - addTransform(byteType, doubleobjType, "Utility", "byteToDouble", true, false); - - addTransform(shortType, byteType, true); - addTransform(shortType, charType, true); - addTransform(shortType, intType, false); - addTransform(shortType, longType, false); - addTransform(shortType, floatType, false); - addTransform(shortType, doubleType, false); - addTransform(shortType, objectType, "Short", "valueOf", true, false); - addTransform(shortType, defType, "Short", "valueOf", true, false); - addTransform(shortType, numberType, "Short", "valueOf", 
true, false); - addTransform(shortType, byteobjType, "Utility", "shortToByte", true, true); - addTransform(shortType, shortobjType, "Short", "valueOf", true, false); - addTransform(shortType, charobjType, "Utility", "shortToCharacter", true, true); - addTransform(shortType, intobjType, "Utility", "shortToInteger", true, false); - addTransform(shortType, longobjType, "Utility", "shortToLong", true, false); - addTransform(shortType, floatobjType, "Utility", "shortToFloat", true, false); - addTransform(shortType, doubleobjType, "Utility", "shortToDouble", true, false); - - addTransform(charType, byteType, true); - addTransform(charType, shortType, true); - addTransform(charType, intType, false); - addTransform(charType, longType, false); - addTransform(charType, floatType, false); - addTransform(charType, doubleType, false); - addTransform(charType, objectType, "Character", "valueOf", true, false); - addTransform(charType, defType, "Character", "valueOf", true, false); - addTransform(charType, numberType, "Utility", "charToInteger", true, false); - addTransform(charType, byteobjType, "Utility", "charToByte", true, true); - addTransform(charType, shortobjType, "Utility", "charToShort", true, true); - addTransform(charType, charobjType, "Character", "valueOf", true, false); - addTransform(charType, intobjType, "Utility", "charToInteger", true, false); - addTransform(charType, longobjType, "Utility", "charToLong", true, false); - addTransform(charType, floatobjType, "Utility", "charToFloat", true, false); - addTransform(charType, doubleobjType, "Utility", "charToDouble", true, false); - addTransform(charType, stringType, "Utility", "charToString", true, true); - - addTransform(intType, byteType, true); - addTransform(intType, shortType, true); - addTransform(intType, charType, true); - addTransform(intType, longType, false); - addTransform(intType, floatType, false); - addTransform(intType, doubleType, false); - addTransform(intType, objectType, "Integer", "valueOf", true, false); - addTransform(intType, defType, "Integer", "valueOf", true, false); - addTransform(intType, numberType, "Integer", "valueOf", true, false); - addTransform(intType, byteobjType, "Utility", "intToByte", true, true); - addTransform(intType, shortobjType, "Utility", "intToShort", true, true); - addTransform(intType, charobjType, "Utility", "intToCharacter", true, true); - addTransform(intType, intobjType, "Integer", "valueOf", true, false); - addTransform(intType, longobjType, "Utility", "intToLong", true, false); - addTransform(intType, floatobjType, "Utility", "intToFloat", true, false); - addTransform(intType, doubleobjType, "Utility", "intToDouble", true, false); - - addTransform(longType, byteType, true); - addTransform(longType, shortType, true); - addTransform(longType, charType, true); - addTransform(longType, intType, false); - addTransform(longType, floatType, false); - addTransform(longType, doubleType, false); - addTransform(longType, objectType, "Long", "valueOf", true, false); - addTransform(longType, defType, "Long", "valueOf", true, false); - addTransform(longType, numberType, "Long", "valueOf", true, false); - addTransform(longType, byteobjType, "Utility", "longToByte", true, true); - addTransform(longType, shortobjType, "Utility", "longToShort", true, true); - addTransform(longType, charobjType, "Utility", "longToCharacter", true, true); - addTransform(longType, intobjType, "Utility", "longToInteger", true, true); - addTransform(longType, longobjType, "Long", "valueOf", true, false); - 
addTransform(longType, floatobjType, "Utility", "longToFloat", true, false); - addTransform(longType, doubleobjType, "Utility", "longToDouble", true, false); - - addTransform(floatType, byteType, true); - addTransform(floatType, shortType, true); - addTransform(floatType, charType, true); - addTransform(floatType, intType, true); - addTransform(floatType, longType, false); - addTransform(floatType, doubleType, false); - addTransform(floatType, objectType, "Float", "valueOf", true, false); - addTransform(floatType, defType, "Float", "valueOf", true, false); - addTransform(floatType, numberType, "Float", "valueOf", true, false); - addTransform(floatType, byteobjType, "Utility", "floatToByte", true, true); - addTransform(floatType, shortobjType, "Utility", "floatToShort", true, true); - addTransform(floatType, charobjType, "Utility", "floatToCharacter", true, true); - addTransform(floatType, intobjType, "Utility", "floatToInteger", true, true); - addTransform(floatType, longobjType, "Utility", "floatToLong", true, true); - addTransform(floatType, floatobjType, "Float", "valueOf", true, false); - addTransform(floatType, doubleobjType, "Utility", "floatToDouble", true, false); - - addTransform(doubleType, byteType, true); - addTransform(doubleType, shortType, true); - addTransform(doubleType, charType, true); - addTransform(doubleType, intType, true); - addTransform(doubleType, longType, true); - addTransform(doubleType, floatType, false); - addTransform(doubleType, objectType, "Double", "valueOf", true, false); - addTransform(doubleType, defType, "Double", "valueOf", true, false); - addTransform(doubleType, numberType, "Double", "valueOf", true, false); - addTransform(doubleType, byteobjType, "Utility", "doubleToByte", true, true); - addTransform(doubleType, shortobjType, "Utility", "doubleToShort", true, true); - addTransform(doubleType, charobjType, "Utility", "doubleToCharacter", true, true); - addTransform(doubleType, intobjType, "Utility", "doubleToInteger", true, true); - addTransform(doubleType, longobjType, "Utility", "doubleToLong", true, true); - addTransform(doubleType, floatobjType, "Utility", "doubleToFloat", true, true); - addTransform(doubleType, doubleobjType, "Double", "valueOf", true, false); - - addTransform(objectType, booleanType, "Boolean", "booleanValue", false, true); - addTransform(objectType, byteType, "Number", "byteValue", false, true); - addTransform(objectType, shortType, "Number", "shortValue", false, true); - addTransform(objectType, charType, "Character", "charValue", false, true); - addTransform(objectType, intType, "Number", "intValue", false, true); - addTransform(objectType, longType, "Number", "longValue", false, true); - addTransform(objectType, floatType, "Number", "floatValue", false, true); - addTransform(objectType, doubleType, "Number", "doubleValue", false, true); - - addTransform(defType, booleanType, "Boolean", "booleanValue", false, false); - addTransform(defType, byteType, "Def", "DefTobyteImplicit", true, false); - addTransform(defType, shortType, "Def", "DefToshortImplicit", true, false); - addTransform(defType, charType, "Def", "DefTocharImplicit", true, false); - addTransform(defType, intType, "Def", "DefTointImplicit", true, false); - addTransform(defType, longType, "Def", "DefTolongImplicit", true, false); - addTransform(defType, floatType, "Def", "DefTofloatImplicit", true, false); - addTransform(defType, doubleType, "Def", "DefTodoubleImplicit", true, false); - addTransform(defType, byteobjType, "Def", "DefToByteImplicit", true, false); 
- addTransform(defType, shortobjType, "Def", "DefToShortImplicit", true, false); - addTransform(defType, charobjType, "Def", "DefToCharacterImplicit", true, false); - addTransform(defType, intobjType, "Def", "DefToIntegerImplicit", true, false); - addTransform(defType, longobjType, "Def", "DefToLongImplicit", true, false); - addTransform(defType, floatobjType, "Def", "DefToFloatImplicit", true, false); - addTransform(defType, doubleobjType, "Def", "DefToDoubleImplicit", true, false); - addTransform(defType, byteType, "Def", "DefTobyteExplicit", true, true); - addTransform(defType, shortType, "Def", "DefToshortExplicit", true, true); - addTransform(defType, charType, "Def", "DefTocharExplicit", true, true); - addTransform(defType, intType, "Def", "DefTointExplicit", true, true); - addTransform(defType, longType, "Def", "DefTolongExplicit", true, true); - addTransform(defType, floatType, "Def", "DefTofloatExplicit", true, true); - addTransform(defType, doubleType, "Def", "DefTodoubleExplicit", true, true); - addTransform(defType, byteobjType, "Def", "DefToByteExplicit", true, true); - addTransform(defType, shortobjType, "Def", "DefToShortExplicit", true, true); - addTransform(defType, charobjType, "Def", "DefToCharacterExplicit", true, true); - addTransform(defType, intobjType, "Def", "DefToIntegerExplicit", true, true); - addTransform(defType, longobjType, "Def", "DefToLongExplicit", true, true); - addTransform(defType, floatobjType, "Def", "DefToFloatExplicit", true, true); - addTransform(defType, doubleobjType, "Def", "DefToDoubleExplicit", true, true); - - addTransform(numberType, byteType, "Number", "byteValue", false, true); - addTransform(numberType, shortType, "Number", "shortValue", false, true); - addTransform(numberType, charType, "Utility", "NumberTochar", true, true); - addTransform(numberType, intType, "Number", "intValue", false, true); - addTransform(numberType, longType, "Number", "longValue", false, true); - addTransform(numberType, floatType, "Number", "floatValue", false, true); - addTransform(numberType, doubleType, "Number", "doubleValue", false, true); - addTransform(numberType, booleanobjType, "Utility", "NumberToBoolean", true, true); - addTransform(numberType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(numberType, shortobjType, "Utility", "NumberToShort", true, true); - addTransform(numberType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(numberType, intobjType, "Utility", "NumberToInteger", true, true); - addTransform(numberType, longobjType, "Utility", "NumberToLong", true, true); - addTransform(numberType, floatobjType, "Utility", "NumberToFloat", true, true); - addTransform(numberType, doubleobjType, "Utility", "NumberToDouble", true, true); - - addTransform(booleanobjType, booleanType, "Boolean", "booleanValue", false, false); - - addTransform(byteobjType, byteType, "Byte", "byteValue", false, false); - addTransform(byteobjType, shortType, "Byte", "shortValue", false, false); - addTransform(byteobjType, charType, "Utility", "ByteTochar", true, false); - addTransform(byteobjType, intType, "Byte", "intValue", false, false); - addTransform(byteobjType, longType, "Byte", "longValue", false, false); - addTransform(byteobjType, floatType, "Byte", "floatValue", false, false); - addTransform(byteobjType, doubleType, "Byte", "doubleValue", false, false); - addTransform(byteobjType, shortobjType, "Utility", "NumberToShort", true, false); - addTransform(byteobjType, charobjType, "Utility", "NumberToCharacter", true, 
false); - addTransform(byteobjType, intobjType, "Utility", "NumberToInteger", true, false); - addTransform(byteobjType, longobjType, "Utility", "NumberToLong", true, false); - addTransform(byteobjType, floatobjType, "Utility", "NumberToFloat", true, false); - addTransform(byteobjType, doubleobjType, "Utility", "NumberToDouble", true, false); - - addTransform(shortobjType, byteType, "Short", "byteValue", false, true); - addTransform(shortobjType, shortType, "Short", "shortValue", false, true); - addTransform(shortobjType, charType, "Utility", "ShortTochar", true, false); - addTransform(shortobjType, intType, "Short", "intValue", false, false); - addTransform(shortobjType, longType, "Short", "longValue", false, false); - addTransform(shortobjType, floatType, "Short", "floatValue", false, false); - addTransform(shortobjType, doubleType, "Short", "doubleValue", false, false); - addTransform(shortobjType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(shortobjType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(shortobjType, intobjType, "Utility", "NumberToInteger", true, false); - addTransform(shortobjType, longobjType, "Utility", "NumberToLong", true, false); - addTransform(shortobjType, floatobjType, "Utility", "NumberToFloat", true, false); - addTransform(shortobjType, doubleobjType, "Utility", "NumberToDouble", true, false); - - addTransform(charobjType, byteType, "Utility", "CharacterTobyte", true, true); - addTransform(charobjType, shortType, "Utility", "CharacterToshort", true, false); - addTransform(charobjType, charType, "Character", "charValue", false, true); - addTransform(charobjType, intType, "Utility", "CharacterToint", true, false); - addTransform(charobjType, longType, "Utility", "CharacterTolong", true, false); - addTransform(charobjType, floatType, "Utility", "CharacterTofloat", true, false); - addTransform(charobjType, doubleType, "Utility", "CharacterTodouble", true, false); - addTransform(charobjType, byteobjType, "Utility", "CharacterToByte", true, true); - addTransform(charobjType, shortobjType, "Utility", "CharacterToShort", true, true); - addTransform(charobjType, intobjType, "Utility", "CharacterToInteger", true, false); - addTransform(charobjType, longobjType, "Utility", "CharacterToLong", true, false); - addTransform(charobjType, floatobjType, "Utility", "CharacterToFloat", true, false); - addTransform(charobjType, doubleobjType, "Utility", "CharacterToDouble", true, false); - addTransform(charobjType, stringType, "Utility", "CharacterToString", true, true); - - addTransform(intobjType, byteType, "Integer", "byteValue", false, true); - addTransform(intobjType, shortType, "Integer", "shortValue", false, true); - addTransform(intobjType, charType, "Utility", "IntegerTochar", true, true); - addTransform(intobjType, intType, "Integer", "intValue", false, false); - addTransform(intobjType, longType, "Integer", "longValue", false, false); - addTransform(intobjType, floatType, "Integer", "floatValue", false, false); - addTransform(intobjType, doubleType, "Integer", "doubleValue", false, false); - addTransform(intobjType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(intobjType, shortobjType, "Utility", "NumberToShort", true, true); - addTransform(intobjType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(intobjType, longobjType, "Utility", "NumberToLong", true, false); - addTransform(intobjType, floatobjType, "Utility", "NumberToFloat", true, false); - addTransform(intobjType, 
doubleobjType, "Utility", "NumberToDouble", true, false); - - addTransform(longobjType, byteType, "Long", "byteValue", false, true); - addTransform(longobjType, shortType, "Long", "shortValue", false, true); - addTransform(longobjType, charType, "Utility", "LongTochar", true, true); - addTransform(longobjType, intType, "Long", "intValue", false, true); - addTransform(longobjType, longType, "Long", "longValue", false, false); - addTransform(longobjType, floatType, "Long", "floatValue", false, false); - addTransform(longobjType, doubleType, "Long", "doubleValue", false, false); - addTransform(longobjType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(longobjType, shortobjType, "Utility", "NumberToShort", true, true); - addTransform(longobjType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(longobjType, intobjType, "Utility", "NumberToInteger", true, true); - addTransform(longobjType, floatobjType, "Utility", "NumberToFloat", true, false); - addTransform(longobjType, doubleobjType, "Utility", "NumberToDouble", true, false); - - addTransform(floatobjType, byteType, "Float", "byteValue", false, true); - addTransform(floatobjType, shortType, "Float", "shortValue", false, true); - addTransform(floatobjType, charType, "Utility", "FloatTochar", true, true); - addTransform(floatobjType, intType, "Float", "intValue", false, true); - addTransform(floatobjType, longType, "Float", "longValue", false, true); - addTransform(floatobjType, floatType, "Float", "floatValue", false, false); - addTransform(floatobjType, doubleType, "Float", "doubleValue", false, false); - addTransform(floatobjType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(floatobjType, shortobjType, "Utility", "NumberToShort", true, true); - addTransform(floatobjType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(floatobjType, intobjType, "Utility", "NumberToInteger", true, true); - addTransform(floatobjType, longobjType, "Utility", "NumberToLong", true, true); - addTransform(floatobjType, doubleobjType, "Utility", "NumberToDouble", true, false); - - addTransform(doubleobjType, byteType, "Double", "byteValue", false, true); - addTransform(doubleobjType, shortType, "Double", "shortValue", false, true); - addTransform(doubleobjType, charType, "Utility", "DoubleTochar", true, true); - addTransform(doubleobjType, intType, "Double", "intValue", false, true); - addTransform(doubleobjType, longType, "Double", "longValue", false, true); - addTransform(doubleobjType, floatType, "Double", "floatValue", false, true); - addTransform(doubleobjType, doubleType, "Double", "doubleValue", false, false); - addTransform(doubleobjType, byteobjType, "Utility", "NumberToByte", true, true); - addTransform(doubleobjType, shortobjType, "Utility", "NumberToShort", true, true); - addTransform(doubleobjType, charobjType, "Utility", "NumberToCharacter", true, true); - addTransform(doubleobjType, intobjType, "Utility", "NumberToInteger", true, true); - addTransform(doubleobjType, longobjType, "Utility", "NumberToLong", true, true); - addTransform(doubleobjType, floatobjType, "Utility", "NumberToFloat", true, true); - - addTransform(stringType, charType, "Utility", "StringTochar", true, true); - addTransform(stringType, charobjType, "Utility", "StringToCharacter", true, true); - } - - private void addRuntimeClasses() { - addRuntimeClass(booleanType.struct); - addRuntimeClass(byteType.struct); - addRuntimeClass(shortType.struct); - addRuntimeClass(charType.struct); - 
addRuntimeClass(intType.struct); - addRuntimeClass(longType.struct); - addRuntimeClass(floatType.struct); - addRuntimeClass(doubleType.struct); - - addRuntimeClass(booleanobjType.struct); - addRuntimeClass(byteobjType.struct); - addRuntimeClass(shortobjType.struct); - addRuntimeClass(charobjType.struct); - addRuntimeClass(intobjType.struct); - addRuntimeClass(longobjType.struct); - addRuntimeClass(floatobjType.struct); - addRuntimeClass(doubleobjType.struct); - - addRuntimeClass(objectType.struct); - addRuntimeClass(numberType.struct); - addRuntimeClass(charseqType.struct); - addRuntimeClass(stringType.struct); - - addRuntimeClass(oitrType.struct); - addRuntimeClass(ocollectionType.struct); - addRuntimeClass(olistType.struct); - addRuntimeClass(oarraylistType.struct); - addRuntimeClass(osetType.struct); - addRuntimeClass(ohashsetType.struct); - addRuntimeClass(oomapType.struct); - addRuntimeClass(oohashmapType.struct); - - addRuntimeClass(exceptionType.struct); - - addRuntimeClass(geoPointType.struct); - addRuntimeClass(stringsType.struct); - addRuntimeClass(longsType.struct); - addRuntimeClass(doublesType.struct); - addRuntimeClass(geoPointsType.struct); - - addRuntimeClass(featureTestType.struct); + int currentLine = -1; + try { + try (InputStream stream = Definition.class.getResourceAsStream(DEFINITION_FILE); + LineNumberReader reader = new LineNumberReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { + String line = null; + String currentClass = null; + while ((line = reader.readLine()) != null) { + currentLine = reader.getLineNumber(); + line = line.trim(); + if (line.length() == 0 || line.charAt(0) == '#') { + continue; + } else if (line.startsWith("class ")) { + assert currentClass == null; + currentClass = line.split("\u0020")[1]; + } else if (line.equals("}")) { + assert currentClass != null; + currentClass = null; + } else { + assert currentClass != null; + addSignature(currentClass, line); + } + } + } + } catch (Exception e) { + throw new RuntimeException("syntax error in definition line: " + currentLine, e); + } } private final void addStruct(final String name, final Class clazz) { @@ -1599,9 +526,10 @@ public final class Definition { final Struct struct = new Struct(name, clazz, org.objectweb.asm.Type.getType(clazz)); structsMap.put(name, struct); + simpleTypesMap.put(name, getTypeInternal(name)); } - private final void addConstructor(final String struct, final String name, final Type[] args, final Type[] genargs) { + private final void addConstructorInternal(final String struct, final String name, final Type[] args) { final Struct owner = structsMap.get(struct); if (owner == null) { @@ -1634,14 +562,6 @@ public final class Definition { final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { - if (genargs != null) { - if (!args[count].clazz.isAssignableFrom(genargs[count].clazz)) { - throw new ClassCastException("Generic argument [" + genargs[count].name + "]" + - " is not a sub class of [" + args[count].name + "] in the constructor" + - " [" + name + " ] from the struct [" + owner.name + "]."); - } - } - classes[count] = args[count].clazz; } @@ -1655,79 +575,96 @@ public final class Definition { } final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); - final Constructor constructor = - new Constructor(name, owner, Arrays.asList(genargs != null ? 
genargs : args), asm, reflect); + final Constructor constructor = new Constructor(name, owner, Arrays.asList(args), asm, reflect); owner.constructors.put(methodKey, constructor); } - private final void addMethod(final String struct, final String name, final String alias, final boolean statik, - final Type rtn, final Type[] args, final Type genrtn, final Type[] genargs) { + /** + * Adds a new signature to the definition. + *
+     * <p>
+     * Signatures have the following forms:
+     * <ul>
+     *   <li>{@code void method(String,int)}
+     *   <li>{@code boolean field}
+     *   <li>{@code Class <init>(String)}
+     * </ul>
+     * no spaces allowed.
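+     * <p>
+     * For example, a definition file in this format could contain the following
+     * (an illustrative sketch of the syntax only; the entries are hypothetical,
+     * not the shipped definition file):
+     * <pre>
+     * # lines starting with '#' are comments; blank lines are skipped
+     * class List {
+     *   Object get(int)
+     *   int getLength/size()
+     * }
+     * class ArrayList {
+     *   ArrayList <init>()
+     * }
+     * </pre>
+     * {@code getLength/size} exposes the Java method {@code size} under the painless
+     * name {@code getLength}, and {@code <init>} declares a constructor, whose return
+     * type must be the enclosing class itself.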
+     */
+    private final void addSignature(String className, String signature) {
+        String elements[] = signature.split("\u0020");
+        if (elements.length != 2) {
+            throw new IllegalArgumentException("Malformed signature: " + signature);
+        }
+        // method or field type (e.g. return type)
+        Type rtn = getTypeInternal(elements[0]);
+        int parenIndex = elements[1].indexOf('(');
+        if (parenIndex != -1) {
+            // method or ctor
+            int parenEnd = elements[1].indexOf(')');
+            final Type args[];
+            if (parenEnd > parenIndex + 1) {
+                String arguments[] = elements[1].substring(parenIndex + 1, parenEnd).split(",");
+                args = new Type[arguments.length];
+                for (int i = 0; i < arguments.length; i++) {
+                    args[i] = getTypeInternal(arguments[i]);
+                }
+            } else {
+                args = new Type[0];
+            }
+            String methodName = elements[1].substring(0, parenIndex);
+            if (methodName.equals("<init>")) {
+                if (!elements[0].equals(className)) {
+                    throw new IllegalArgumentException("Constructors must return their own type");
+                }
+                addConstructorInternal(className, "new", args);
+            } else {
+                if (methodName.indexOf('/') >= 0) {
+                    String nameAndAlias[] = methodName.split("/");
+                    if (nameAndAlias.length != 2) {
+                        throw new IllegalArgumentException("Currently only two aliases are allowed!");
+                    }
+                    addMethodInternal(className, nameAndAlias[0], nameAndAlias[1], rtn, args);
+                } else {
+                    addMethodInternal(className, methodName, null, rtn, args);
+                }
+            }
+        } else {
+            // field
+            addFieldInternal(className, elements[1], null, rtn);
+        }
+    }
+
+    private final void addMethodInternal(final String struct, final String name, final String alias,
+                                         final Type rtn, final Type[] args) {
        final Struct owner = structsMap.get(struct);

        if (owner == null) {
            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined" +
-                " for " + (statik ? "function" : "method") + " [" + name + "].");
+                " for method [" + name + "].");
        }

        if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) {
-            throw new IllegalArgumentException("Invalid " + (statik ? "static method" : "method") +
-                " name [" + name + "] with the struct [" + owner.name + "].");
+            throw new IllegalArgumentException("Invalid method name" +
+                " [" + name + "] with the struct [" + owner.name + "].");
        }

        MethodKey methodKey = new MethodKey(name, args.length);

        if (owner.constructors.containsKey(methodKey)) {
-            throw new IllegalArgumentException("Constructors and " + (statik ?
"static methods" : "methods") + + throw new IllegalArgumentException("Constructors and methods" + " may not have the same signature [" + methodKey + "] within the same struct" + " [" + owner.name + "]."); } - if (owner.staticMethods.containsKey(methodKey)) { - if (statik) { - throw new IllegalArgumentException( - "Duplicate static method signature [" + methodKey + "] found within the struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Static methods and methods may not have the same signature" + - " [" + methodKey + "] within the same struct [" + owner.name + "]."); - } - } - - if (owner.methods.containsKey(methodKey)) { - if (statik) { - throw new IllegalArgumentException("Static methods and methods may not have the same signature" + - " [" + methodKey + "] within the same struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Duplicate method signature [" + methodKey + "]" + - " found within the struct [" + owner.name + "]."); - } - } - - if (genrtn != null) { - if (!rtn.clazz.isAssignableFrom(genrtn.clazz)) { - throw new ClassCastException("Generic return [" + genrtn.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + - " [" + name + " ] from the struct [" + owner.name + "]."); - } - } - - if (genargs != null && genargs.length != args.length) { - throw new IllegalArgumentException("Generic arguments arity [" + genargs.length + "] is not the same as " + - (statik ? "function" : "method") + " [" + name + "] arguments arity" + - " [" + args.length + "] within the struct [" + owner.name + "]."); + if (owner.staticMethods.containsKey(methodKey) || owner.methods.containsKey(methodKey)) { + throw new IllegalArgumentException( + "Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "]."); } final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { - if (genargs != null) { - if (!args[count].clazz.isAssignableFrom(genargs[count].clazz)) { - throw new ClassCastException("Generic argument [" + genargs[count].name + "] is not a sub class" + - " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + - " [" + name + " ] from the struct [" + owner.name + "]."); - } - } - classes[count] = args[count].clazz; } @@ -1736,15 +673,15 @@ public final class Definition { try { reflect = owner.clazz.getMethod(alias == null ? name : alias, classes); } catch (final NoSuchMethodException exception) { - throw new IllegalArgumentException((statik ? "Function" : "Method") + - " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + + throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + + "] not found for class [" + owner.clazz.getName() + "]" + " with arguments " + Arrays.toString(classes) + "."); } if (!reflect.getReturnType().equals(rtn.clazz)) { throw new IllegalArgumentException("Specified return type class [" + rtn.clazz + "]" + - " does not match the found return type class [" + reflect.getReturnType() + "] for the " + - (statik ? "function" : "method") + " [" + name + "]" + + " does not match the found return type class [" + reflect.getReturnType() + "] for the" + + " method [" + name + "]" + " within the struct [" + owner.name + "]."); } @@ -1760,67 +697,33 @@ public final class Definition { " with arguments " + Arrays.toString(classes) + "."); } - final Method method = new Method(name, owner, genrtn != null ? 
genrtn : rtn, - Arrays.asList(genargs != null ? genargs : args), asm, reflect, handle); + final Method method = new Method(name, owner, rtn, Arrays.asList(args), asm, reflect, handle); final int modifiers = reflect.getModifiers(); - if (statik) { - if (!java.lang.reflect.Modifier.isStatic(modifiers)) { - throw new IllegalArgumentException("Function [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a static Java method."); - } - + if (java.lang.reflect.Modifier.isStatic(modifiers)) { owner.staticMethods.put(methodKey, method); } else { - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - throw new IllegalArgumentException("Method [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a non-static Java method."); - } - owner.methods.put(methodKey, method); } } - private final void addField(final String struct, final String name, final String alias, - final boolean statik, final Type type, final Type generic) { + private final void addFieldInternal(final String struct, final String name, final String alias, + final Type type) { final Struct owner = structsMap.get(struct); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for " + - (statik ? "static" : "member") + " [" + name + "]."); + " field [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { - throw new IllegalArgumentException("Invalid " + (statik ? "static" : "member") + + throw new IllegalArgumentException("Invalid field " + " name [" + name + "] with the struct [" + owner.name + "]."); } - if (owner.staticMembers.containsKey(name)) { - if (statik) { - throw new IllegalArgumentException("Duplicate static name [" + name + "]" + - " found within the struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); - } - } - - if (owner.members.containsKey(name)) { - if (statik) { - throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Duplicate member name [" + name + "]" + - " found within the struct [" + owner.name + "]."); - } - } - - if (generic != null) { - if (!type.clazz.isAssignableFrom(generic.clazz)) { - throw new ClassCastException("Generic type [" + generic.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + - " [" + name + " ] from the struct [" + owner.name + "]."); - } + if (owner.staticMembers.containsKey(name) || owner.members.containsKey(name)) { + throw new IllegalArgumentException("Duplicate field name [" + name + "]" + + " found within the struct [" + owner.name + "]."); } java.lang.reflect.Field reflect; @@ -1832,11 +735,14 @@ public final class Definition { " not found for class [" + owner.clazz.getName() + "]."); } + final int modifiers = reflect.getModifiers(); + boolean isStatic = java.lang.reflect.Modifier.isStatic(modifiers); + MethodHandle getter = null; MethodHandle setter = null; try { - if (!statik) { + if (!isStatic) { getter = MethodHandles.publicLookup().unreflectGetter(reflect); setter = MethodHandles.publicLookup().unreflectSetter(reflect); } @@ -1845,42 +751,33 @@ public final class Definition { " not found for class [" + owner.clazz.getName() + "]."); } - final Field field = new Field(name, owner, generic == null ? 
                type : generic, type, reflect, getter, setter);
-        final int modifiers = reflect.getModifiers();
-
-        if (statik) {
-            if (!java.lang.reflect.Modifier.isStatic(modifiers)) {
-                throw new IllegalArgumentException();
-            }
+        final Field field = new Field(name, owner, type, reflect, getter, setter);

+        if (isStatic) {
+            // require that all static fields are static final
            if (!java.lang.reflect.Modifier.isFinal(modifiers)) {
                throw new IllegalArgumentException("Static [" + name + "]" +
-                    " within the struct [" + owner.name + "] is not linked to static Java field.");
+                    " within the struct [" + owner.name + "] is not final.");
            }

            owner.staticMembers.put(alias == null ? name : alias, field);
        } else {
-            if (java.lang.reflect.Modifier.isStatic(modifiers)) {
-                throw new IllegalArgumentException("Member [" + name + "]" +
-                    " within the struct [" + owner.name + "] is not linked to non-static Java field.");
-            }
-
            owner.members.put(alias == null ? name : alias, field);
        }
    }

-    private final void copyStruct(final String struct, final String... children) {
+    private final void copyStruct(final String struct, List<String> children) {
        final Struct owner = structsMap.get(struct);

        if (owner == null) {
            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
        }

-        for (int count = 0; count < children.length; ++count) {
-            final Struct child = structsMap.get(children[count]);
+        for (int count = 0; count < children.size(); ++count) {
+            final Struct child = structsMap.get(children.get(count));

            if (child == null) {
-                throw new IllegalArgumentException("Child struct [" + children[count] + "]" +
+                throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
                    " not defined for copy to owner struct [" + owner.name + "].");
            }
@@ -1944,143 +841,12 @@
                }
            }

            owner.members.put(field.name,
-                new Field(field.name, owner, field.type, field.generic, reflect, getter, setter));
+                new Field(field.name, owner, field.type, reflect, getter, setter));
        }
    }

-    private final void addTransform(final Type from, final Type to, final boolean explicit) {
-        if (from.equals(to)) {
-            throw new IllegalArgumentException("Transform cannot" +
-                " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "].");
-        }
-
-        if (!from.sort.primitive || !to.sort.primitive) {
-            throw new IllegalArgumentException("Only transforms between two primitives may be a simple cast, but" +
-                "found [" + from.name + "] and [" + to.name + "].");
-        }
-
-        final Cast cast = new Cast(from, to, explicit);
-
-        if (transformsMap.containsKey(cast)) {
-            throw new IllegalArgumentException("Transform with " +
-                " cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined.");
-        }
-
-        transformsMap.put(cast, cast);
-    }
-
-    private final void addTransform(final Type from, final Type to, final String struct,
-                                    final String name, final boolean statik, final boolean explicit) {
-        final Struct owner = structsMap.get(struct);
-
-        if (owner == null) {
-            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for" +
-                " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "].");
-        }
-
-        if (from.equals(to)) {
-            throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "] cannot" +
-                " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "].");
-        }
-
-        final Cast cast = new Cast(from, to, explicit);
-
-        if (transformsMap.containsKey(cast)) {
-            throw new
IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); - } - - final Cast transform; - - final Method method; - Type upcast = null; - Type downcast = null; - - // transforms are implicitly arity of 0, unless a static method where its 1 (receiver passed) - final MethodKey methodKey = new MethodKey(name, statik ? 1 : 0); - - if (statik) { - method = owner.staticMethods.get(methodKey); - - if (method == null) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a function [" + name + "] that is not defined."); - } - - if (method.arguments.size() != 1) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using function [" + name + "] does not have a single type argument."); - } - - Type argument = method.arguments.get(0); - - if (!argument.clazz.isAssignableFrom(from.clazz)) { - if (from.clazz.isAssignableFrom(argument.clazz)) { - upcast = argument; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast from type to the function input argument type."); - } - } - - final Type rtn = method.rtn; - - if (!to.clazz.isAssignableFrom(rtn.clazz)) { - if (rtn.clazz.isAssignableFrom(to.clazz)) { - downcast = to; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast to type to the function return argument type."); - } - } - } else { - method = owner.methods.get(methodKey); - - if (method == null) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a method [" + name + "] that is not defined."); - } - - if (!method.arguments.isEmpty()) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using method [" + name + "] does not have a single type argument."); - } - - if (!owner.clazz.isAssignableFrom(from.clazz)) { - if (from.clazz.isAssignableFrom(owner.clazz)) { - upcast = getType(owner.name); - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " method [" + name + "] cannot cast from type to the method input argument type."); - } - } - - final Type rtn = method.rtn; - - if (!to.clazz.isAssignableFrom(rtn.clazz)) { - if (rtn.clazz.isAssignableFrom(to.clazz)) { - downcast = to; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + - " using method [" + name + "] cannot cast to type to the method return argument type."); - } - } - } - - transform = new Transform(cast, method, upcast, downcast); - transformsMap.put(cast, transform); - } - /** * Precomputes a more efficient structure for dynamic method/field access. 
*/ @@ -2132,7 +898,12 @@ public final class Definition { runtimeMap.put(struct.clazz, new RuntimeClass(methods, getters, setters)); } - public final Type getType(final String name) { + private Type getTypeInternal(final String name) { + // simple types (e.g. 0 array dimensions) are a simple hash lookup for speed + Type simple = simpleTypesMap.get(name); + if (simple != null) { + return simple; + } final int dimensions = getDimensions(name); final String structstr = dimensions == 0 ? name : name.substring(0, name.indexOf('[')); final Struct struct = structsMap.get(structstr); @@ -2141,10 +912,10 @@ public final class Definition { throw new IllegalArgumentException("The struct with name [" + name + "] has not been defined."); } - return getType(struct, dimensions); + return getTypeInternal(struct, dimensions); } - public final Type getType(final Struct struct, final int dimensions) { + private Type getTypeInternal(final Struct struct, final int dimensions) { String name = struct.name; org.objectweb.asm.Type type = struct.type; Class clazz = struct.clazz; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index fbb1d246a83..90c02b7e801 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -21,7 +21,6 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Transform; import org.elasticsearch.painless.Definition.Type; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Label; @@ -34,10 +33,7 @@ import java.util.ArrayList; import java.util.Deque; import java.util.List; -import static org.elasticsearch.painless.WriterConstants.ADDEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.ADDEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.CHAR_TO_STRING; import static org.elasticsearch.painless.WriterConstants.DEF_ADD_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_AND_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_DIV_CALL; @@ -47,21 +43,27 @@ import static org.elasticsearch.painless.WriterConstants.DEF_OR_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_REM_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_RSH_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_SUB_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BOOLEAN; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BYTE_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BYTE_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_CHAR_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_CHAR_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_DOUBLE_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_DOUBLE_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_FLOAT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_FLOAT_IMPLICIT; +import static 
org.elasticsearch.painless.WriterConstants.DEF_TO_INT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_INT_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_LONG_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_LONG_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_USH_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; import static org.elasticsearch.painless.WriterConstants.DEF_XOR_CALL; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_INT; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_LONG; import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS; -import static org.elasticsearch.painless.WriterConstants.MULEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.MULEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_FLOAT; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; -import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_FLOAT; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_CHAR; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_DOUBLE; @@ -73,29 +75,9 @@ import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_ST import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_CONSTRUCTOR; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TOSTRING; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TYPE; +import static org.elasticsearch.painless.WriterConstants.STRING_TO_CHAR; import static org.elasticsearch.painless.WriterConstants.STRING_TYPE; -import static org.elasticsearch.painless.WriterConstants.SUBEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.SUBEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOFLOATWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOINTEXACT_LONG; -import 
static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.UTILITY_TYPE; /** * Extension of {@link GeneratorAdapter} with some utility methods. @@ -132,49 +114,84 @@ public final class MethodWriter extends GeneratorAdapter { visitVarInsn(Opcodes.ILOAD, slot); push(0); ifICmp(GeneratorAdapter.GT, end); - throwException(PAINLESS_ERROR_TYPE, - "The maximum number of statements that can be executed in a loop has been reached."); + throwException(PAINLESS_ERROR_TYPE, "The maximum number of statements that can be executed in a loop has been reached."); mark(end); } } public void writeCast(final Cast cast) { - if (cast instanceof Transform) { - final Transform transform = (Transform)cast; - - if (transform.upcast != null) { - checkCast(transform.upcast.type); - } - - if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) { - invokeStatic(transform.method.owner.type, transform.method.method); - } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) { - invokeInterface(transform.method.owner.type, transform.method.method); - } else { - invokeVirtual(transform.method.owner.type, transform.method.method); - } - - if (transform.downcast != null) { - checkCast(transform.downcast.type); - } - } else if (cast != null) { + if (cast != null) { final Type from = cast.from; final Type to = cast.to; - if (from.equals(to)) { - return; - } - - if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { - cast(from.type, to.type); - } else { - if (!to.clazz.isAssignableFrom(from.clazz)) { - checkCast(to.type); + if (from.sort == Sort.CHAR && to.sort == Sort.STRING) { + invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); + } else if (from.sort == Sort.STRING && to.sort == Sort.CHAR) { + invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); + } else if (cast.unboxFrom) { + if (from.sort == Sort.DEF) { + if (cast.explicit) { + if (to.sort == Sort.BOOL) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (to.sort == Sort.BYTE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); + else if (to.sort == Sort.SHORT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); + else if (to.sort == Sort.CHAR) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); + else if (to.sort == Sort.INT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); + else if (to.sort == Sort.LONG) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); + else if (to.sort == Sort.FLOAT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); + else if (to.sort == Sort.DOUBLE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); + else throw new IllegalStateException("Illegal tree structure."); + } else { + if (to.sort == Sort.BOOL) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (to.sort == Sort.BYTE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); + else if (to.sort == Sort.SHORT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); + else if (to.sort 
== Sort.CHAR) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); + else if (to.sort == Sort.INT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); + else if (to.sort == Sort.LONG) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); + else if (to.sort == Sort.FLOAT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); + else if (to.sort == Sort.DOUBLE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); + else throw new IllegalStateException("Illegal tree structure."); + } + } else { + unbox(from.type); + writeCast(from, to); } + } else if (cast.unboxTo) { + writeCast(from, to); + unbox(to.type); + } else if (cast.boxFrom) { + box(from.type); + writeCast(from, to); + } else if (cast.boxTo) { + writeCast(from, to); + box(to.type); + } else { + writeCast(from, to); } } } + private void writeCast(final Type from, final Type to) { + if (from.equals(to)) { + return; + } + + if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { + cast(from.type, to.type); + } else { + if (!to.clazz.isAssignableFrom(from.clazz)) { + checkCast(to.type); + } + } + } + + /** + * Proxy the box method to use valueOf instead to ensure that the modern boxing methods are used. + */ + @Override + public void box(org.objectweb.asm.Type type) { + valueOf(type); + } + public void writeBranch(final Label tru, final Label fals) { if (tru != null) { visitJumpInsn(Opcodes.IFNE, tru); @@ -182,7 +199,7 @@ public final class MethodWriter extends GeneratorAdapter { visitJumpInsn(Opcodes.IFEQ, fals); } } - + public void writeNewStrings() { if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) { // Java 9+: we just push our argument collector onto deque @@ -236,231 +253,51 @@ public final class MethodWriter extends GeneratorAdapter { } } - public void writeBinaryInstruction(final CompilerSettings settings, final Definition definition, - final String location, - final Type type, final Operation operation) { + public void writeBinaryInstruction(final String location, final Type type, final Operation operation) { final Sort sort = type.sort; - boolean exact = !settings.getNumericOverflow() && - ((sort == Sort.INT || sort == Sort.LONG) && - (operation == Operation.MUL || operation == Operation.DIV || - operation == Operation.ADD || operation == Operation.SUB) || - (sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || - operation == Operation.ADD || operation == Operation.SUB)); - if (exact) { - switch (sort) { - case INT: - switch (operation) { - case MUL: invokeStatic(definition.mathType.type, MULEXACT_INT); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break; - case ADD: invokeStatic(definition.mathType.type, ADDEXACT_INT); break; - case SUB: invokeStatic(definition.mathType.type, SUBEXACT_INT); break; - } + if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (operation == Operation.LSH || operation == Operation.USH || + operation == Operation.RSH || operation == Operation.BWAND || + operation == Operation.XOR || operation == Operation.BWOR)) { + throw new IllegalStateException("Error " + location + ": Illegal tree structure."); + } - break; - case LONG: - switch (operation) { - case MUL: invokeStatic(definition.mathType.type, MULEXACT_LONG); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break; - case ADD: invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; - case SUB: invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; - } - - break; - 
case FLOAT: - switch (operation) { - case MUL: invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break; - case REM: invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break; - case ADD: invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; - case SUB: invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - break; - case DOUBLE: - switch (operation) { - case MUL: invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break; - case REM: invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break; - case ADD: invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; - case SUB: invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - break; + if (sort == Sort.DEF) { + switch (operation) { + case MUL: invokeStatic(DEF_UTIL_TYPE, DEF_MUL_CALL); break; + case DIV: invokeStatic(DEF_UTIL_TYPE, DEF_DIV_CALL); break; + case REM: invokeStatic(DEF_UTIL_TYPE, DEF_REM_CALL); break; + case ADD: invokeStatic(DEF_UTIL_TYPE, DEF_ADD_CALL); break; + case SUB: invokeStatic(DEF_UTIL_TYPE, DEF_SUB_CALL); break; + case LSH: invokeStatic(DEF_UTIL_TYPE, DEF_LSH_CALL); break; + case USH: invokeStatic(DEF_UTIL_TYPE, DEF_RSH_CALL); break; + case RSH: invokeStatic(DEF_UTIL_TYPE, DEF_USH_CALL); break; + case BWAND: invokeStatic(DEF_UTIL_TYPE, DEF_AND_CALL); break; + case XOR: invokeStatic(DEF_UTIL_TYPE, DEF_XOR_CALL); break; + case BWOR: invokeStatic(DEF_UTIL_TYPE, DEF_OR_CALL); break; default: throw new IllegalStateException("Error " + location + ": Illegal tree structure."); } } else { - if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (operation == Operation.LSH || operation == Operation.USH || - operation == Operation.RSH || operation == Operation.BWAND || - operation == Operation.XOR || operation == Operation.BWOR)) { - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - if (sort == Sort.DEF) { - switch (operation) { - case MUL: invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break; - case DIV: invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break; - case REM: invokeStatic(definition.defobjType.type, DEF_REM_CALL); break; - case ADD: invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break; - case SUB: invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break; - case LSH: invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break; - case USH: invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break; - case RSH: invokeStatic(definition.defobjType.type, DEF_USH_CALL); break; - case BWAND: invokeStatic(definition.defobjType.type, DEF_AND_CALL); break; - case XOR: invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; - case BWOR: invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - } else { - switch (operation) { - case MUL: math(GeneratorAdapter.MUL, type.type); break; - case DIV: math(GeneratorAdapter.DIV, type.type); break; - case REM: math(GeneratorAdapter.REM, type.type); break; - case ADD: math(GeneratorAdapter.ADD, type.type); break; - case SUB: 
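// A hedged sketch, not part of the patch: for def-typed operands the switch
// above emits calls to the static helpers on Def, which pick the arithmetic
// at runtime from the actual boxed types. The promotion shown is assumed:
Object product = Def.mul(Integer.valueOf(2), Double.valueOf(3.5)); // a Double, 7.0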
math(GeneratorAdapter.SUB, type.type); break; - case LSH: math(GeneratorAdapter.SHL, type.type); break; - case USH: math(GeneratorAdapter.USHR, type.type); break; - case RSH: math(GeneratorAdapter.SHR, type.type); break; - case BWAND: math(GeneratorAdapter.AND, type.type); break; - case XOR: math(GeneratorAdapter.XOR, type.type); break; - case BWOR: math(GeneratorAdapter.OR, type.type); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } + switch (operation) { + case MUL: math(GeneratorAdapter.MUL, type.type); break; + case DIV: math(GeneratorAdapter.DIV, type.type); break; + case REM: math(GeneratorAdapter.REM, type.type); break; + case ADD: math(GeneratorAdapter.ADD, type.type); break; + case SUB: math(GeneratorAdapter.SUB, type.type); break; + case LSH: math(GeneratorAdapter.SHL, type.type); break; + case USH: math(GeneratorAdapter.USHR, type.type); break; + case RSH: math(GeneratorAdapter.SHR, type.type); break; + case BWAND: math(GeneratorAdapter.AND, type.type); break; + case XOR: math(GeneratorAdapter.XOR, type.type); break; + case BWOR: math(GeneratorAdapter.OR, type.type); break; + default: + throw new IllegalStateException("Error " + location + ": Illegal tree structure."); } } } - /** - * Called for any compound assignment (including increment/decrement instructions). - * We have to be stricter than writeBinary and do overflow checks against the original type's size - * instead of the promoted type's size, since the result will be implicitly cast back. - * - * @return This will be true if an instruction is written, false otherwise. - */ - public boolean writeExactInstruction( - final Definition definition, final Sort fsort, final Sort tsort) { - if (fsort == Sort.DOUBLE) { - if (tsort == Sort.FLOAT) { - invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.FLOAT_OBJ) { - invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - checkCast(definition.floatobjType.type); - } else if (tsort == Sort.LONG) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.LONG_OBJ) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - checkCast(definition.longobjType.type); - } else if (tsort == Sort.INT) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.INT_OBJ) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - checkCast(definition.intobjType.type); - } else if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - checkCast(definition.shortobjType.type); - } else if (tsort == Sort.BYTE) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.BYTE_OBJ) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (fsort == Sort.FLOAT) { - if (tsort == Sort.LONG) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - } else if (tsort == Sort.LONG_OBJ) { - invokeStatic(definition.utilityType.type, 
TOLONGWOOVERFLOW_FLOAT); - checkCast(definition.longobjType.type); - } else if (tsort == Sort.INT) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - } else if (tsort == Sort.INT_OBJ) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - checkCast(definition.intobjType.type); - } else if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - checkCast(definition.shortobjType.type); - } else if (tsort == Sort.BYTE) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - } else if (tsort == Sort.BYTE_OBJ) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (fsort == Sort.LONG) { - if (tsort == Sort.INT) { - invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - } else if (tsort == Sort.INT_OBJ) { - invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - checkCast(definition.intobjType.type); - } else if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - checkCast(definition.shortobjType.type); - } else if (tsort == Sort.BYTE) { - invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - } else if (tsort == Sort.BYTE_OBJ) { - invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (fsort == Sort.INT) { - if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - checkCast(definition.shortobjType.type); - } else if (tsort == Sort.BYTE) { - invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - } else if (tsort == Sort.BYTE_OBJ) { - invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - checkCast(definition.byteobjType.type); - } else { - return false; - } - } else { - return false; - } - - return true; - } - public void writeDup(final int size, final int xsize) { if (size == 1) { if (xsize == 2) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index e5998948d62..67f889b7a72 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -19,9 +19,9 @@ package org.elasticsearch.painless; + import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; /** @@ -29,6 +29,11 @@ import org.elasticsearch.script.ScriptModule; */ public final class PainlessPlugin extends Plugin { + // force to parse our definition at startup (not on the user's first script) + static { + Definition.VOID_TYPE.hashCode(); + } + @Override public String name() { return "lang-painless"; } @@ -41,6 +46,6 @@ public final class PainlessPlugin extends Plugin { public void onModule(final ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration( - PainlessScriptEngineService.class, PainlessScriptEngineService.NAME, ScriptMode.ON)); + PainlessScriptEngineService.class, PainlessScriptEngineService.NAME, true)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java index dafc6aaba8a..72a657cd7f0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java @@ -38,9 +38,7 @@ import java.security.AccessController; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; /** @@ -117,13 +115,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme // Use custom settings specified by params. compilerSettings = new CompilerSettings(); Map copy = new HashMap<>(params); - String value = copy.remove(CompilerSettings.NUMERIC_OVERFLOW); - - if (value != null) { - compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); - } - - value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); + String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); if (value != null) { compilerSettings.setMaxLoopCounter(Integer.parseInt(value)); @@ -212,7 +204,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme * Action taken when the engine is closed. */ @Override - public void close() throws IOException { + public void close() { // Nothing to do. } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java index 32641649827..5ab3450db7e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java @@ -25,442 +25,10 @@ package org.elasticsearch.painless; */ public class Utility { - public static boolean NumberToboolean(final Number value) { - return value.longValue() != 0; - } - - public static char NumberTochar(final Number value) { - return (char)value.intValue(); - } - - public static Boolean NumberToBoolean(final Number value) { - return value.longValue() != 0; - } - - public static Byte NumberToByte(final Number value) { - return value == null ? null : value.byteValue(); - } - - public static Short NumberToShort(final Number value) { - return value == null ? null : value.shortValue(); - } - - public static Character NumberToCharacter(final Number value) { - return value == null ? 
null : (char)value.intValue(); - } - - public static Integer NumberToInteger(final Number value) { - return value == null ? null : value.intValue(); - } - - public static Long NumberToLong(final Number value) { - return value == null ? null : value.longValue(); - } - - public static Float NumberToFloat(final Number value) { - return value == null ? null : value.floatValue(); - } - - public static Double NumberToDouble(final Number value) { - return value == null ? null : value.doubleValue(); - } - - public static byte booleanTobyte(final boolean value) { - return (byte)(value ? 1 : 0); - } - - public static short booleanToshort(final boolean value) { - return (short)(value ? 1 : 0); - } - - public static char booleanTochar(final boolean value) { - return (char)(value ? 1 : 0); - } - - public static int booleanToint(final boolean value) { - return value ? 1 : 0; - } - - public static long booleanTolong(final boolean value) { - return value ? 1 : 0; - } - - public static float booleanTofloat(final boolean value) { - return value ? 1 : 0; - } - - public static double booleanTodouble(final boolean value) { - return value ? 1 : 0; - } - - public static Integer booleanToInteger(final boolean value) { - return value ? 1 : 0; - } - - public static byte BooleanTobyte(final Boolean value) { - return (byte)(value ? 1 : 0); - } - - public static short BooleanToshort(final Boolean value) { - return (short)(value ? 1 : 0); - } - - public static char BooleanTochar(final Boolean value) { - return (char)(value ? 1 : 0); - } - - public static int BooleanToint(final Boolean value) { - return value ? 1 : 0; - } - - public static long BooleanTolong(final Boolean value) { - return value ? 1 : 0; - } - - public static float BooleanTofloat(final Boolean value) { - return value ? 1 : 0; - } - - public static double BooleanTodouble(final Boolean value) { - return value ? 1 : 0; - } - - public static Byte BooleanToByte(final Boolean value) { - return value == null ? null : (byte)(value ? 1 : 0); - } - - public static Short BooleanToShort(final Boolean value) { - return value == null ? null : (short)(value ? 1 : 0); - } - - public static Character BooleanToCharacter(final Boolean value) { - return value == null ? null : (char)(value ? 1 : 0); - } - - public static Integer BooleanToInteger(final Boolean value) { - return value == null ? null : value ? 1 : 0; - } - - public static Long BooleanToLong(final Boolean value) { - return value == null ? null : value ? 1L : 0L; - } - - public static Float BooleanToFloat(final Boolean value) { - return value == null ? null : value ? 1F : 0F; - } - - public static Double BooleanToDouble(final Boolean value) { - return value == null ? null : value ? 
1D : 0D; - } - - public static boolean byteToboolean(final byte value) { - return value != 0; - } - - public static Short byteToShort(final byte value) { - return (short)value; - } - - public static Character byteToCharacter(final byte value) { - return (char)value; - } - - public static Integer byteToInteger(final byte value) { - return (int)value; - } - - public static Long byteToLong(final byte value) { - return (long)value; - } - - public static Float byteToFloat(final byte value) { - return (float)value; - } - - public static Double byteToDouble(final byte value) { - return (double)value; - } - - public static boolean ByteToboolean(final Byte value) { - return value != 0; - } - - public static char ByteTochar(final Byte value) { - return (char)value.byteValue(); - } - - public static boolean shortToboolean(final short value) { - return value != 0; - } - - public static Byte shortToByte(final short value) { - return (byte)value; - } - - public static Character shortToCharacter(final short value) { - return (char)value; - } - - public static Integer shortToInteger(final short value) { - return (int)value; - } - - public static Long shortToLong(final short value) { - return (long)value; - } - - public static Float shortToFloat(final short value) { - return (float)value; - } - - public static Double shortToDouble(final short value) { - return (double)value; - } - - public static boolean ShortToboolean(final Short value) { - return value != 0; - } - - public static char ShortTochar(final Short value) { - return (char)value.shortValue(); - } - - public static boolean charToboolean(final char value) { - return value != 0; - } - - public static Byte charToByte(final char value) { - return (byte)value; - } - - public static Short charToShort(final char value) { - return (short)value; - } - - public static Integer charToInteger(final char value) { - return (int)value; - } - - public static Long charToLong(final char value) { - return (long)value; - } - - public static Float charToFloat(final char value) { - return (float)value; - } - - public static Double charToDouble(final char value) { - return (double)value; - } - public static String charToString(final char value) { return String.valueOf(value); } - public static boolean CharacterToboolean(final Character value) { - return value != 0; - } - - public static byte CharacterTobyte(final Character value) { - return (byte)value.charValue(); - } - - public static short CharacterToshort(final Character value) { - return (short)value.charValue(); - } - - public static int CharacterToint(final Character value) { - return value; - } - - public static long CharacterTolong(final Character value) { - return value; - } - - public static float CharacterTofloat(final Character value) { - return value; - } - - public static double CharacterTodouble(final Character value) { - return value; - } - - public static Boolean CharacterToBoolean(final Character value) { - return value == null ? null : value != 0; - } - - public static Byte CharacterToByte(final Character value) { - return value == null ? null : (byte)value.charValue(); - } - - public static Short CharacterToShort(final Character value) { - return value == null ? null : (short)value.charValue(); - } - - public static Integer CharacterToInteger(final Character value) { - return value == null ? null : (int)value; - } - - public static Long CharacterToLong(final Character value) { - return value == null ? 
null : (long)value; - } - - public static Float CharacterToFloat(final Character value) { - return value == null ? null : (float)value; - } - - public static Double CharacterToDouble(final Character value) { - return value == null ? null : (double)value; - } - - public static String CharacterToString(final Character value) { - return value == null ? null : value.toString(); - } - - public static boolean intToboolean(final int value) { - return value != 0; - } - - public static Byte intToByte(final int value) { - return (byte)value; - } - - public static Short intToShort(final int value) { - return (short)value; - } - - public static Character intToCharacter(final int value) { - return (char)value; - } - - public static Long intToLong(final int value) { - return (long)value; - } - - public static Float intToFloat(final int value) { - return (float)value; - } - - public static Double intToDouble(final int value) { - return (double)value; - } - - public static boolean IntegerToboolean(final Integer value) { - return value != 0; - } - - public static char IntegerTochar(final Integer value) { - return (char)value.intValue(); - } - - public static boolean longToboolean(final long value) { - return value != 0; - } - - public static Byte longToByte(final long value) { - return (byte)value; - } - - public static Short longToShort(final long value) { - return (short)value; - } - - public static Character longToCharacter(final long value) { - return (char)value; - } - - public static Integer longToInteger(final long value) { - return (int)value; - } - - public static Float longToFloat(final long value) { - return (float)value; - } - - public static Double longToDouble(final long value) { - return (double)value; - } - - public static boolean LongToboolean(final Long value) { - return value != 0; - } - - public static char LongTochar(final Long value) { - return (char)value.longValue(); - } - - public static boolean floatToboolean(final float value) { - return value != 0; - } - - public static Byte floatToByte(final float value) { - return (byte)value; - } - - public static Short floatToShort(final float value) { - return (short)value; - } - - public static Character floatToCharacter(final float value) { - return (char)value; - } - - public static Integer floatToInteger(final float value) { - return (int)value; - } - - public static Long floatToLong(final float value) { - return (long)value; - } - - public static Double floatToDouble(final float value) { - return (double)value; - } - - public static boolean FloatToboolean(final Float value) { - return value != 0; - } - - public static char FloatTochar(final Float value) { - return (char)value.floatValue(); - } - - public static boolean doubleToboolean(final double value) { - return value != 0; - } - - public static Byte doubleToByte(final double value) { - return (byte)value; - } - - public static Short doubleToShort(final double value) { - return (short)value; - } - - public static Character doubleToCharacter(final double value) { - return (char)value; - } - - public static Integer doubleToInteger(final double value) { - return (int)value; - } - - public static Long doubleToLong(final double value) { - return (long)value; - } - - public static Float doubleToFloat(final double value) { - return (float)value; - } - - public static boolean DoubleToboolean(final Double value) { - return value != 0; - } - - public static char DoubleTochar(final Double value) { - return (char)value.doubleValue(); - } - public static char StringTochar(final String value) { 
if (value.length() != 1) { throw new ClassCastException("Cannot cast [String] with length greater than one to [char]."); @@ -469,359 +37,6 @@ public class Utility { return value.charAt(0); } - public static Character StringToCharacter(final String value) { - if (value == null) { - return null; - } - - if (value.length() != 1) { - throw new ClassCastException("Cannot cast [String] with length greater than one to [Character]."); - } - - return value.charAt(0); - } - - // although divide by zero is guaranteed, the special overflow case is not caught. - // its not needed for remainder because it is not possible there. - // see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2 - - /** - * Integer divide without overflow - * @throws ArithmeticException on overflow or divide-by-zero - */ - public static int divideWithoutOverflow(int x, int y) { - if (x == Integer.MIN_VALUE && y == -1) { - throw new ArithmeticException("integer overflow"); - } - return x / y; - } - - /** - * Long divide without overflow - * @throws ArithmeticException on overflow or divide-by-zero - */ - public static long divideWithoutOverflow(long x, long y) { - if (x == Long.MIN_VALUE && y == -1L) { - throw new ArithmeticException("long overflow"); - } - return x / y; - } - - // byte, short, and char are promoted to int for normal operations, - // so the JDK exact methods are typically used, and the result has a wider range. - // but compound assignments and increment/decrement operators (e.g. byte b = Byte.MAX_VALUE; b++;) - // implicitly cast back to the original type: so these need to be checked against the original range. - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteExact(int value) { - byte s = (byte) value; - if (s != value) { - throw new ArithmeticException("byte overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteExact(long value) { - byte s = (byte) value; - if (s != value) { - throw new ArithmeticException("byte overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteWithoutOverflow(float value) { - if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { - throw new ArithmeticException("byte overflow"); - } - return (byte)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteWithoutOverflow(double value) { - if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { - throw new ArithmeticException("byte overflow"); - } - return (byte)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortExact(int value) { - short s = (short) value; - if (s != value) { - throw new ArithmeticException("short overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortExact(long value) { - short s = (short) value; - if (s != value) { - throw new ArithmeticException("short overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortWithoutOverflow(float value) { - if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { - throw new ArithmeticException("short overflow"); - } - return (short)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. 
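// An illustrative aside, not part of the patch: the single case in which Java
// integer division overflows (JLS 15.17.2), which the divideWithoutOverflow
// helpers above guard against while plain division wraps silently.
int q = Integer.MIN_VALUE / -1;    // wraps back to Integer.MIN_VALUE, no exception
long r = Long.MIN_VALUE / -1L;     // the same wrap for long
int rem = Integer.MIN_VALUE % -1;  // remainder needs no guard: this is always 0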
- */ - public static short toShortExact(double value) { - if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { - throw new ArithmeticException("short overflow"); - } - return (short)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharExact(int value) { - char s = (char) value; - if (s != value) { - throw new ArithmeticException("char overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharExact(long value) { - char s = (char) value; - if (s != value) { - throw new ArithmeticException("char overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharWithoutOverflow(float value) { - if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) { - throw new ArithmeticException("char overflow"); - } - return (char)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharWithoutOverflow(double value) { - if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) { - throw new ArithmeticException("char overflow"); - } - return (char)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for int range. - */ - public static int toIntWithoutOverflow(float value) { - if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { - throw new ArithmeticException("int overflow"); - } - return (int)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for int range. - */ - public static int toIntWithoutOverflow(double value) { - if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { - throw new ArithmeticException("int overflow"); - } - return (int)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for long range. - */ - public static long toLongWithoutOverflow(float value) { - if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) { - throw new ArithmeticException("long overflow"); - } - return (long)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for long range. - */ - public static float toLongWithoutOverflow(double value) { - if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) { - throw new ArithmeticException("long overflow"); - } - return (long)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for float range. 
- */ - public static float toFloatWithoutOverflow(double value) { - if (value < Float.MIN_VALUE || value > Float.MAX_VALUE) { - throw new ArithmeticException("float overflow"); - } - return (float)value; - } - - /** - * Checks for overflow, result is infinite but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static float checkInfFloat(float x, float y, float z) { - if (Float.isInfinite(z)) { - if (Float.isFinite(x) && Float.isFinite(y)) { - throw new ArithmeticException("float overflow"); - } - } - return z; - } - - /** - * Checks for NaN, result is NaN but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static float checkNaNFloat(float x, float y, float z) { - if (Float.isNaN(z)) { - if (Float.isFinite(x) && Float.isFinite(y)) { - throw new ArithmeticException("NaN"); - } - } - return z; - } - - /** - * Checks for NaN, result is infinite but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static double checkInfDouble(double x, double y, double z) { - if (Double.isInfinite(z)) { - if (Double.isFinite(x) && Double.isFinite(y)) { - throw new ArithmeticException("double overflow"); - } - } - return z; - } - - /** - * Checks for NaN, result is NaN but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static double checkNaNDouble(double x, double y, double z) { - if (Double.isNaN(z)) { - if (Double.isFinite(x) && Double.isFinite(y)) { - throw new ArithmeticException("NaN"); - } - } - return z; - } - - /** - * Adds two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float addWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x + y); - } - - /** - * Adds two doubles but throws {@code ArithmeticException} - * if the result overflows. - */ - public static double addWithoutOverflow(double x, double y) { - return checkInfDouble(x, y, x + y); - } - - /** - * Subtracts two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float subtractWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x - y); - } - - /** - * Subtracts two doubles but throws {@code ArithmeticException} - * if the result overflows. - */ - public static double subtractWithoutOverflow(double x, double y) { - return checkInfDouble(x, y , x - y); - } - - /** - * Multiplies two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float multiplyWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x * y); - } - - /** - * Multiplies two doubles but throws {@code ArithmeticException} - * if the result overflows. 
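// An illustrative aside, not part of the patch: IEEE float arithmetic never
// throws, so checkInfFloat/checkNaNFloat above detect overflow as an infinite
// (or NaN) result produced from finite operands.
float x = Float.MAX_VALUE;
float y = x * 2.0f;                                            // silently becomes Infinity
boolean overflowed = Float.isInfinite(y) && Float.isFinite(x); // true: finite in, infinite out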
- */ - public static double multiplyWithoutOverflow(double x, double y) { - return checkInfDouble(x, y, x * y); - } - - /** - * Divides two floats but throws {@code ArithmeticException} - * if the result overflows, or would create NaN from finite - * inputs ({@code x == 0, y == 0}) - */ - public static float divideWithoutOverflow(float x, float y) { - return checkNaNFloat(x, y, checkInfFloat(x, y, x / y)); - } - - /** - * Divides two doubles but throws {@code ArithmeticException} - * if the result overflows, or would create NaN from finite - * inputs ({@code x == 0, y == 0}) - */ - public static double divideWithoutOverflow(double x, double y) { - return checkNaNDouble(x, y, checkInfDouble(x, y, x / y)); - } - - /** - * Takes remainder two floats but throws {@code ArithmeticException} - * if the result would create NaN from finite inputs ({@code y == 0}) - */ - public static float remainderWithoutOverflow(float x, float y) { - return checkNaNFloat(x, y, x % y); - } - - /** - * Divides two doubles but throws {@code ArithmeticException} - * if the result would create NaN from finite inputs ({@code y == 0}) - */ - public static double remainderWithoutOverflow(double x, double y) { - return checkNaNDouble(x, y, x % y); - } - public static boolean checkEquals(final Object left, final Object right) { if (left != null) { return left.equals(right); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java index adf930b017c..28a7416d800 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java @@ -86,14 +86,12 @@ public final class Variables { } } - private final Definition definition; final Reserved reserved; private final Deque scopes = new ArrayDeque<>(); private final Deque variables = new ArrayDeque<>(); - public Variables(final CompilerSettings settings, final Definition definition, final Reserved reserved) { - this.definition = definition; + public Variables(final CompilerSettings settings, final Reserved reserved) { this.reserved = reserved; incrementScope(); @@ -101,35 +99,35 @@ public final class Variables { // Method variables. // This reference. Internal use only. - addVariable("[" + Reserved.THIS + "]" , definition.execType.name, Reserved.THIS , true, true); + addVariable("[" + Reserved.THIS + "]" , "Executable", Reserved.THIS , true, true); // Input map of variables passed to the script. TODO: Rename to 'params' since that will be its use. - addVariable("[" + Reserved.PARAMS + "]", definition.smapType.name, Reserved.PARAMS, true, true); + addVariable("[" + Reserved.PARAMS + "]", "Map", Reserved.PARAMS, true, true); // Scorer parameter passed to the script. Internal use only. - addVariable("[" + Reserved.SCORER + "]", definition.defType.name , Reserved.SCORER, true, true); + addVariable("[" + Reserved.SCORER + "]", "def", Reserved.SCORER, true, true); - // Doc parameter passed to the script. TODO: Currently working as a Map, we can do better? - addVariable("[" + Reserved.DOC + "]" , definition.smapType.name, Reserved.DOC , true, true); + // Doc parameter passed to the script. TODO: Currently working as a Map, we can do better? + addVariable("[" + Reserved.DOC + "]" , "Map", Reserved.DOC , true, true); // Aggregation _value parameter passed to the script. 
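// A hedged sketch, not part of the patch: this hunk swaps the Definition
// instance fields for type names resolved through the static lookup that
// appears further down in this file.
Type mapType = Definition.getType("Map"); // was definition.smapType.name
Type defType = Definition.getType("def"); // was definition.defType.name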
- addVariable("[" + Reserved.VALUE + "]" , definition.defType.name , Reserved.VALUE , true, true); + addVariable("[" + Reserved.VALUE + "]" , "def", Reserved.VALUE , true, true); // Shortcut variables. // Document's score as a read-only double. if (reserved.score) { - addVariable("[" + Reserved.SCORE + "]", definition.doubleType.name, Reserved.SCORE, true, true); + addVariable("[" + Reserved.SCORE + "]", "double", Reserved.SCORE, true, true); } // The ctx map set by executable scripts as a read-only map. if (reserved.ctx) { - addVariable("[" + Reserved.CTX + "]", definition.smapType.name, Reserved.CTX, true, true); + addVariable("[" + Reserved.CTX + "]", "Map", Reserved.CTX, true, true); } // Loop counter to catch infinite loops. Internal use only. if (reserved.loop && settings.getMaxLoopCounter() > 0) { - addVariable("[" + Reserved.LOOP + "]", definition.intType.name, Reserved.LOOP, true, true); + addVariable("[" + Reserved.LOOP + "]", "int", Reserved.LOOP, true, true); } } @@ -182,7 +180,7 @@ public final class Variables { final Type type; try { - type = definition.getType(typestr); + type = Definition.getType(typestr); } catch (final IllegalArgumentException exception) { throw new IllegalArgumentException("Error " + location + ": Not a type [" + typestr + "]."); } @@ -190,7 +188,7 @@ public final class Variables { boolean legal = !name.contains("<"); try { - definition.getType(name); + Definition.getType(name); legal = false; } catch (final IllegalArgumentException exception) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java index 449361867b9..6e2d0e1431b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java @@ -38,15 +38,14 @@ import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; */ final class Writer { - static byte[] write(final CompilerSettings settings, final Definition definition, + static byte[] write(final CompilerSettings settings, String name, final String source, final Variables variables, final SSource root) { - final Writer writer = new Writer(settings, definition, name, source, variables, root); + final Writer writer = new Writer(settings, name, source, variables, root); return writer.getBytes(); } private final CompilerSettings settings; - private final Definition definition; private final String scriptName; private final String source; private final Variables variables; @@ -55,10 +54,9 @@ final class Writer { private final ClassWriter writer; private final MethodWriter adapter; - private Writer(final CompilerSettings settings, final Definition definition, + private Writer(final CompilerSettings settings, String name, final String source, final Variables variables, final SSource root) { this.settings = settings; - this.definition = definition; this.scriptName = name; this.source = source; this.variables = variables; @@ -117,7 +115,7 @@ final class Writer { // if we truncated, make it obvious if (limit != source.length()) { fileName.append(" ..."); - } + } fileName.append(" @ "); } else { // its a named script, just use the name @@ -177,7 +175,7 @@ final class Writer { adapter.visitVarInsn(Opcodes.ISTORE, loop.slot); } - root.write(settings, definition, adapter); + root.write(adapter); adapter.endMethod(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index 6bdb9856114..410c06e6fd7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -27,7 +27,6 @@ import org.objectweb.asm.Type; import org.objectweb.asm.commons.Method; import java.lang.invoke.CallSite; -import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.Map; @@ -39,7 +38,7 @@ public final class WriterConstants { public final static String BASE_CLASS_NAME = Executable.class.getName(); public final static Type BASE_CLASS_TYPE = Type.getType(Executable.class); - + public final static String CLASS_NAME = BASE_CLASS_NAME + "$Script"; public final static Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); @@ -56,6 +55,10 @@ public final class WriterConstants { public final static Type MAP_TYPE = Type.getType(Map.class); public final static Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); + public final static Type UTILITY_TYPE = Type.getType(Utility.class); + public final static Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); + public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); + /** dynamic callsite bootstrap signature */ public final static MethodType DEF_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class); @@ -63,19 +66,35 @@ public final class WriterConstants { new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DefBootstrap.class), "bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString()); - public final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class); - public final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class); - public final static Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class); - public final static Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class); - public final static Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class); - public final static Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class); - public final static Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class); - public final static Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, int.class); - public final static Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, int.class); - public final static Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, int.class); - public final static Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class); - public final static Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class); - public final static Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class); + public final static Type DEF_UTIL_TYPE = Type.getType(Def.class); + public final static Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); + public final static Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); + public final static Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , 
Object.class); + public final static Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); + public final static Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , "DefTointImplicit" , Object.class); + public final static Method DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); + public final static Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); + public final static Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", Object.class); + public final static Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); + public final static Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); + public final static Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); + public final static Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); + public final static Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); + public final static Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); + public final static Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); + public final static Method DEF_NOT_CALL = getAsmMethod(Object.class , "not", Object.class); + public final static Method DEF_NEG_CALL = getAsmMethod(Object.class , "neg", Object.class); + public final static Method DEF_MUL_CALL = getAsmMethod(Object.class , "mul", Object.class, Object.class); + public final static Method DEF_DIV_CALL = getAsmMethod(Object.class , "div", Object.class, Object.class); + public final static Method DEF_REM_CALL = getAsmMethod(Object.class , "rem", Object.class, Object.class); + public final static Method DEF_ADD_CALL = getAsmMethod(Object.class , "add", Object.class, Object.class); + public final static Method DEF_SUB_CALL = getAsmMethod(Object.class , "sub", Object.class, Object.class); + public final static Method DEF_LSH_CALL = getAsmMethod(Object.class , "lsh", Object.class, int.class); + public final static Method DEF_RSH_CALL = getAsmMethod(Object.class , "rsh", Object.class, int.class); + public final static Method DEF_USH_CALL = getAsmMethod(Object.class , "ush", Object.class, int.class); + public final static Method DEF_AND_CALL = getAsmMethod(Object.class , "and", Object.class, Object.class); + public final static Method DEF_XOR_CALL = getAsmMethod(Object.class , "xor", Object.class, Object.class); + public final static Method DEF_OR_CALL = getAsmMethod(Object.class , "or" , Object.class, Object.class); public final static Method DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class); public final static Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class); public final static Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class); @@ -99,9 +118,9 @@ public final class WriterConstants { } INDY_STRING_CONCAT_BOOTSTRAP_HANDLE = bs; } - + public final static int MAX_INDY_STRING_CONCAT_ARGS = 200; - + public final static Type STRING_TYPE = Type.getType(String.class); public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); @@ -116,59 +135,7 @@ public final class WriterConstants { public final static Method STRINGBUILDER_APPEND_OBJECT = 
getAsmMethod(StringBuilder.class, "append", Object.class); public final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); - public final static Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class); - public final static Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class); - public final static Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class); - public final static Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class); - public final static Method MULEXACT_LONG = getAsmMethod(long.class, "multiplyExact", long.class, long.class); - public final static Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class); - public final static Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class); - public final static Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class); - public final static Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class); - - public final static Method CHECKEQUALS = - getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class); - public final static Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class); - public final static Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class); - public final static Method TOBYTEWOOVERFLOW_FLOAT = getAsmMethod(byte.class, "toByteWithoutOverflow", float.class); - public final static Method TOBYTEWOOVERFLOW_DOUBLE = getAsmMethod(byte.class, "toByteWithoutOverflow", double.class); - public final static Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class); - public final static Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class); - public final static Method TOSHORTWOOVERFLOW_FLOAT = getAsmMethod(short.class, "toShortWithoutOverflow", float.class); - public final static Method TOSHORTWOOVERFLOW_DOUBLE = getAsmMethod(short.class, "toShortWihtoutOverflow", double.class); - public final static Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class); - public final static Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class); - public final static Method TOCHARWOOVERFLOW_FLOAT = getAsmMethod(char.class, "toCharWithoutOverflow", float.class); - public final static Method TOCHARWOOVERFLOW_DOUBLE = getAsmMethod(char.class, "toCharWithoutOverflow", double.class); - public final static Method TOINTWOOVERFLOW_FLOAT = getAsmMethod(int.class, "toIntWithoutOverflow", float.class); - public final static Method TOINTWOOVERFLOW_DOUBLE = getAsmMethod(int.class, "toIntWithoutOverflow", double.class); - public final static Method TOLONGWOOVERFLOW_FLOAT = getAsmMethod(long.class, "toLongWithoutOverflow", float.class); - public final static Method TOLONGWOOVERFLOW_DOUBLE = getAsmMethod(long.class, "toLongWithoutOverflow", double.class); - public final static Method TOFLOATWOOVERFLOW_DOUBLE = getAsmMethod(float.class , "toFloatWihtoutOverflow", double.class); - public final static Method MULWOOVERLOW_FLOAT = - getAsmMethod(float.class, "multiplyWithoutOverflow", float.class, float.class); - public final static Method MULWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "multiplyWithoutOverflow", double.class, double.class); - public final static Method DIVWOOVERLOW_INT = - getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class); - public final static 
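// An illustrative aside, not part of the patch: getAsmMethod, defined at the
// bottom of this file, derives the ASM descriptor from a JDK MethodType, e.g.
// for CHECKEQUALS:
String desc = MethodType.methodType(boolean.class, Object.class, Object.class)
        .toMethodDescriptorString(); // "(Ljava/lang/Object;Ljava/lang/Object;)Z"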
Method DIVWOOVERLOW_LONG = - getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class); - public final static Method DIVWOOVERLOW_FLOAT = - getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class); - public final static Method DIVWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class); - public final static Method REMWOOVERLOW_FLOAT = - getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class); - public final static Method REMWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "remainderWithoutOverflow", double.class, double.class); - public final static Method ADDWOOVERLOW_FLOAT = - getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class); - public final static Method ADDWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class); - public final static Method SUBWOOVERLOW_FLOAT = - getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class); - public final static Method SUBWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class); + public final static Method CHECKEQUALS = getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class); private static Method getAsmMethod(final Class rtype, final String name, final Class... ptypes) { return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 4f6e2f5e87c..9ecf6754248 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless.antlr; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.Variables.Reserved; import org.elasticsearch.painless.antlr.PainlessParser.AfterthoughtContext; @@ -121,15 +122,17 @@ import java.util.List; */ public final class Walker extends PainlessParserBaseVisitor { - public static SSource buildPainlessTree(final String source, final Reserved reserved) { - return new Walker(source, reserved).source; + public static SSource buildPainlessTree(String source, Reserved reserved, CompilerSettings settings) { + return new Walker(source, reserved, settings).source; } private final Reserved reserved; private final SSource source; + private final CompilerSettings settings; - private Walker(final String source, final Reserved reserved) { + private Walker(String source, Reserved reserved, CompilerSettings settings) { this.reserved = reserved; + this.settings = settings; this.source = (SSource)visit(buildAntlrTree(source)); } @@ -181,7 +184,7 @@ public final class Walker extends PainlessParserBaseVisitor { reserved.usesLoop(); - return new SWhile(line(ctx), location(ctx), condition, block); + return new SWhile(line(ctx), location(ctx), condition, block, settings.getMaxLoopCounter()); } @Override @@ -191,7 +194,7 @@ public final class Walker extends PainlessParserBaseVisitor { reserved.usesLoop(); - return new SDo(line(ctx), location(ctx), block, condition); + return new SDo(line(ctx), location(ctx), block, condition, 
settings.getMaxLoopCounter()); } @Override @@ -203,7 +206,7 @@ public final class Walker extends PainlessParserBaseVisitor { reserved.usesLoop(); - return new SFor(line(ctx), location(ctx), intializer, condition, afterthought, block); + return new SFor(line(ctx), location(ctx), intializer, condition, afterthought, block, settings.getMaxLoopCounter()); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 528da4384c8..54eb8091e8e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.AnalyzerCaster; @@ -67,6 +65,12 @@ public abstract class AExpression extends ANode { */ protected boolean explicit = false; + /** + * Set to true if a cast is allowed to boxed/unboxed. This is used + * for method arguments because casting may be required. + */ + protected boolean internal = false; + /** * Set to the value of the constant this expression node represents if * and only if the node represents a constant. If this is not null @@ -101,27 +105,27 @@ public abstract class AExpression extends ANode { /** * Checks for errors and collects data for the writing phase. */ - abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract void analyze(Variables variables); /** * Writes ASM based on the data collected during the analysis phase. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter adapter); /** * Inserts {@link ECast} nodes into the tree for implicit casts. Also replaces * nodes with the constant variable set to a non-null value with {@link EConstant}. * @return The new child node for the parent node calling this method. 
*/ - AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) { - final Cast cast = AnalyzerCaster.getLegalCast(definition, location, actual, expected, explicit); + AExpression cast(Variables variables) { + final Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); if (cast == null) { if (constant == null || this instanceof EConstant) { return this; } else { final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + econstant.analyze(variables); if (!expected.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); @@ -142,7 +146,7 @@ public abstract class AExpression extends ANode { constant = AnalyzerCaster.constCast(location, constant, cast); final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + econstant.analyze(variables); if (!expected.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); @@ -156,7 +160,7 @@ public abstract class AExpression extends ANode { return ecast; } else { final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + econstant.analyze(variables); if (!actual.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java index ffbfff112b3..e134e7fbcb9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -75,7 +73,7 @@ public abstract class ALink extends ANode { */ String string = null; - ALink(final int line, final String location, final int size) { + ALink(int line, String location, int size) { super(line, location); this.size = size; @@ -87,27 +85,27 @@ public abstract class ALink extends ANode { * def or a shortcut is used. Otherwise, returns itself. This will be * updated into the {@link EChain} node's list of links. */ - abstract ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract ALink analyze(Variables variables); /** * Write values before a load/store occurs such as an array index. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter adapter); /** * Write a load for the specific link type. */ - abstract void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void load(MethodWriter adapter); /** * Write a store for the specific link type. */ - abstract void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void store(MethodWriter adapter); /** * Used to copy link data from one to another during analysis in the case of replacement. 
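// A hedged sketch, not part of the patch: because cast() above folds constants
// during analysis, an implicit widening of a constant never reaches bytecode.
// Assuming a Cast from int to long, the analysis-time fold is simply:
//
//   Object folded = AnalyzerCaster.constCast(location, Integer.valueOf(2), cast); // a Long, 2L
//   // the child node is then replaced with new EConstant(line, location, folded)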
*/ - final ALink copy(final ALink link) { + final ALink copy(ALink link) { load = link.load; store = link.store; statik = link.statik; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index 569e8cfb03b..ebc4a166268 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; import org.elasticsearch.painless.MethodWriter; @@ -109,17 +107,17 @@ public abstract class AStatement extends ANode { */ Label brake = null; - AStatement(final int line, final String location) { + AStatement(int line, String location) { super(line, location); } /** * Checks for errors and collects data for the writing phase. */ - abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract void analyze(Variables variables); /** * Writes ASM based on the data collected during the analysis phase. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter adapter); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 07fe1ff3447..ee990f60b61 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -39,7 +38,7 @@ public final class EBinary extends AExpression { boolean cat = false; - public EBinary(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { + public EBinary(int line, String location, Operation operation, AExpression left, AExpression right) { super(line, location); this.operation = operation; @@ -48,39 +47,39 @@ public final class EBinary extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.MUL) { - analyzeMul(settings, definition, variables); + analyzeMul(variables); } else if (operation == Operation.DIV) { - analyzeDiv(settings, definition, variables); + analyzeDiv(variables); } else if (operation == Operation.REM) { - analyzeRem(settings, definition, variables); + analyzeRem(variables); } else if (operation == Operation.ADD) { - analyzeAdd(settings, definition, variables); + analyzeAdd(variables); } else if (operation == Operation.SUB) { - analyzeSub(settings, definition, variables); + analyzeSub(variables); } else if (operation == Operation.LSH) { - analyzeLSH(settings, definition, variables); + analyzeLSH(variables); } else if (operation == Operation.RSH) { - analyzeRSH(settings, definition, 
variables); + analyzeRSH(variables); } else if (operation == Operation.USH) { - analyzeUSH(settings, definition, variables); + analyzeUSH(variables); } else if (operation == Operation.BWAND) { - analyzeBWAnd(settings, definition, variables); + analyzeBWAnd(variables); } else if (operation == Operation.XOR) { - analyzeXor(settings, definition, variables); + analyzeXor(variables); } else if (operation == Operation.BWOR) { - analyzeBWOr(settings, definition, variables); + analyzeBWOr(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - private void analyzeMul(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeMul(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply multiply [*] to types " + @@ -90,25 +89,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); final Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant * (int)right.constant : - Math.multiplyExact((int)left.constant, (int)right.constant); + constant = (int)left.constant * (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant * (long)right.constant : - Math.multiplyExact((long)left.constant, (long)right.constant); + constant = (long)left.constant * (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant * (float)right.constant : - org.elasticsearch.painless.Utility.multiplyWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant * (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant * (double)right.constant : - org.elasticsearch.painless.Utility.multiplyWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant * (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -117,11 +111,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeDiv(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeDiv(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply divide [/] to types " + @@ -131,25 +125,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); final Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant / (int)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((int)left.constant, (int)right.constant); + constant = (int)left.constant / (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant / (long)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((long)left.constant, (long)right.constant); + constant = (long)left.constant / (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant / (float)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant / (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant / (double)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant / (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -158,11 +147,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeRem(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeRem(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply remainder [%] to types " + @@ -172,11 +161,10 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); final Sort sort = promote.sort; if (sort == Sort.INT) { @@ -184,11 +172,9 @@ public final class EBinary extends AExpression { } else if (sort == Sort.LONG) { constant = (long)left.constant % (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant % (float)right.constant : - org.elasticsearch.painless.Utility.remainderWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant % (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? (double)left.constant % (double)right.constant : - org.elasticsearch.painless.Utility.remainderWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant % (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -197,11 +183,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeAdd(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeAdd(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteAdd(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteAdd(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply add [+] to types " + @@ -227,24 +213,18 @@ public final class EBinary extends AExpression { right.expected = promote; } - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - if (sort == Sort.INT) { - constant = overflow ? 
(int)left.constant + (int)right.constant : - Math.addExact((int)left.constant, (int)right.constant); + constant = (int)left.constant + (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant + (long)right.constant : - Math.addExact((long)left.constant, (long)right.constant); + constant = (long)left.constant + (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant + (float)right.constant : - org.elasticsearch.painless.Utility.addWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant + (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? (double)left.constant + (double)right.constant : - org.elasticsearch.painless.Utility.addWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant + (double)right.constant; } else if (sort == Sort.STRING) { constant = "" + left.constant + right.constant; } else { @@ -255,11 +235,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeSub(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeSub(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply subtract [-] to types " + @@ -269,25 +249,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); final Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant - (int)right.constant : - Math.subtractExact((int)left.constant, (int)right.constant); + constant = (int)left.constant - (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant - (long)right.constant : - Math.subtractExact((long)left.constant, (long)right.constant); + constant = (long)left.constant - (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant - (float)right.constant : - org.elasticsearch.painless.Utility.subtractWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant - (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant - (double)right.constant : - org.elasticsearch.painless.Utility.subtractWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant - (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -296,11 +271,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeLSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply left shift [<<] to types " + @@ -308,11 +283,11 @@ public final class EBinary extends AExpression { } left.expected = promote; - right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -329,11 +304,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeRSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeRSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply right shift [>>] to types " + @@ -341,11 +316,11 @@ public final class EBinary extends AExpression { } left.expected = promote; - right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -362,11 +337,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeUSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeUSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply unsigned shift [>>>] to types " + @@ -374,11 +349,11 @@ public final class EBinary extends AExpression { } left.expected = promote; - right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, 
variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -395,11 +370,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeBWAnd(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeBWAnd(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply and [&] to types " + @@ -409,8 +384,8 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -427,11 +402,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeXor(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeXor(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteXor(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteXor(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply xor [^] to types " + @@ -441,8 +416,8 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -461,11 +436,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeBWOr(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeBWOr(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply or [|] to types " + @@ -475,8 +450,8 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -494,19 +469,19 @@ public final class EBinary extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter 
adapter) { + void write(MethodWriter adapter) { if (actual.sort == Sort.STRING && operation == Operation.ADD) { if (!cat) { adapter.writeNewStrings(); } - left.write(settings, definition, adapter); + left.write(adapter); if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) { adapter.writeAppendStrings(left.actual); } - right.write(settings, definition, adapter); + right.write(adapter); if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) { adapter.writeAppendStrings(right.actual); @@ -516,10 +491,10 @@ public final class EBinary extends AExpression { adapter.writeToStrings(); } } else { - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(adapter); + right.write(adapter); - adapter.writeBinaryInstruction(settings, definition, location, actual, operation); + adapter.writeBinaryInstruction(location, actual, operation); } adapter.writeBranch(tru, fals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index 7f9f7dee000..27cd3018ded 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.Variables; @@ -35,7 +34,7 @@ public final class EBool extends AExpression { AExpression left; AExpression right; - public EBool(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { + public EBool(int line, String location, Operation operation, AExpression left, AExpression right) { super(line, location); this.operation = operation; @@ -44,14 +43,14 @@ public final class EBool extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.expected = definition.booleanType; - left.analyze(settings, definition, variables); - left = left.cast(settings, definition, variables); + void analyze(Variables variables) { + left.expected = Definition.BOOLEAN_TYPE; + left.analyze(variables); + left = left.cast(variables); - right.expected = definition.booleanType; - right.analyze(settings, definition, variables); - right = right.cast(settings, definition, variables); + right.expected = Definition.BOOLEAN_TYPE; + right.analyze(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { if (operation == Operation.AND) { @@ -63,11 +62,11 @@ public final class EBool extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { if (tru != null || fals != null) { if (operation == Operation.AND) { final Label localfals = fals == null ? 
new Label() : fals; @@ -76,8 +75,8 @@ public final class EBool extends AExpression { right.tru = tru; right.fals = fals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(adapter); + right.write(adapter); if (fals == null) { adapter.mark(localfals); @@ -89,8 +88,8 @@ public final class EBool extends AExpression { right.tru = tru; right.fals = fals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(adapter); + right.write(adapter); if (tru == null) { adapter.mark(localtru); @@ -106,8 +105,8 @@ public final class EBool extends AExpression { left.fals = localfals; right.fals = localfals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(adapter); + right.write(adapter); adapter.push(true); adapter.goTo(end); @@ -122,8 +121,8 @@ public final class EBool extends AExpression { left.tru = localtru; right.fals = localfals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(adapter); + right.write(adapter); adapter.mark(localtru); adapter.push(true); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index 9b9f0917546..27d7bb9a626 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,19 +28,19 @@ import org.elasticsearch.painless.MethodWriter; */ public final class EBoolean extends AExpression { - public EBoolean(final int line, final String location, final boolean constant) { + public EBoolean(int line, String location, boolean constant) { super(line, location); this.constant = constant; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - actual = definition.booleanType; + void analyze(Variables variables) { + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index d68c95c910a..7a8c4a29b60 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -36,7 +34,7 @@ final class ECast extends AExpression { Cast cast = null; - ECast(final int line, final String location, final AExpression child, final Cast cast) { + ECast(int line, String location, AExpression child, Cast cast) { super(line, location); this.type = null; @@ 
-46,13 +44,13 @@ final class ECast extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { throw new IllegalStateException(error("Illegal tree structure.")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - child.write(settings, definition, adapter); + void write(MethodWriter adapter) { + child.write(adapter); adapter.writeCast(cast); adapter.writeBranch(tru, fals); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java index 39afcd935ad..31bff2dd66d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Sort; @@ -44,12 +43,11 @@ public final class EChain extends AExpression { boolean cat = false; Type promote = null; - boolean exact = false; Cast there = null; Cast back = null; - public EChain(final int line, final String location, final List links, - final boolean pre, final boolean post, final Operation operation, final AExpression expression) { + public EChain(int line, String location, List links, + boolean pre, boolean post, Operation operation, AExpression expression) { super(line, location); this.links = links; @@ -60,20 +58,20 @@ public final class EChain extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - analyzeLinks(settings, definition, variables); + void analyze(Variables variables) { + analyzeLinks(variables); analyzeIncrDecr(); if (operation != null) { - analyzeCompound(settings, definition, variables); + analyzeCompound(variables); } else if (expression != null) { - analyzeWrite(settings, definition, variables); + analyzeWrite(variables); } else { analyzeRead(); } } - private void analyzeLinks(final CompilerSettings settings, final Definition definition, final Variables variables) { + private void analyzeLinks(Variables variables) { ALink previous = null; int index = 0; @@ -93,7 +91,7 @@ public final class EChain extends AExpression { current.store = expression != null || pre || post; } - final ALink analyzed = current.analyze(settings, definition, variables); + final ALink analyzed = current.analyze(variables); if (analyzed == null) { links.remove(index); @@ -154,33 +152,33 @@ public final class EChain extends AExpression { } } - private void analyzeCompound(final CompilerSettings settings, final Definition definition, final Variables variables) { + private void analyzeCompound(Variables variables) { final ALink last = links.get(links.size() - 1); - expression.analyze(settings, definition, variables); + expression.analyze(variables); if (operation == Operation.MUL) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.DIV) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = 
AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.REM) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.ADD) { - promote = AnalyzerCaster.promoteAdd(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteAdd(last.after, expression.actual); } else if (operation == Operation.SUB) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.LSH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.RSH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.USH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.BWAND) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else if (operation == Operation.XOR) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else if (operation == Operation.BWOR) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -200,42 +198,39 @@ public final class EChain extends AExpression { expression.expected = expression.actual; } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) { - expression.expected = definition.intType; + expression.expected = Definition.INT_TYPE; expression.explicit = true; } else { expression.expected = promote; } - expression = expression.cast(settings, definition, variables); + expression = expression.cast(variables); - exact = !settings.getNumericOverflow() && - (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || - operation == Operation.ADD || operation == Operation.SUB); - there = AnalyzerCaster.getLegalCast(definition, location, last.after, promote, false); - back = AnalyzerCaster.getLegalCast(definition, location, promote, last.after, true); + there = AnalyzerCaster.getLegalCast(location, last.after, promote, false, false); + back = AnalyzerCaster.getLegalCast(location, promote, last.after, true, false); this.statement = true; - this.actual = read ? last.after : definition.voidType; + this.actual = read ? 
last.after : Definition.VOID_TYPE; } - private void analyzeWrite(final CompilerSettings settings, final Definition definition, final Variables variables) { + private void analyzeWrite(Variables variables) { final ALink last = links.get(links.size() - 1); // If the store node is a DEF node, we remove the cast to DEF from the expression // and promote the real type to it: if (last instanceof IDefLink) { - expression.analyze(settings, definition, variables); + expression.analyze(variables); last.after = expression.expected = expression.actual; } else { // otherwise we adapt the type of the expression to the store type expression.expected = last.after; - expression.analyze(settings, definition, variables); + expression.analyze(variables); } - expression = expression.cast(settings, definition, variables); + expression = expression.cast(variables); this.statement = true; - this.actual = read ? last.after : definition.voidType; + this.actual = read ? last.after : Definition.VOID_TYPE; } private void analyzeRead() { @@ -252,7 +247,7 @@ public final class EChain extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { if (cat) { adapter.writeNewStrings(); } @@ -260,15 +255,15 @@ public final class EChain extends AExpression { final ALink last = links.get(links.size() - 1); for (final ALink link : links) { - link.write(settings, definition, adapter); + link.write(adapter); if (link == last && link.store) { if (cat) { adapter.writeDup(link.size, 1); - link.load(settings, definition, adapter); + link.load(adapter); adapter.writeAppendStrings(link.after); - expression.write(settings, definition, adapter); + expression.write(adapter); if (!(expression instanceof EBinary) || ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) { @@ -282,39 +277,37 @@ public final class EChain extends AExpression { adapter.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(adapter); } else if (operation != null) { adapter.writeDup(link.size, 0); - link.load(settings, definition, adapter); + link.load(adapter); if (link.load && post) { adapter.writeDup(link.after.sort.size, link.size); } adapter.writeCast(there); - expression.write(settings, definition, adapter); - adapter.writeBinaryInstruction(settings, definition, location, promote, operation); + expression.write(adapter); + adapter.writeBinaryInstruction(location, promote, operation); - if (!exact || !adapter.writeExactInstruction(definition, promote.sort, link.after.sort)) { - adapter.writeCast(back); - } + adapter.writeCast(back); if (link.load && !post) { adapter.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(adapter); } else { - expression.write(settings, definition, adapter); + expression.write(adapter); if (link.load) { adapter.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(adapter); } } else { - link.load(settings, definition, adapter); + link.load(adapter); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index d9337ae562b..08fed075335 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -19,7 
+19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -35,6 +34,8 @@ import static org.elasticsearch.painless.WriterConstants.DEF_GTE_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_GT_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_LTE_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_LT_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; +import static org.elasticsearch.painless.WriterConstants.UTILITY_TYPE; /** * Represents a comparison expression. @@ -45,7 +46,7 @@ public final class EComp extends AExpression { AExpression left; AExpression right; - public EComp(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { + public EComp(int line, String location, Operation operation, AExpression left, AExpression right) { super(line, location); this.operation = operation; @@ -54,33 +55,33 @@ public final class EComp extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.EQ) { - analyzeEq(settings, definition, variables); + analyzeEq(variables); } else if (operation == Operation.EQR) { - analyzeEqR(settings, definition, variables); + analyzeEqR(variables); } else if (operation == Operation.NE) { - analyzeNE(settings, definition, variables); + analyzeNE(variables); } else if (operation == Operation.NER) { - analyzeNER(settings, definition, variables); + analyzeNER(variables); } else if (operation == Operation.GTE) { - analyzeGTE(settings, definition, variables); + analyzeGTE(variables); } else if (operation == Operation.GT) { - analyzeGT(settings, definition, variables); + analyzeGT(variables); } else if (operation == Operation.LTE) { - analyzeLTE(settings, definition, variables); + analyzeLTE(variables); } else if (operation == Operation.LT) { - analyzeLT(settings, definition, variables); + analyzeLT(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - private void analyzeEq(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeEq(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply equals [==] to types " + @@ -90,8 +91,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); @@ -119,14 +120,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeEqR(final CompilerSettings settings, final Definition definition, final 
Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeEqR(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteReference(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply reference equals [===] to types " + @@ -136,8 +137,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); @@ -161,14 +162,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeNE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeNE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply not equals [!=] to types " + @@ -178,8 +179,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); @@ -207,14 +208,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeNER(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeNER(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteReference(definition, left.actual, right.actual); + final Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply reference not equals [!==] to types " + @@ -224,8 +225,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); @@ -249,14 +250,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeGTE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); 
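Each of the relational analyzers in this file now follows one shape: analyze both operands, promote numerically, seed expected on both sides, re-cast both operands, fold constants when possible, and finish with a boolean result. The patch keeps one method per operator; the condensed helper below is illustrative only, with the error message generalized and the folding shown for the GTE case.

--------------------------------
// Condensed shape shared by analyzeGTE/analyzeGT/analyzeLTE/analyzeLT
// after the refactor (illustrative; not a method added by this patch).
private void analyzeComparison(Variables variables) {
    left.analyze(variables);
    right.analyze(variables);

    final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
    if (promote == null) {
        throw new ClassCastException(error("Cannot apply comparison to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;
    left = left.cast(variables);
    right = right.cast(variables);

    if (left.constant != null && right.constant != null) {
        // fold, e.g. for >= on ints: constant = (int)left.constant >= (int)right.constant;
    }

    actual = Definition.BOOLEAN_TYPE;
}
--------------------------------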
- right.analyze(settings, definition, variables); + private void analyzeGTE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply greater than or equals [>=] to types " + @@ -266,8 +267,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -285,14 +286,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeGT(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeGT(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply greater than [>] to types " + @@ -302,8 +303,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -321,14 +322,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeLTE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLTE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply less than or equals [<=] to types " + @@ -338,8 +339,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -357,14 +358,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeLT(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLT(Variables variables) { + left.analyze(variables); + 
right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply less than [>=] to types " + @@ -374,8 +375,8 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { final Sort sort = promote.sort; @@ -393,19 +394,19 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { final boolean branch = tru != null || fals != null; final org.objectweb.asm.Type rtype = right.actual.type; final Sort rsort = right.actual.sort; - left.write(settings, definition, adapter); + left.write(adapter); if (!right.isNull) { - right.write(settings, definition, adapter); + right.write(adapter); } final Label jump = tru != null ? tru : fals != null ? fals : new Label(); @@ -455,34 +456,37 @@ public final class EComp extends AExpression { if (eq) { if (right.isNull) { adapter.ifNull(jump); - } else if (!left.isNull && operation == Operation.EQ) { - adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_EQ_CALL); + writejump = false; } else { adapter.ifCmp(rtype, MethodWriter.EQ, jump); } } else if (ne) { if (right.isNull) { adapter.ifNonNull(jump); - } else if (!left.isNull && operation == Operation.NE) { - adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_EQ_CALL); adapter.ifZCmp(MethodWriter.EQ, jump); } else { adapter.ifCmp(rtype, MethodWriter.NE, jump); } } else if (lt) { - adapter.invokeStatic(definition.defobjType.type, DEF_LT_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_LT_CALL); + writejump = false; } else if (lte) { - adapter.invokeStatic(definition.defobjType.type, DEF_LTE_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_LTE_CALL); + writejump = false; } else if (gt) { - adapter.invokeStatic(definition.defobjType.type, DEF_GT_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_GT_CALL); + writejump = false; } else if (gte) { - adapter.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_GTE_CALL); + writejump = false; } else { throw new IllegalStateException(error("Illegal tree structure.")); } - writejump = left.isNull || ne || operation == Operation.EQR; - if (branch && !writejump) { adapter.ifZCmp(MethodWriter.NE, jump); } @@ -492,8 +496,8 @@ public final class EComp extends AExpression { if (eq) { if (right.isNull) { adapter.ifNull(jump); - } else if (operation == Operation.EQ) { - adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS); + } else if (operation == Operation.EQ || operation == Operation.NE) { + adapter.invokeStatic(UTILITY_TYPE, CHECKEQUALS); if (branch) { adapter.ifZCmp(MethodWriter.NE, jump); @@ -506,8 +510,8 @@ public final class EComp extends 
AExpression { } else if (ne) { if (right.isNull) { adapter.ifNonNull(jump); - } else if (operation == Operation.NE) { - adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS); + } else if (operation == Operation.EQ || operation == Operation.NE) { + adapter.invokeStatic(UTILITY_TYPE, CHECKEQUALS); adapter.ifZCmp(MethodWriter.EQ, jump); } else { adapter.ifCmp(rtype, MethodWriter.NE, jump); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index 5853fa3242e..8f9d0038586 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.AnalyzerCaster; @@ -36,8 +35,7 @@ public final class EConditional extends AExpression { AExpression left; AExpression right; - public EConditional(final int line, final String location, - final AExpression condition, final AExpression left, final AExpression right) { + public EConditional(int line, String location, AExpression condition, AExpression left, AExpression right) { super(line, location); this.condition = condition; @@ -46,10 +44,10 @@ public final class EConditional extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + void analyze(Variables variables) { + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { throw new IllegalArgumentException(error("Extraneous conditional statement.")); @@ -57,27 +55,29 @@ public final class EConditional extends AExpression { left.expected = expected; left.explicit = explicit; + left.internal = internal; right.expected = expected; right.explicit = explicit; + right.internal = internal; actual = expected; - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + left.analyze(variables); + right.analyze(variables); if (expected == null) { - final Type promote = AnalyzerCaster.promoteConditional(definition, left.actual, right.actual, left.constant, right.constant); + final Type promote = AnalyzerCaster.promoteConditional(left.actual, right.actual, left.constant, right.constant); left.expected = promote; right.expected = promote; actual = promote; } - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { final Label localfals = new Label(); final Label end = new Label(); @@ -85,11 +85,11 @@ public final class EConditional extends AExpression { left.tru = right.tru = tru; left.fals = right.fals = fals; - condition.write(settings, definition, adapter); - left.write(settings, definition, adapter); + condition.write(adapter); + left.write(adapter); adapter.goTo(end); 
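The label plumbing in this write method lowers cond ? left : right into a branch/jump diamond. Below is a standalone sketch of the same shape using ASM's GeneratorAdapter directly; it is illustrative (Painless goes through its MethodWriter subclass), and the loadArg/push calls stand in for arbitrary condition and branch subexpressions.

--------------------------------
// Bytecode diamond for a ternary: fall through to the left value,
// jump to localfals for the right value.
Label localfals = new Label();
Label end = new Label();
adapter.loadArg(0);                             // assume arg 0 is the boolean condition
adapter.ifZCmp(GeneratorAdapter.EQ, localfals); // condition == false -> right branch
adapter.push(1);                                // left value
adapter.goTo(end);
adapter.mark(localfals);
adapter.push(2);                                // right value
adapter.mark(end);
--------------------------------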
adapter.mark(localfals); - right.write(settings, definition, adapter); + right.write(adapter); adapter.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index 7afa88ffc9a..ac18b849162 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -31,39 +30,39 @@ import org.elasticsearch.painless.MethodWriter; */ final class EConstant extends AExpression { - EConstant(final int line, final String location, final Object constant) { + EConstant(int line, String location, Object constant) { super(line, location); this.constant = constant; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (constant instanceof String) { - actual = definition.stringType; + actual = Definition.STRING_TYPE; } else if (constant instanceof Double) { - actual = definition.doubleType; + actual = Definition.DOUBLE_TYPE; } else if (constant instanceof Float) { - actual = definition.floatType; + actual = Definition.FLOAT_TYPE; } else if (constant instanceof Long) { - actual = definition.longType; + actual = Definition.LONG_TYPE; } else if (constant instanceof Integer) { - actual = definition.intType; + actual = Definition.INT_TYPE; } else if (constant instanceof Character) { - actual = definition.charType; + actual = Definition.CHAR_TYPE; } else if (constant instanceof Short) { - actual = definition.shortType; + actual = Definition.SHORT_TYPE; } else if (constant instanceof Byte) { - actual = definition.byteType; + actual = Definition.BYTE_TYPE; } else if (constant instanceof Boolean) { - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } else { throw new IllegalStateException(error("Illegal tree structure.")); } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { final Sort sort = actual.sort; switch (sort) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 7583d3eb158..85e857da3c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,25 +30,25 @@ public final class EDecimal extends AExpression { final String value; - public EDecimal(final int line, final String location, final String value) { + public EDecimal(int line, String location, String value) { super(line, location); this.value = value; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if 
(value.endsWith("f") || value.endsWith("F")) { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); - actual = definition.floatType; + actual = Definition.FLOAT_TYPE; } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid float constant [" + value + "].")); } } else { try { constant = Double.parseDouble(value); - actual = definition.doubleType; + actual = Definition.DOUBLE_TYPE; } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid double constant [" + value + "].")); } @@ -57,7 +56,7 @@ public final class EDecimal extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index ac0b06c0a79..7a8c9dadeca 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -32,7 +31,7 @@ public final class EExplicit extends AExpression { final String type; AExpression child; - public EExplicit(final int line, final String location, final String type, final AExpression child) { + public EExplicit(int line, String location, String type, AExpression child) { super(line, location); this.type = type; @@ -40,28 +39,29 @@ public final class EExplicit extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { try { - actual = definition.getType(this.type); + actual = Definition.getType(this.type); } catch (final IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } child.expected = actual; child.explicit = true; - child.analyze(settings, definition, variables); - child = child.cast(settings, definition, variables); + child.analyze(variables); + child = child.cast(variables); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { throw new IllegalArgumentException(error("Illegal tree structure.")); } - AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) { + AExpression cast(Variables variables) { child.expected = expected; child.explicit = explicit; + child.internal = internal; - return child.cast(settings, definition, variables); + return child.cast(variables); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 0c8500c528b..3a8005b20d6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import 
org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Opcodes; @@ -30,12 +29,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class ENull extends AExpression { - public ENull(final int line, final String location) { + public ENull(int line, String location) { super(line, location); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { isNull = true; if (expected != null) { @@ -45,12 +44,12 @@ public final class ENull extends AExpression { actual = expected; } else { - actual = definition.objectType; + actual = Definition.OBJECT_TYPE; } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { adapter.visitInsn(Opcodes.ACONST_NULL); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index ed7314b3571..ee70bb77d44 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -33,7 +32,7 @@ public final class ENumeric extends AExpression { final String value; int radix; - public ENumeric(final int line, final String location, final String value, final int radix) { + public ENumeric(int line, String location, String value, int radix) { super(line, location); this.value = value; @@ -41,7 +40,7 @@ public final class ENumeric extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (value.endsWith("d") || value.endsWith("D")) { if (radix != 10) { throw new IllegalStateException(error("Invalid tree structure.")); @@ -49,7 +48,7 @@ public final class ENumeric extends AExpression { try { constant = Double.parseDouble(value.substring(0, value.length() - 1)); - actual = definition.doubleType; + actual = Definition.DOUBLE_TYPE; } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid double constant [" + value + "].")); } @@ -60,14 +59,14 @@ public final class ENumeric extends AExpression { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); - actual = definition.floatType; + actual = Definition.FLOAT_TYPE; } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid float constant [" + value + "].")); } } else if (value.endsWith("l") || value.endsWith("L")) { try { constant = Long.parseLong(value.substring(0, value.length() - 1), radix); - actual = definition.longType; + actual = Definition.LONG_TYPE; } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid long constant [" + value + "].")); } @@ -78,16 +77,16 @@ public final class ENumeric extends AExpression { if (sort == Sort.BYTE && integer >= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) { constant = (byte)integer; - actual = definition.byteType; + actual = 
Definition.BYTE_TYPE; } else if (sort == Sort.CHAR && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) { constant = (char)integer; - actual = definition.charType; + actual = Definition.CHAR_TYPE; } else if (sort == Sort.SHORT && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) { constant = (short)integer; - actual = definition.shortType; + actual = Definition.SHORT_TYPE; } else { constant = integer; - actual = definition.intType; + actual = Definition.INT_TYPE; } } catch (final NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid int constant [" + value + "].")); @@ -96,7 +95,7 @@ public final class ENumeric extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index f4205223a65..d62fa2c8ebf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -31,8 +30,7 @@ import org.elasticsearch.painless.MethodWriter; import static org.elasticsearch.painless.WriterConstants.DEF_NEG_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_NOT_CALL; -import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; /** * Represents a unary math expression. 
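
ENumeric's analyze above chooses the constant's type from the literal suffix (d/D, f/F, l/L) and otherwise parses a radix-aware int that may then be narrowed to byte, char, or short by the expected type. A standalone plain-Java sketch of that suffix rule (class name hypothetical; the real node also rejects non-decimal radixes for the floating-point suffixes):

final class NumericLiteralSketch {
    // Mirrors the suffix dispatch in ENumeric.analyze; the narrowing step is omitted.
    static Object parse(String value, int radix) {
        if (value.endsWith("d") || value.endsWith("D")) {
            return Double.parseDouble(value.substring(0, value.length() - 1));
        } else if (value.endsWith("f") || value.endsWith("F")) {
            return Float.parseFloat(value.substring(0, value.length() - 1));
        } else if (value.endsWith("l") || value.endsWith("L")) {
            return Long.parseLong(value.substring(0, value.length() - 1), radix);
        }
        return Integer.parseInt(value, radix);
    }

    public static void main(String[] args) {
        System.out.println(parse("3.14d", 10)); // 3.14 (Double)
        System.out.println(parse("ffL", 16));   // 255 (Long)
        System.out.println(parse("ff", 16));    // 255 (Integer)
    }
}
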
@@ -42,7 +40,7 @@ public final class EUnary extends AExpression { Operation operation; AExpression child; - public EUnary(final int line, final String location, final Operation operation, final AExpression child) { + public EUnary(int line, String location, Operation operation, AExpression child) { super(line, location); this.operation = operation; @@ -50,43 +48,43 @@ public final class EUnary extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.NOT) { - analyzeNot(settings, definition, variables); + analyzeNot(variables); } else if (operation == Operation.BWNOT) { - analyzeBWNot(settings, definition, variables); + analyzeBWNot(variables); } else if (operation == Operation.ADD) { - analyzerAdd(settings, definition, variables); + analyzerAdd(variables); } else if (operation == Operation.SUB) { - analyzerSub(settings, definition, variables); + analyzerSub(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - void analyzeNot(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.expected = definition.booleanType; - child.analyze(settings, definition, variables); - child = child.cast(settings, definition, variables); + void analyzeNot(Variables variables) { + child.expected = Definition.BOOLEAN_TYPE; + child.analyze(variables); + child = child.cast(variables); if (child.constant != null) { constant = !(boolean)child.constant; } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - void analyzeBWNot(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, definition, variables); + void analyzeBWNot(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, false, true); + final Type promote = AnalyzerCaster.promoteNumeric(child.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply not [~] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { final Sort sort = promote.sort; @@ -103,17 +101,17 @@ public final class EUnary extends AExpression { actual = promote; } - void analyzerAdd(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, definition, variables); + void analyzerAdd(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, true); + final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply positive [+] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { final Sort sort = promote.sort; @@ -134,27 +132,25 @@ public final class EUnary extends AExpression { actual = promote; } - void analyzerSub(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, definition, variables); + void analyzerSub(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, 
true); + final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply negative [-] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { - final boolean overflow = settings.getNumericOverflow(); final Sort sort = promote.sort; - if (sort == Sort.INT) { - constant = overflow ? -(int)child.constant : Math.negateExact((int)child.constant); + constant = -(int)child.constant; } else if (sort == Sort.LONG) { - constant = overflow ? -(long)child.constant : Math.negateExact((long)child.constant); + constant = -(long)child.constant; } else if (sort == Sort.FLOAT) { constant = -(float)child.constant; } else if (sort == Sort.DOUBLE) { @@ -168,14 +164,14 @@ public final class EUnary extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { if (operation == Operation.NOT) { if (tru == null && fals == null) { final Label localfals = new Label(); final Label end = new Label(); child.fals = localfals; - child.write(settings, definition, adapter); + child.write(adapter); adapter.push(false); adapter.goTo(end); @@ -185,17 +181,17 @@ public final class EUnary extends AExpression { } else { child.tru = fals; child.fals = tru; - child.write(settings, definition, adapter); + child.write(adapter); } } else { final org.objectweb.asm.Type type = actual.type; final Sort sort = actual.sort; - child.write(settings, definition, adapter); + child.write(adapter); if (operation == Operation.BWNOT) { if (sort == Sort.DEF) { - adapter.invokeStatic(definition.defobjType.type, DEF_NOT_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_NOT_CALL); } else { if (sort == Sort.INT) { adapter.push(-1); @@ -209,19 +205,9 @@ public final class EUnary extends AExpression { } } else if (operation == Operation.SUB) { if (sort == Sort.DEF) { - adapter.invokeStatic(definition.defobjType.type, DEF_NEG_CALL); + adapter.invokeStatic(DEF_UTIL_TYPE, DEF_NEG_CALL); } else { - if (settings.getNumericOverflow()) { - adapter.math(MethodWriter.NEG, type); - } else { - if (sort == Sort.INT) { - adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_INT); - } else if (sort == Sort.LONG) { - adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); - } else { - throw new IllegalStateException(error("Illegal tree structure.")); - } - } + adapter.math(MethodWriter.NEG, type); } } else if (operation != Operation.ADD) { throw new IllegalStateException(error("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java index 5803fcfa273..4a6df96073e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,14 +30,14 @@ public final class LArrayLength extends ALink { final String value; - LArrayLength(final int line, final String location, final String value) { + LArrayLength(int line, 
String location, String value) { super(line, location, -1); this.value = value; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if ("length".equals(value)) { if (!load) { throw new IllegalArgumentException(error("Must read array field [length].")); @@ -46,7 +45,7 @@ public final class LArrayLength extends ALink { throw new IllegalArgumentException(error("Cannot write to read-only array field [length].")); } - after = definition.intType; + after = Definition.INT_TYPE; } else { throw new IllegalArgumentException(error("Illegal field access [" + value + "].")); } @@ -55,17 +54,17 @@ public final class LArrayLength extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { adapter.arrayLength(); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java index b38826f9e7e..95cc02602f6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -35,14 +34,14 @@ public final class LBrace extends ALink { AExpression index; - public LBrace(final int line, final String location, final AExpression index) { + public LBrace(int line, String location, AExpression index) { super(line, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -50,36 +49,36 @@ public final class LBrace extends ALink { final Sort sort = before.sort; if (sort == Sort.ARRAY) { - index.expected = definition.intType; - index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.expected = Definition.INT_TYPE; + index.analyze(variables); + index = index.cast(variables); - after = definition.getType(before.struct, before.dimensions - 1); + after = Definition.getType(before.struct, before.dimensions - 1); return this; } else if (sort == Sort.DEF) { - return new LDefArray(line, location, index).copy(this).analyze(settings, definition, variables); + return new LDefArray(line, location, index).copy(this).analyze(variables); } else if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LMapShortcut(line, location, index).copy(this).analyze(variables); } else if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(line, location, 
index).copy(this).analyze(settings, definition, variables); + return new LListShortcut(line, location, index).copy(this).analyze(variables); } throw new IllegalArgumentException(error("Illegal array access on type [" + before.name + "].")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter adapter) { + index.write(adapter); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { adapter.arrayLoad(after.type); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { adapter.arrayStore(after.type); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java index bd76aa293cc..fdb6612e1f4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Struct; @@ -38,7 +37,7 @@ public final class LCall extends ALink { Method method = null; - public LCall(final int line, final String location, final String name, final List<AExpression> arguments) { + public LCall(int line, String location, String name, List<AExpression> arguments) { super(line, location, -1); this.name = name; @@ -46,7 +45,7 @@ public final class LCall extends ALink { } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { throw new IllegalStateException(error("Illegal tree structure.")); } else if (before.sort == Definition.Sort.ARRAY) { @@ -64,8 +63,9 @@ public final class LCall extends ALink { final AExpression expression = arguments.get(argument); expression.expected = method.arguments.get(argument); - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.internal = true; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } statement = true; @@ -76,22 +76,22 @@ public final class LCall extends ALink { final ALink link = new LDefCall(line, location, name, arguments); link.copy(this); - return link.analyze(settings, definition, variables); + return link.analyze(variables); } - throw new IllegalArgumentException(error("Unknown call [" + name + "] with [" + arguments.size() + + throw new IllegalArgumentException(error("Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing.
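
LCall above resolves a call by name and argument count against the receiver struct's method table, and for a def receiver re-links to LDefCall rather than failing. A rough, self-contained model of that dispatch decision (all names hypothetical):

import java.util.Map;

// Hypothetical model of LCall's resolution: a (name, arity) key into the
// struct's method table, with a dynamic fallback when the receiver's static
// type is unknown (def).
final class CallResolutionSketch {
    record MethodKey(String name, int arity) {}

    static String resolve(Map<MethodKey, String> methods, String name, int arity, boolean receiverIsDef) {
        String descriptor = methods.get(new MethodKey(name, arity));
        if (descriptor != null) {
            return "typed call: " + descriptor;      // the LCall path
        } else if (receiverIsDef) {
            return "dynamic call via invokedynamic"; // the LDefCall path
        }
        throw new IllegalArgumentException("Unknown call [" + name + "] with [" + arity + "] arguments.");
    }
}
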
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + argument.write(adapter); } if (java.lang.reflect.Modifier.isStatic(method.reflect.getModifiers())) { @@ -108,7 +108,7 @@ public final class LCall extends ALink { } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java index eb7fb3a6b10..8e917c5b99e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.AnalyzerCaster; @@ -35,14 +34,14 @@ public final class LCast extends ALink { Cast cast = null; - public LCast(final int line, final String location, final String type) { + public LCast(int line, String location, String type) { super(line, location, -1); this.type = type; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { throw new IllegalStateException(error("Illegal tree structure.")); } else if (store) { @@ -50,28 +49,28 @@ public final class LCast extends ALink { } try { - after = definition.getType(type); + after = Definition.getType(type); } catch (final IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + type + "].")); } - cast = AnalyzerCaster.getLegalCast(definition, location, before, after, true); + cast = AnalyzerCaster.getLegalCast(location, before, after, true, false); return cast != null ? this : null; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { adapter.writeCast(cast); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { // Do nothing. 
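
LCall.load above picks the invocation style from the resolved method's modifiers. The same check is expressible with plain reflection; a minimal runnable sketch:

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

// Sketch of the opcode choice in LCall.load: statics become invokestatic,
// interface methods invokeinterface, everything else invokevirtual.
final class InvokeKindSketch {
    static String of(Method method) {
        if (Modifier.isStatic(method.getModifiers())) {
            return "invokestatic";
        }
        return method.getDeclaringClass().isInterface() ? "invokeinterface" : "invokevirtual";
    }

    public static void main(String[] args) throws Exception {
        System.out.println(of(Integer.class.getMethod("parseInt", String.class))); // invokestatic
        System.out.println(of(CharSequence.class.getMethod("length")));            // invokeinterface
        System.out.println(of(Object.class.getMethod("toString")));                // invokevirtual
    }
}
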
} @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java index 98b2fbe7bf9..1ef12026e65 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -35,37 +34,37 @@ final class LDefArray extends ALink implements IDefLink { AExpression index; - LDefArray(final int line, final String location, final AExpression index) { + LDefArray(int line, String location, AExpression index) { super(line, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - index.analyze(settings, definition, variables); + ALink analyze(Variables variables) { + index.analyze(variables); index.expected = index.actual; - index = index.cast(settings, definition, variables); + index = index.cast(variables); - after = definition.defType; + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter adapter) { + index.write(adapter); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(after.type, definition.defType.type, index.actual.type); + void load(MethodWriter adapter) { + final String desc = Type.getMethodDescriptor(after.type, Definition.DEF_TYPE.type, index.actual.type); adapter.invokeDynamic("arrayLoad", desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.ARRAY_LOAD); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(definition.voidType.type, definition.defType.type, + void store(MethodWriter adapter) { + final String desc = Type.getMethodDescriptor(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, index.actual.type, after.type); adapter.invokeDynamic("arrayStore", desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.ARRAY_STORE); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java index 9dbe65110ee..a9d1cb2b892 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -37,7 +36,7 @@ final class LDefCall extends ALink implements IDefLink { final String name; final List<AExpression> arguments; - LDefCall(final int line, final String location, final 
String name, final List<AExpression> arguments) { + LDefCall(int line, String location, String name, List<AExpression> arguments) { super(line, location, -1); this.name = name; @@ -45,39 +44,40 @@ final class LDefCall extends ALink implements IDefLink { } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { for (int argument = 0; argument < arguments.size(); ++argument) { final AExpression expression = arguments.get(argument); - expression.analyze(settings, definition, variables); + expression.internal = true; + expression.analyze(variables); expression.expected = expression.actual; - arguments.set(argument, expression.cast(settings, definition, variables)); + arguments.set(argument, expression.cast(variables)); } statement = true; - after = definition.defType; + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { final StringBuilder signature = new StringBuilder(); signature.append('('); // first parameter is the receiver, we never know its type: always Object - signature.append(definition.defType.type.getDescriptor()); + signature.append(Definition.DEF_TYPE.type.getDescriptor()); // TODO: remove our explicit conversions and feed more type information for return value, // it can avoid some unnecessary boxing etc. for (final AExpression argument : arguments) { signature.append(argument.actual.type.getDescriptor()); - argument.write(settings, definition, adapter); + argument.write(adapter); } signature.append(')'); @@ -88,7 +88,7 @@ final class LDefCall extends ALink implements IDefLink { } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java index 759b407cb5a..ee0cf990c5b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -35,7 +34,7 @@ final class LDefField extends ALink implements IDefLink { final String value; - LDefField(final int line, final String location, final String value) { + LDefField(int line, String location, String value) { super(line, location, 1); this.value = value; @@ -43,26 +42,26 @@ @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - after = definition.defType; + ALink analyze(Variables variables) { + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing.
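
LDefCall.load above builds the invokedynamic call-site descriptor by hand: the receiver slot is always Object (def) and each argument contributes its statically known descriptor. A sketch using ASM's Type; treating the return type as Object mirrors after = Definition.DEF_TYPE but is an assumption here:

import org.objectweb.asm.Type;

// Illustrative only: the shape of the descriptor LDefCall assembles.
final class DefCallDescriptorSketch {
    static String build(Class<?>... argumentTypes) {
        StringBuilder signature = new StringBuilder("(");
        signature.append(Type.getDescriptor(Object.class)); // def receiver, type never known
        for (Class<?> argument : argumentTypes) {
            signature.append(Type.getDescriptor(argument));
        }
        return signature.append(')').append(Type.getDescriptor(Object.class)).toString();
    }

    public static void main(String[] args) {
        // prints (Ljava/lang/Object;ILjava/lang/String;)Ljava/lang/Object;
        System.out.println(build(int.class, String.class));
    }
}
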
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(after.type, definition.defType.type); + void load(MethodWriter adapter) { + final String desc = Type.getMethodDescriptor(after.type, Definition.DEF_TYPE.type); adapter.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.LOAD); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(definition.voidType.type, definition.defType.type, after.type); + void store(MethodWriter adapter) { + final String desc = Type.getMethodDescriptor(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, after.type); adapter.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.STORE); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java index 06f820eba26..44ba33acdda 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Field; import org.elasticsearch.painless.Definition.Sort; @@ -39,14 +38,14 @@ public final class LField extends ALink { Field field; - public LField(final int line, final String location, final String value) { + public LField(int line, String location, String value) { super(line, location, 1); this.value = value; } @Override - ALink analyze(CompilerSettings settings, Definition definition, Variables variables) { + ALink analyze(Variables variables) { if (before == null) { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -54,9 +53,9 @@ public final class LField extends ALink { final Sort sort = before.sort; if (sort == Sort.ARRAY) { - return new LArrayLength(line, location, value).copy(this).analyze(settings, definition, variables); + return new LArrayLength(line, location, value).copy(this).analyze(variables); } else if (sort == Sort.DEF) { - return new LDefField(line, location, value).copy(this).analyze(settings, definition, variables); + return new LDefField(line, location, value).copy(this).analyze(variables); } final Struct struct = before.struct; @@ -80,17 +79,17 @@ public final class LField extends ALink { Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (shortcut) { - return new LShortcut(line, location, value).copy(this).analyze(settings, definition, variables); + return new LShortcut(line, location, value).copy(this).analyze(variables); } else { final EConstant index = new EConstant(line, location, value); - index.analyze(settings, definition, variables); + index.analyze(variables); if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LMapShortcut(line, location, index).copy(this).analyze(variables); } if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LListShortcut(line, location, index).copy(this).analyze(variables); } } } @@ -99,29 +98,21 @@ public final class LField extends ALink { } @Override - 
void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { adapter.getStatic(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - adapter.checkCast(field.generic.type); - } } else { adapter.getField(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - adapter.checkCast(field.generic.type); - } } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { adapter.putStatic(field.owner.type, field.reflect.getName(), field.type.type); } else { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java index 8526ef1297e..52f82c9d1ca 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -35,14 +34,14 @@ final class LListShortcut extends ALink { Method getter; Method setter; - LListShortcut(final int line, final String location, final AExpression index) { + LListShortcut(int line, String location, AExpression index) { super(line, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); setter = before.struct.methods.get(new Definition.MethodKey("set", 2)); @@ -61,9 +60,9 @@ final class LListShortcut extends ALink { } if ((load || store) && (!load || getter != null) && (!store || setter != null)) { - index.expected = definition.intType; - index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.expected = Definition.INT_TYPE; + index.analyze(variables); + index = index.cast(variables); after = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { @@ -74,12 +73,12 @@ final class LListShortcut extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter adapter) { + index.write(adapter); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { adapter.invokeInterface(getter.owner.type, getter.method); } else { @@ -92,7 +91,7 @@ final class LListShortcut extends ALink { } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { adapter.invokeInterface(setter.owner.type, setter.method); } else { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java index 4efbd4bdf0f..9fbc39d72b3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -35,14 +34,14 @@ final class LMapShortcut extends ALink { Method getter; Method setter; - LMapShortcut(final int line, final String location, final AExpression index) { + LMapShortcut(int line, String location, AExpression index) { super(line, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); setter = before.struct.methods.get(new Definition.MethodKey("put", 2)); @@ -61,8 +60,8 @@ final class LMapShortcut extends ALink { if ((load || store) && (!load || getter != null) && (!store || setter != null)) { index.expected = setter != null ? setter.arguments.get(0) : getter.arguments.get(0); - index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.analyze(variables); + index = index.cast(variables); after = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { @@ -73,12 +72,12 @@ } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter adapter) { + index.write(adapter); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { adapter.invokeInterface(getter.owner.type, getter.method); } else { @@ -91,7 +90,7 @@ } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { adapter.invokeInterface(setter.owner.type, setter.method); } else { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java index 67f9769bd6a..da88b45e72d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; @@ -35,7 +34,7 @@ public final class LNewArray extends ALink { final String type; final List<AExpression> arguments; - public LNewArray(final int line, final String location, final String type, final List<AExpression> arguments) { + public LNewArray(int line, String location, String type, List<AExpression> arguments) { super(line, location, -1); this.type = type; @@ -43,7 +42,7 @@ } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { throw new IllegalStateException(error("Illegal tree structure.")); } else if (store) { @@ -55,7 +54,7 @@ final Type type; try { - type = definition.getType(this.type); + type = Definition.getType(this.type); } catch (final IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } @@ -63,36 +62,36 @@ for (int argument = 0; argument < arguments.size(); ++argument) { final AExpression expression = arguments.get(argument); - expression.expected = definition.intType; - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.expected = Definition.INT_TYPE; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } - after = definition.getType(type.struct, arguments.size()); + after = Definition.getType(type.struct, arguments.size()); return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing.
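
The map shortcut above lowers an index read to the struct's get method and an index write to put, casting the index to whatever the resolved signature expects. The lowering target in ordinary Java:

import java.util.HashMap;
import java.util.Map;

// What the shortcut compiles down to: map['x'] = 1 becomes put, map['x'] becomes get.
public final class MapShortcutSketch {
    public static void main(String[] args) {
        Map<String, Integer> map = new HashMap<>();
        map.put("x", 1);
        System.out.println(map.get("x")); // 1
    }
}
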
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + argument.write(adapter); } if (arguments.size() > 1) { adapter.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions()); } else { - adapter.newArray(definition.getType(after.struct, 0).type); + adapter.newArray(Definition.getType(after.struct, 0).type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java index 227b63cf31f..28209e96f2b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Constructor; import org.elasticsearch.painless.Definition.Struct; @@ -39,7 +38,7 @@ public final class LNewObj extends ALink { Constructor constructor; - public LNewObj(final int line, final String location, final String type, final List<AExpression> arguments) { + public LNewObj(int line, String location, String type, List<AExpression> arguments) { super(line, location, -1); this.type = type; @@ -47,7 +46,7 @@ } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { throw new IllegalStateException(error("Illegal tree structure")); } else if (store) { @@ -57,7 +56,7 @@ final Type type; try { - type = definition.getType(this.type); + type = Definition.getType(this.type); } catch (final IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } @@ -78,8 +77,9 @@ final AExpression expression = arguments.get(argument); expression.expected = types[argument]; - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.internal = true; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } statement = true; @@ -92,27 +92,27 @@ } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing.
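
LNewArray.load above emits MULTIANEWARRAY when there are several dimension expressions and a plain newArray otherwise. Reflection offers a runtime analogue of the same split:

import java.lang.reflect.Array;

// Runtime analogue of the one-vs-many dimensions split in LNewArray.load.
final class NewArraySketch {
    public static void main(String[] args) {
        int[] single = (int[]) Array.newInstance(int.class, 5);       // single dimension
        int[][] multi = (int[][]) Array.newInstance(int.class, 3, 4); // multiple dimensions
        System.out.println(single.length + " " + multi.length + " " + multi[0].length); // 5 3 4
    }
}
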
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { adapter.newInstance(after.type); if (load) { adapter.dup(); } - for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + for (AExpression argument : arguments) { + argument.write(adapter); } adapter.invokeConstructor(constructor.owner.type, constructor.method); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java index c65077e6e28..85a7d70c46e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -37,14 +36,14 @@ final class LShortcut extends ALink { Method getter = null; Method setter = null; - LShortcut(final int line, final String location, final String value) { + LShortcut(int line, String location, String value) { super(line, location, 1); this.value = value; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { final Struct struct = before.struct; getter = struct.methods.get(new Definition.MethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); @@ -74,12 +73,12 @@ final class LShortcut extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. 
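
LShortcut above rewrites a bare field access into getX/setX lookups keyed by the capitalized property name (arity 0 for the getter, 1 for the setter). The naming rule in isolation, as a hypothetical helper:

final class PropertyNameSketch {
    // Mirrors LShortcut's accessor-name construction shown above.
    static String accessor(String prefix, String property) {
        return prefix + Character.toUpperCase(property.charAt(0)) + property.substring(1);
    }

    public static void main(String[] args) {
        System.out.println(accessor("get", "bar")); // getBar
        System.out.println(accessor("set", "bar")); // setBar
    }
}
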
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { adapter.invokeInterface(getter.owner.type, getter.method); } else { @@ -92,7 +91,7 @@ final class LShortcut extends ALink { } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { adapter.invokeInterface(setter.owner.type, setter.method); } else { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java index 1d11652f483..07d76d42676 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,14 +28,14 @@ import org.elasticsearch.painless.MethodWriter; */ public final class LString extends ALink { - public LString(final int line, final String location, final String string) { + public LString(int line, String location, String string) { super(line, location, -1); this.string = string; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { throw new IllegalStateException("Illegal tree structure."); } else if (store) { @@ -45,23 +44,23 @@ public final class LString extends ALink { throw new IllegalArgumentException(error("Must read String constant [" + string + "].")); } - after = definition.stringType; + after = Definition.STRING_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. 
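
LString above analyzes to a read-only link of type String: its load (just below) pushes the constant, while a store is rejected during analysis. A compact model of that contract, with a hypothetical type:

// Hypothetical model of a read-only constant link like LString.
final class ConstantLinkSketch {
    private final String constant;

    ConstantLinkSketch(String constant) {
        this.constant = constant;
    }

    String load() {
        return constant; // compiles to a single constant push
    }

    void store(String value) {
        throw new IllegalStateException("Illegal tree structure."); // constants are not assignable
    }
}
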
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { adapter.push(string); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java index 35a652f5b84..85a2048bdc6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; @@ -36,14 +35,14 @@ public final class LVariable extends ALink { int slot; - public LVariable(final int line, final String location, final String name) { + public LVariable(int line, String location, String name) { super(line, location, 0); this.name = name; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -51,7 +50,7 @@ public final class LVariable extends ALink { Type type = null; try { - type = definition.getType(name); + type = Definition.getType(name); } catch (final IllegalArgumentException exception) { // Do nothing. } @@ -74,17 +73,17 @@ public final class LVariable extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { // Do nothing. 
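
LVariable, whose load and store follow just below, accesses its slot through after.type.getOpcode(Opcodes.ILOAD/ISTORE), letting ASM select the typed variant of the instruction. A runnable check of that mapping (requires the ASM library):

import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

// ASM maps the ILOAD/ISTORE templates to the opcode matching the actual type,
// which is exactly what LVariable relies on for slot access.
final class SlotOpcodeSketch {
    public static void main(String[] args) {
        System.out.println(Type.LONG_TYPE.getOpcode(Opcodes.ILOAD) == Opcodes.LLOAD);             // true
        System.out.println(Type.DOUBLE_TYPE.getOpcode(Opcodes.ISTORE) == Opcodes.DSTORE);         // true
        System.out.println(Type.getType(Object.class).getOpcode(Opcodes.ILOAD) == Opcodes.ALOAD); // true
    }
}
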
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter adapter) { adapter.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), slot); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter adapter) { adapter.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), slot); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index 802d5b6c415..a925c411a2a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -34,17 +32,17 @@ public final class SBlock extends AStatement { final List<AStatement> statements; - public SBlock(final int line, final String location, final List<AStatement> statements) { + public SBlock(int line, String location, List<AStatement> statements) { super(line, location); this.statements = Collections.unmodifiableList(statements); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { final AStatement last = statements.get(statements.size() - 1); - for (final AStatement statement : statements) { + for (AStatement statement : statements) { if (allEscape) { throw new IllegalArgumentException(error("Unreachable statement.")); } @@ -53,7 +51,7 @@ statement.lastSource = lastSource && statement == last; statement.lastLoop = (beginLoop || lastLoop) && statement == last; - statement.analyze(settings, definition, variables); + statement.analyze(variables); methodEscape = statement.methodEscape; loopEscape = statement.loopEscape; @@ -65,11 +63,11 @@ } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final AStatement statement : statements) { + void write(MethodWriter adapter) { + for (AStatement statement : statements) { statement.continu = continu; statement.brake = brake; - statement.write(settings, definition, adapter); + statement.write(adapter); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java index 3998b1a9ff2..fb12a2b452e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,12 +27,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SBreak extends AStatement { - public SBreak(final int line, final String location) { + public SBreak(int line, String location) { super(line, location); } @Override - void analyze(final CompilerSettings settings, final Definition 
definition, final Variables variables) { + void analyze(Variables variables) { if (!inLoop) { throw new IllegalArgumentException(error("Break statement outside of a loop.")); } @@ -46,7 +44,7 @@ public final class SBreak extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); adapter.goTo(brake); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java index 2c4e33b6326..01ca3f5697d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,12 +27,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SContinue extends AStatement { - public SContinue(final int line, final String location) { + public SContinue(int line, String location) { super(line, location); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (!inLoop) { throw new IllegalArgumentException(error("Continue statement outside of a loop.")); } @@ -49,7 +47,7 @@ public final class SContinue extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); adapter.goTo(continu); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java index 5494ef9c32a..917dd9b8e5a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -34,25 +32,25 @@ public final class SDeclBlock extends AStatement { final List declarations; - public SDeclBlock(final int line, final String location, final List declarations) { + public SDeclBlock(int line, String location, List declarations) { super(line, location); this.declarations = Collections.unmodifiableList(declarations); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { for (final SDeclaration declaration : declarations) { - declaration.analyze(settings, definition, variables); + declaration.analyze(variables); } statementCount = declarations.size(); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { for (final SDeclaration declaration : declarations) { - declaration.write(settings, definition, adapter); + declaration.write(adapter); } } } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index 189ddc95936..0e366dfc19b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.Variables.Variable; @@ -38,7 +36,7 @@ public final class SDeclaration extends AStatement { Variable variable; - public SDeclaration(final int line, final String location, final String type, final String name, final AExpression expression) { + public SDeclaration(int line, String location, String type, String name, AExpression expression) { super(line, location); this.type = type; @@ -47,18 +45,18 @@ public final class SDeclaration extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variable = variables.addVariable(location, type, name, false, false); if (expression != null) { expression.expected = variable.type; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + expression.analyze(variables); + expression = expression.cast(variables); } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final org.objectweb.asm.Type type = variable.type.type; final Sort sort = variable.type.sort; @@ -66,7 +64,7 @@ public final class SDeclaration extends AStatement { final boolean initialize = expression == null; if (!initialize) { - expression.write(settings, definition, adapter); + expression.write(adapter); } switch (sort) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java index 63d3df4bcee..6c8785a923a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -32,30 +31,32 @@ public final class SDo extends AStatement { final AStatement block; AExpression condition; + final int maxLoopCounter; - public SDo(final int line, final String location, final AStatement block, final AExpression condition) { + public SDo(int line, String location, AStatement block, AExpression condition, int maxLoopCounter) { super(line, location); this.condition = condition; this.block = block; + this.maxLoopCounter = maxLoopCounter; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && 
!block.anyContinue) { throw new IllegalArgumentException(error("Extraneous do while loop.")); } - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { final boolean continuous = (boolean)condition.constant; @@ -72,7 +73,7 @@ public final class SDo extends AStatement { statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -80,7 +81,7 @@ public final class SDo extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label start = new Label(); final Label begin = new Label(); @@ -90,12 +91,12 @@ public final class SDo extends AStatement { block.continu = begin; block.brake = end; - block.write(settings, definition, adapter); + block.write(adapter); adapter.mark(begin); condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(adapter); adapter.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java index dd7cffaa970..9d2bc6385a5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -32,16 +31,16 @@ public final class SExpression extends AStatement { AExpression expression; - public SExpression(final int line, final String location, final AExpression expression) { + public SExpression(int line, String location, AExpression expression) { super(line, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { expression.read = lastSource; - expression.analyze(settings, definition, variables); + expression.analyze(variables); if (!lastSource && !expression.statement) { throw new IllegalArgumentException(error("Not a statement.")); @@ -49,8 +48,9 @@ public final class SExpression extends AStatement { final boolean rtn = lastSource && expression.actual.sort != Sort.VOID; - expression.expected = rtn ? definition.objectType : expression.actual; - expression = expression.cast(settings, definition, variables); + expression.expected = rtn ? 
Definition.OBJECT_TYPE : expression.actual; + expression.internal = rtn; + expression = expression.cast(variables); methodEscape = rtn; loopEscape = rtn; @@ -59,9 +59,9 @@ public final class SExpression extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); - expression.write(settings, definition, adapter); + expression.write(adapter); if (methodEscape) { adapter.returnValue(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java index 43c978b47fa..56073e6e8db 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -34,31 +33,33 @@ public final class SFor extends AStatement { AExpression condition; AExpression afterthought; final AStatement block; + final int maxLoopCounter; - public SFor(final int line, final String location, - final ANode initializer, final AExpression condition, final AExpression afterthought, final AStatement block) { + public SFor(int line, String location, + ANode initializer, AExpression condition, AExpression afterthought, AStatement block, int maxLoopCounter) { super(line, location); this.initializer = initializer; this.condition = condition; this.afterthought = afterthought; this.block = block; + this.maxLoopCounter = maxLoopCounter; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); boolean continuous = false; if (initializer != null) { if (initializer instanceof SDeclBlock) { - ((SDeclBlock)initializer).analyze(settings, definition, variables); + ((SDeclBlock)initializer).analyze(variables); } else if (initializer instanceof AExpression) { final AExpression initializer = (AExpression)this.initializer; initializer.read = false; - initializer.analyze(settings, definition, variables); + initializer.analyze(variables); if (!initializer.statement) { throw new IllegalArgumentException(initializer.error("Not a statement.")); @@ -70,9 +71,9 @@ public final class SFor extends AStatement { if (condition != null) { - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { continuous = (boolean)condition.constant; @@ -91,7 +92,7 @@ public final class SFor extends AStatement { if (afterthought != null) { afterthought.read = false; - afterthought.analyze(settings, definition, variables); + afterthought.analyze(variables); if (!afterthought.statement) { throw new IllegalArgumentException(afterthought.error("Not a statement.")); @@ -104,7 +105,7 @@ public final class SFor extends AStatement { block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && !block.anyContinue) { throw new 
IllegalArgumentException(error("Extraneous for loop.")); @@ -120,7 +121,7 @@ public final class SFor extends AStatement { statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -128,18 +129,18 @@ public final class SFor extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label start = new Label(); final Label begin = afterthought == null ? start : new Label(); final Label end = new Label(); if (initializer instanceof SDeclBlock) { - ((SDeclBlock)initializer).write(settings, definition, adapter); + ((SDeclBlock)initializer).write(adapter); } else if (initializer instanceof AExpression) { AExpression initializer = (AExpression)this.initializer; - initializer.write(settings, definition, adapter); + initializer.write(adapter); adapter.writePop(initializer.expected.sort.size); } @@ -147,7 +148,7 @@ public final class SFor extends AStatement { if (condition != null) { condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(adapter); } boolean allEscape = false; @@ -162,14 +163,14 @@ public final class SFor extends AStatement { } adapter.writeLoopCounter(loopCounterSlot, statementCount); - block.write(settings, definition, adapter); + block.write(adapter); } else { adapter.writeLoopCounter(loopCounterSlot, 1); } if (afterthought != null) { adapter.mark(begin); - afterthought.write(settings, definition, adapter); + afterthought.write(adapter); } if (afterthought != null || !allEscape) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index 698d8c8126c..560ce44e415 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -34,8 +33,7 @@ public final class SIfElse extends AStatement { final AStatement ifblock; final AStatement elseblock; - public SIfElse(final int line, final String location, - final AExpression condition, final AStatement ifblock, final AStatement elseblock) { + public SIfElse(int line, String location, AExpression condition, AStatement ifblock, AStatement elseblock) { super(line, location); this.condition = condition; @@ -44,10 +42,10 @@ public final class SIfElse extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + void analyze(Variables variables) { + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { throw new IllegalArgumentException(error("Extraneous if statement.")); @@ -58,7 +56,7 @@ public final class SIfElse extends AStatement { ifblock.lastLoop = lastLoop; variables.incrementScope(); - ifblock.analyze(settings, definition, variables); + 
ifblock.analyze(variables); variables.decrementScope(); anyContinue = ifblock.anyContinue; @@ -71,7 +69,7 @@ public final class SIfElse extends AStatement { elseblock.lastLoop = lastLoop; variables.incrementScope(); - elseblock.analyze(settings, definition, variables); + elseblock.analyze(variables); variables.decrementScope(); methodEscape = ifblock.methodEscape && elseblock.methodEscape; @@ -84,17 +82,17 @@ public final class SIfElse extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label end = new Label(); final Label fals = elseblock != null ? new Label() : end; condition.fals = fals; - condition.write(settings, definition, adapter); + condition.write(adapter); ifblock.continu = continu; ifblock.brake = brake; - ifblock.write(settings, definition, adapter); + ifblock.write(adapter); if (elseblock != null) { if (!ifblock.allEscape) { @@ -105,7 +103,7 @@ public final class SIfElse extends AStatement { elseblock.continu = continu; elseblock.brake = brake; - elseblock.write(settings, definition, adapter); + elseblock.write(adapter); } adapter.mark(end); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java index b959b47a96b..ecdf5c7b69a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,17 +30,18 @@ public final class SReturn extends AStatement { AExpression expression; - public SReturn(final int line, final String location, final AExpression expression) { + public SReturn(int line, String location, AExpression expression) { super(line, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - expression.expected = definition.objectType; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + void analyze(Variables variables) { + expression.expected = Definition.OBJECT_TYPE; + expression.internal = true; + expression.analyze(variables); + expression = expression.cast(variables); methodEscape = true; loopEscape = true; @@ -51,9 +51,9 @@ public final class SReturn extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); - expression.write(settings, definition, adapter); + expression.write(adapter); adapter.returnValue(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index e5a80802ce8..0932ae6e2f0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import 
org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Opcodes; import org.elasticsearch.painless.MethodWriter; @@ -35,14 +33,14 @@ public final class SSource extends AStatement { final List statements; - public SSource(final int line, final String location, final List statements) { + public SSource(int line, String location, List statements) { super(line, location); this.statements = Collections.unmodifiableList(statements); } @Override - public void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + public void analyze(Variables variables) { variables.incrementScope(); final AStatement last = statements.get(statements.size() - 1); @@ -53,7 +51,7 @@ public final class SSource extends AStatement { } statement.lastSource = statement == last; - statement.analyze(settings, definition, variables); + statement.analyze(variables); methodEscape = statement.methodEscape; allEscape = statement.allEscape; @@ -63,9 +61,9 @@ public final class SSource extends AStatement { } @Override - public void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + public void write(MethodWriter adapter) { for (final AStatement statement : statements) { - statement.write(settings, definition, adapter); + statement.write(adapter); } if (!methodEscape) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java index eac039b998e..a0bfa12e267 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,17 +30,17 @@ public final class SThrow extends AStatement { AExpression expression; - public SThrow(final int line, final String location, final AExpression expression) { + public SThrow(int line, String location, AExpression expression) { super(line, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - expression.expected = definition.exceptionType; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + void analyze(Variables variables) { + expression.expected = Definition.EXCEPTION_TYPE; + expression.analyze(variables); + expression = expression.cast(variables); methodEscape = true; loopEscape = true; @@ -50,9 +49,9 @@ public final class SThrow extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); - expression.write(settings, definition, adapter); + expression.write(adapter); adapter.throwException(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java index acb50a2b962..9a791b1615d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.Variables.Variable; import org.objectweb.asm.Label; @@ -42,7 +40,7 @@ public final class STrap extends AStatement { Label end; Label exception; - public STrap(final int line, final String location, final String type, final String name, final AStatement block) { + public STrap(int line, String location, String type, String name, AStatement block) { super(line, location); this.type = type; @@ -51,7 +49,7 @@ public final class STrap extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variable = variables.addVariable(location, type, name, true, false); if (!Exception.class.isAssignableFrom(variable.type.clazz)) { @@ -63,7 +61,7 @@ public final class STrap extends AStatement { block.inLoop = inLoop; block.lastLoop = lastLoop; - block.analyze(settings, definition, variables); + block.analyze(variables); methodEscape = block.methodEscape; loopEscape = block.loopEscape; @@ -75,7 +73,7 @@ public final class STrap extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label jump = new Label(); @@ -85,7 +83,7 @@ public final class STrap extends AStatement { if (block != null) { block.continu = continu; block.brake = brake; - block.write(settings, definition, adapter); + block.write(adapter); } adapter.visitTryCatchBlock(begin, end, jump, variable.type.type.getInternalName()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java index 0329826b02a..4f97d5835cc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; import org.elasticsearch.painless.MethodWriter; @@ -36,7 +34,7 @@ public final class STry extends AStatement { final AStatement block; final List traps; - public STry(final int line, final String location, final AStatement block, final List traps) { + public STry(int line, String location, AStatement block, List traps) { super(line, location); this.block = block; @@ -44,13 +42,13 @@ public final class STry extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { block.lastSource = lastSource; block.inLoop = inLoop; block.lastLoop = lastLoop; variables.incrementScope(); - block.analyze(settings, definition, variables); + block.analyze(variables); variables.decrementScope(); methodEscape = block.methodEscape; @@ -67,7 +65,7 @@ public final class STry extends AStatement { trap.lastLoop = lastLoop; variables.incrementScope(); - trap.analyze(settings, definition, variables); + trap.analyze(variables); variables.decrementScope(); 
methodEscape &= trap.methodEscape; @@ -83,7 +81,7 @@ } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label begin = new Label(); final Label end = new Label(); @@ -93,7 +91,7 @@ block.continu = continu; block.brake = brake; - block.write(settings, definition, adapter); + block.write(adapter); if (!block.allEscape) { adapter.goTo(exception); @@ -105,7 +103,7 @@ trap.begin = begin; trap.end = end; trap.exception = traps.size() > 1 ? exception : null; - trap.write(settings, definition, adapter); + trap.write(adapter); } if (!block.allEscape || traps.size() > 1) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index f6f8ddc678f..48f59c55189 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -32,21 +31,23 @@ public final class SWhile extends AStatement { AExpression condition; final AStatement block; + final int maxLoopCounter; - public SWhile(final int line, final String location, final AExpression condition, final AStatement block) { + public SWhile(int line, String location, AExpression condition, AStatement block, int maxLoopCounter) { super(line, location); this.condition = condition; this.block = block; + this.maxLoopCounter = maxLoopCounter; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); boolean continuous = false; @@ -68,7 +69,7 @@ public final class SWhile extends AStatement { block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && !block.anyContinue) { throw new IllegalArgumentException(error("Extraneous while loop.")); @@ -84,7 +85,7 @@ public final class SWhile extends AStatement { statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -92,7 +93,7 @@ public final class SWhile extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { writeDebugInfo(adapter); final Label begin = new Label(); final Label end = new Label(); @@ -100,14 +101,14 @@ adapter.mark(begin); condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(adapter); if (block != null) { adapter.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount));
block.continu = begin; block.brake = end; - block.write(settings, definition, adapter); + block.write(adapter); } else { adapter.writeLoopCounter(loopCounterSlot, 1); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index ab6944619ca..0d4e993b39b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -22,6 +22,7 @@ *

* The following are the types of nodes: * A* (abstract) - These are the abstract nodes that are the superclasses for the other types. + * I* (interface) - These are marker interfaces to denote a property of the node. * S* (statement) - These are nodes that represent a statement in Painless. These are the highest level nodes. * E* (expression) - These are nodes that represent an expression in Painless. These are the middle level nodes. * L* (link) - These are nodes that represent a piece of a variable/method chain. These are the lowest level nodes. diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/definition.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/definition.txt new file mode 100644 index 00000000000..379fadb9865 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/definition.txt @@ -0,0 +1,375 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc.
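+#
+# Each entry below maps a Painless type name to a Java class and whitelists
+# the constructors, methods, and fields that scripts may use. The shape of an
+# entry, inferred from the entries in this file rather than from a formal
+# grammar (treat it as a sketch), is:
+#
+#   class PainlessName -> fully.qualified.JavaName extends Super1,Super2 {
+#       PainlessName (ArgType)         a whitelisted constructor
+#       ReturnType method(ArgType)     a whitelisted method (static methods
+#                                      are listed the same way)
+#       FieldType FIELD_NAME           a whitelisted field or constant
+#   }
+#
+# A name written as getLength/size() exposes the Java method (size) under an
+# alternate Painless name (getLength).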
+# + +# primitive types + +class void -> void { +} + +class boolean -> boolean { +} + +class byte -> byte { +} + +class short -> short { +} + +class char -> char { +} + +class int -> int { +} + +class long -> long { +} + +class float -> float { +} + +class double -> double { +} + +# basic JDK classes + +class Object -> java.lang.Object { + boolean equals(Object) + int hashCode() + String toString() +} + +class def -> java.lang.Object { + boolean equals(Object) + int hashCode() + String toString() +} + +class Void -> java.lang.Void extends Object { +} + +class Boolean -> java.lang.Boolean extends Object { + Boolean TRUE + Boolean FALSE + int compare(boolean,boolean) + boolean parseBoolean(String) + Boolean valueOf(boolean) + boolean booleanValue() + int compareTo(Boolean) +} + +class Byte -> java.lang.Byte extends Number,Object { + byte MIN_VALUE + byte MAX_VALUE + int compare(byte,byte) + int compareTo(Byte) + byte parseByte(String) + Byte valueOf(byte) +} + +class Short -> java.lang.Short extends Number,Object { + short MIN_VALUE + short MAX_VALUE + int compare(short,short) + int compareTo(Short) + short parseShort(String) + Short valueOf(short) +} + +class Character -> java.lang.Character extends Object { + char MIN_VALUE + char MAX_VALUE + int charCount(int) + char charValue() + int compare(char,char) + int compareTo(Character) + int digit(int,int) + char forDigit(int,int) + String getName(int) + int getNumericValue(int) + boolean isAlphabetic(int) + boolean isDefined(int) + boolean isDigit(int) + boolean isIdeographic(int) + boolean isLetter(int) + boolean isLetterOrDigit(int) + boolean isLowerCase(int) + boolean isMirrored(int) + boolean isSpaceChar(int) + boolean isTitleCase(int) + boolean isUpperCase(int) + boolean isWhitespace(int) + Character valueOf(char) +} + +class Integer -> java.lang.Integer extends Number,Object { + int MIN_VALUE + int MAX_VALUE + int compare(int,int) + int compareTo(Integer) + int min(int,int) + int max(int,int) + int parseInt(String) + int signum(int) + String toHexString(int) + Integer valueOf(int) +} + +class Long -> java.lang.Long extends Number,Object { + long MIN_VALUE + long MAX_VALUE + int compare(long,long) + int compareTo(Long) + long min(long,long) + long max(long,long) + long parseLong(String) + int signum(long) + String toHexString(long) + Long valueOf(long) +} + +class Float -> java.lang.Float extends Number,Object { + float MIN_VALUE + float MAX_VALUE + int compare(float,float) + int compareTo(Float) + float min(float,float) + float max(float,float) + float parseFloat(String) + String toHexString(float) + Float valueOf(float) +} + +class Double -> java.lang.Double extends Number,Object { + double MIN_VALUE + double MAX_VALUE + int compare(double,double) + int compareTo(Double) + double min(double,double) + double max(double,double) + double parseDouble(String) + String toHexString(double) + Double valueOf(double) +} + +class Number -> java.lang.Number extends Object { + byte byteValue() + short shortValue() + int intValue() + long longValue() + float floatValue() + double doubleValue() +} + +class CharSequence -> java.lang.CharSequence extends Object { + char charAt(int) + int length() +} + +class String -> java.lang.String extends CharSequence,Object { + String () + int codePointAt(int) + int compareTo(String) + String concat(String) + boolean endsWith(String) + int indexOf(String) + int indexOf(String,int) + boolean isEmpty() + String replace(CharSequence,CharSequence) + boolean startsWith(String) + String substring(int,int) + char[] 
toCharArray() + String trim() +} + +class Math -> java.lang.Math { + double E + double PI + double abs(double) + double acos(double) + double asin(double) + double atan(double) + double atan2(double,double) + double cbrt(double) + double ceil(double) + double cos(double) + double cosh(double) + double exp(double) + double expm1(double) + double floor(double) + double hypot(double,double) + double log(double) + double log10(double) + double log1p(double) + double max(double,double) + double min(double,double) + double pow(double,double) + double random() + double rint(double) + long round(double) + double sin(double) + double sinh(double) + double sqrt(double) + double tan(double) + double tanh(double) + double toDegrees(double) + double toRadians(double) +} + +class Iterator -> java.util.Iterator extends Object { + boolean hasNext() + def next() + void remove() +} + +class Collection -> java.util.Collection extends Object { + boolean add(def) + void clear() + boolean contains(def) + boolean isEmpty() + Iterator iterator() + boolean remove(def) + int size() +} + +class List -> java.util.List extends Collection,Object { + def set(int,def) + def get(int) + def remove(int) + int getLength/size() +} + +class ArrayList -> java.util.ArrayList extends List,Collection,Object { + ArrayList () +} + +class Set -> java.util.Set extends Collection,Object { +} + +class HashSet -> java.util.HashSet extends Set,Collection,Object { + HashSet () +} + +class Map -> java.util.Map extends Object { + def put(def,def) + def get(def) + def remove(def) + boolean isEmpty() + int size() + boolean containsKey(def) + Set keySet() + Collection values() +} + +class HashMap -> java.util.HashMap extends Map,Object { + HashMap () +} + +class Exception -> java.lang.Exception extends Object { + String getMessage() +} + +class ArithmeticException -> java.lang.ArithmeticException extends Exception,Object { + ArithmeticException () +} + +class IllegalArgumentException -> java.lang.IllegalArgumentException extends Exception,Object { + IllegalArgumentException () +} + +class IllegalStateException -> java.lang.IllegalStateException extends Exception,Object { + IllegalStateException () +} + +class NumberFormatException -> java.lang.NumberFormatException extends Exception,Object { + NumberFormatException () +} + +# ES Scripting API + +class GeoPoint -> org.elasticsearch.common.geo.GeoPoint extends Object { + double getLat() + double getLon() +} + +class Strings -> org.elasticsearch.index.fielddata.ScriptDocValues$Strings extends List,Collection,Object { + String getValue() + List getValues() +} + +class Longs -> org.elasticsearch.index.fielddata.ScriptDocValues$Longs extends List,Collection,Object { + long getValue() + List getValues() +} + +class Doubles -> org.elasticsearch.index.fielddata.ScriptDocValues$Doubles extends List,Collection,Object { + double getValue() + List getValues() +} + +class GeoPoints -> org.elasticsearch.index.fielddata.ScriptDocValues$GeoPoints extends List,Collection,Object { + GeoPoint getValue() + List getValues() + double getLat() + double getLon() + double[] getLats() + double[] getLons() + + # geo distance functions... so many... 
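+    # (the names encode the distance metric and units: the InKm/InMiles
+    # variants report kilometers/miles, and the *WithDefault forms appear
+    # to take a fallback value as the final argument)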
+ double factorDistance(double,double) + double factorDistanceWithDefault(double,double,double) + double factorDistance02(double,double) + double factorDistance13(double,double) + double arcDistance(double,double) + double arcDistanceWithDefault(double,double,double) + double arcDistanceInKm(double,double) + double arcDistanceInKmWithDefault(double,double,double) + double arcDistanceInMiles(double,double) + double arcDistanceInMilesWithDefault(double,double,double) + double distance(double,double) + double distanceWithDefault(double,double,double) + double distanceInKm(double,double) + double distanceInKmWithDefault(double,double,double) + double distanceInMiles(double,double) + double distanceInMilesWithDefault(double,double,double) + double geohashDistance(String) + double geohashDistanceInKm(String) + double geohashDistanceInMiles(String) +} + +# for testing. +# currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods +class FeatureTest -> org.elasticsearch.painless.FeatureTest extends Object { + FeatureTest () + FeatureTest (int,int) + int getX() + int getY() + void setX(int) + void setY(int) + boolean overloadedStatic() + boolean overloadedStatic(boolean) +} + +# currently needed internally +class Executable -> org.elasticsearch.painless.Executable { +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java index 2036c4fd04c..8dabacdd5f9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java @@ -34,7 +34,7 @@ public class ArrayTests extends ScriptTestCase { assertArrayLength(10, new Integer[10]); assertArrayLength(11, new String[11][2]); } - + private void assertArrayLength(int length, Object array) throws Throwable { assertEquals(length, (int) Def.arrayLengthGetter(array.getClass()).invoke(array)); } @@ -43,36 +43,36 @@ public class ArrayTests extends ScriptTestCase { assertEquals(5, exec("def x = new int[5]; return x.length")); assertEquals(5, exec("def x = new int[4]; x[0] = 5; return x[0];")); } - + public void testArrayLoadStoreString() { assertEquals(5, exec("def x = new String[5]; return x.length")); assertEquals("foobar", exec("def x = new String[4]; x[0] = 'foobar'; return x[0];")); } - + public void testArrayLoadStoreDef() { assertEquals(5, exec("def x = new def[5]; return x.length")); assertEquals(5, exec("def x = new def[4]; x[0] = 5; return x[0];")); } - + public void testArrayCompoundInt() { assertEquals(6, exec("int[] x = new int[5]; x[0] = 5; x[0]++; return x[0];")); } - + public void testArrayCompoundDef() { assertEquals(6, exec("def x = new int[5]; x[0] = 5; x[0]++; return x[0];")); } - + public void testJacksCrazyExpression1() { assertEquals(1, exec("int x; def[] y = new def[1]; x = y[0] = 1; return x;")); } - + public void testJacksCrazyExpression2() { assertEquals(1, exec("int x; def y = new def[1]; x = y[0] = 1; return x;")); } - + public void testForLoop() { assertEquals(999*1000/2, exec("def a = new int[1000]; for (int x = 0; x < a.length; x++) { a[x] = x; } "+ "int total = 0; for (int x = 0; x < a.length; x++) { total += a[x]; } return total;")); } - + } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index af067d15252..338e3f00113 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -24,10 +24,8 @@ public class BasicAPITests extends ScriptTestCase { public void testListIterator() { assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); - assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); assertEquals(3, exec("def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + "def total = 0; while (y.hasNext()) total += y.next(); return total;")); } @@ -35,10 +33,8 @@ public class BasicAPITests extends ScriptTestCase { public void testSetIterator() { assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); - assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); assertEquals(3, exec("def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); } @@ -49,41 +45,49 @@ public class BasicAPITests extends ScriptTestCase { assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); } - + /** Test loads and stores with a map */ public void testMapLoadStore() { assertEquals(5, exec("def x = new HashMap(); x.abc = 5; return x.abc;")); assertEquals(5, exec("def x = new HashMap(); x['abc'] = 5; return x['abc'];")); } - + /** Test loads and stores with a list */ public void testListLoadStore() { assertEquals(5, exec("def x = new ArrayList(); x.add(3); x.0 = 5; return x.0;")); assertEquals(5, exec("def x = new ArrayList(); x.add(3); x[0] = 5; return x[0];")); } - + /** Test shortcut for getters with isXXXX */ public void testListEmpty() { assertEquals(true, exec("def x = new ArrayList(); return x.empty;")); assertEquals(true, exec("def x = new HashMap(); return x.empty;")); } - + /** Test list method invocation */ public void testListGet() { assertEquals(5, exec("def x = new ArrayList(); x.add(5); return x.get(0);")); assertEquals(5, exec("def x = new ArrayList(); x.add(5); def index = 0; return x.get(index);")); } - + public void 
testListAsArray() { assertEquals(1, exec("def x = new ArrayList(); x.add(5); return x.length")); assertEquals(5, exec("def x = new ArrayList(); x.add(5); return x[0]")); assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); - assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); - assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); } - + public void testDefAssignments() { assertEquals(2, exec("int x; def y = 2.0; x = (int)y;")); } - + + public void testInternalBoxing() { + assertBytecodeExists("def x = true", "INVOKESTATIC java/lang/Boolean.valueOf (Z)Ljava/lang/Boolean;"); + assertBytecodeExists("def x = (byte)1", "INVOKESTATIC java/lang/Byte.valueOf (B)Ljava/lang/Byte;"); + assertBytecodeExists("def x = (short)1", "INVOKESTATIC java/lang/Short.valueOf (S)Ljava/lang/Short;"); + assertBytecodeExists("def x = (char)1", "INVOKESTATIC java/lang/Character.valueOf (C)Ljava/lang/Character;"); + assertBytecodeExists("def x = 1", "INVOKESTATIC java/lang/Integer.valueOf (I)Ljava/lang/Integer;"); + assertBytecodeExists("def x = 1L", "INVOKESTATIC java/lang/Long.valueOf (J)Ljava/lang/Long;"); + assertBytecodeExists("def x = 1F", "INVOKESTATIC java/lang/Float.valueOf (F)Ljava/lang/Float;"); + assertBytecodeExists("def x = 1D", "INVOKESTATIC java/lang/Double.valueOf (D)Ljava/lang/Double;"); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index 2fb676bf299..2a8f3674ac1 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -96,15 +96,15 @@ public class BasicExpressionTests extends ScriptTestCase { } /** - * Test boxed objects in various places + * Test boxed def objects in various places */ public void testBoxing() { // return assertEquals(4, exec("return params.get(\"x\");", Collections.singletonMap("x", 4))); // assignment - assertEquals(4, exec("int y = (Integer)params.get(\"x\"); return y;", Collections.singletonMap("x", 4))); + assertEquals(4, exec("int y = params.get(\"x\"); return y;", Collections.singletonMap("x", 4))); // comparison - assertEquals(true, exec("return 5 > (Integer)params.get(\"x\");", Collections.singletonMap("x", 4))); + assertEquals(true, exec("return 5 > params.get(\"x\");", Collections.singletonMap("x", 4))); } public void testBool() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index f0022e6bcf1..0d6a54b515b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -135,7 +135,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(2.0, exec("double a = 2; return a;")); assertEquals(false, exec("boolean a = false; return a;")); assertEquals("string", exec("String a = \"string\"; return a;")); - assertEquals(HashMap.class, exec("Map a = new HashMap(); return a;").getClass()); + assertEquals(HashMap.class, exec("Map a = new HashMap(); return a;").getClass()); assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a;").getClass()); assertEquals(short[].class, exec("short[] a = new 
short[1]; return a;").getClass()); @@ -146,7 +146,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(double[].class, exec("double[] a = new double[1]; return a;").getClass()); assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a;").getClass()); assertEquals(String[].class, exec("String[] a = new String[1]; return a;").getClass()); - assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a;").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a;").getClass()); assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a;").getClass()); assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a;").getClass()); @@ -157,7 +157,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a;").getClass()); assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a;").getClass()); assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a;").getClass()); - assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a;").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a;").getClass()); } public void testContinueStatement() { @@ -174,6 +174,6 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(5, exec("int x = 5; return x;")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1];")); assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]); - assertEquals(10, ((Map)exec("Map s = new HashMap< String , Object >(); s.put(\"x\", 10); return s;")).get("x")); + assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s;")).get("x")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java index d54b976d65d..03593116538 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java @@ -230,18 +230,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(false, exec("boolean x = true; x &= false; return x;")); assertEquals(false, exec("boolean x = false; x &= true; return x;")); assertEquals(false, exec("boolean x = false; x &= false; return x;")); - assertEquals(true, exec("Boolean x = true; x &= true; return x;")); - assertEquals(false, exec("Boolean x = true; x &= false; return x;")); - assertEquals(false, exec("Boolean x = false; x &= true; return x;")); - assertEquals(false, exec("Boolean x = false; x &= false; return x;")); + assertEquals(true, exec("def x = true; x &= true; return x;")); + assertEquals(false, exec("def x = true; x &= false; return x;")); + assertEquals(false, exec("def x = false; x &= true; return x;")); + assertEquals(false, exec("def x = false; x &= false; return x;")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= false; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= false; return x[0];")); - 
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= false; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] &= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = true; x[0] &= false; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] &= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] &= false; return x[0];")); // byte assertEquals((byte) (13 & 14), exec("byte x = 13; x &= 14; return x;")); @@ -261,18 +261,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("boolean x = true; x |= false; return x;")); assertEquals(true, exec("boolean x = false; x |= true; return x;")); assertEquals(false, exec("boolean x = false; x |= false; return x;")); - assertEquals(true, exec("Boolean x = true; x |= true; return x;")); - assertEquals(true, exec("Boolean x = true; x |= false; return x;")); - assertEquals(true, exec("Boolean x = false; x |= true; return x;")); - assertEquals(false, exec("Boolean x = false; x |= false; return x;")); + assertEquals(true, exec("def x = true; x |= true; return x;")); + assertEquals(true, exec("def x = true; x |= false; return x;")); + assertEquals(true, exec("def x = false; x |= true; return x;")); + assertEquals(false, exec("def x = false; x |= false; return x;")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= true; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= false; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= true; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] |= true; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] |= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = false; x[0] |= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] |= false; return x[0];")); // byte assertEquals((byte) (13 | 14), exec("byte x = 13; x |= 14; return x;")); @@ -292,18 +292,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("boolean x = true; x ^= false; return x;")); assertEquals(true, exec("boolean x = false; x ^= true; return x;")); assertEquals(false, exec("boolean x = false; x ^= false; return x;")); - assertEquals(false, exec("Boolean x = true; x ^= true; return x;")); - assertEquals(true, exec("Boolean x = true; x ^= false; return x;")); - assertEquals(true, exec("Boolean x = false; x ^= true; return x;")); - assertEquals(false, exec("Boolean x = false; x ^= false; 
return x;")); + assertEquals(false, exec("def x = true; x ^= true; return x;")); + assertEquals(true, exec("def x = true; x ^= false; return x;")); + assertEquals(true, exec("def x = false; x ^= true; return x;")); + assertEquals(false, exec("def x = false; x ^= false; return x;")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = true; x[0] ^= true; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] ^= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = false; x[0] ^= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] ^= false; return x[0];")); // byte assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java index 859825f129a..a3a09fee425 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java @@ -65,10 +65,8 @@ public class ConditionalTests extends ScriptTestCase { public void testPromotion() { assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);")); - assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new Long(2) : new Float(4.0F));")); assertEquals(false, exec("boolean x = false; boolean y = true; " + - "return (x ? new HashMap() : new ArrayList()) == (y ? new Long(2) : new Float(4.0F));")); - assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new HashMap() : new ArrayList());")); + "return (x ? new HashMap() : new ArrayList()) == (y ? 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java index 58b37469f1b..9f171a96889 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java @@ -22,10 +22,10 @@ package org.elasticsearch.painless; public class DefOperationTests extends ScriptTestCase { public void testIllegalCast() { Exception exception = expectThrows(ClassCastException.class, () -> exec("def x = 1.0; int y = x; return y;")); - assertTrue(exception.getMessage(), exception.getMessage().contains("cannot be cast to java.lang.Integer")); + assertTrue(exception.getMessage().contains("cannot be cast")); exception = expectThrows(ClassCastException.class, () -> exec("def x = (short)1; byte y = x; return y;")); - assertTrue(exception.getMessage(), exception.getMessage().contains("cannot be cast to java.lang.Byte")); + assertTrue(exception.getMessage().contains("cannot be cast")); } public void testNot() { @@ -103,13 +103,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(4D, exec("def x = (float)2; def y = (double)2; return x * y")); assertEquals(4D, exec("def x = (double)2; def y = (double)2; return x * y")); - assertEquals(4, exec("def x = (Byte)2; def y = (byte)2; return x * y")); - assertEquals(4, exec("def x = (Short)2; def y = (short)2; return x * y")); - assertEquals(4, exec("def x = (Character)2; def y = (char)2; return x * y")); - assertEquals(4, exec("def x = (Integer)2; def y = (int)2; return x * y")); - assertEquals(4L, exec("def x = (Long)2; def y = (long)2; return x * y")); - assertEquals(4F, exec("def x = (Float)2; def y = (float)2; return x * y")); - assertEquals(4D, exec("def x = (Double)2; def y = (double)2; return x * y")); + assertEquals(4, exec("def x = (byte)2; def y = (byte)2; return x * y")); + assertEquals(4, exec("def x = (short)2; def y = (short)2; return x * y")); + assertEquals(4, exec("def x = (char)2; def y = (char)2; return x * y")); + assertEquals(4, exec("def x = (int)2; def y = (int)2; return x * y")); + assertEquals(4L, exec("def x = (long)2; def y = (long)2; return x * y")); + assertEquals(4F, exec("def x = (float)2; def y = (float)2; return x * y")); + assertEquals(4D, exec("def x = (double)2; def y = (double)2; return x * y")); } public void testDiv() { @@ -169,13 +169,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(1D, exec("def x = (float)2; def y = (double)2; return x / y")); assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y")); - assertEquals(1, exec("def x = (Byte)2; def y = (byte)2; return x / y")); - assertEquals(1, exec("def x = (Short)2; def y = (short)2; return x / y")); - assertEquals(1, exec("def x = (Character)2; def y = (char)2; return x / y")); - assertEquals(1, exec("def x = (Integer)2; def y = (int)2; return x / y")); - assertEquals(1L, exec("def x = (Long)2; def y = (long)2; return x / y")); - assertEquals(1F, exec("def x = (Float)2; def y = (float)2; return x / y")); - assertEquals(1D, exec("def x = (Double)2; def y = (double)2; return x / y")); + assertEquals(1, exec("def x = (byte)2; def y = (byte)2; return x / y")); + assertEquals(1, exec("def x = (short)2; def y = (short)2; return x / y")); + assertEquals(1, exec("def x = (char)2; def y = (char)2;
return x / y")); + assertEquals(1, exec("def x = (int)2; def y = (int)2; return x / y")); + assertEquals(1L, exec("def x = (long)2; def y = (long)2; return x / y")); + assertEquals(1F, exec("def x = (float)2; def y = (float)2; return x / y")); + assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y")); } public void testRem() { @@ -235,13 +235,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0D, exec("def x = (float)2; def y = (double)2; return x % y")); assertEquals(0D, exec("def x = (double)2; def y = (double)2; return x % y")); - assertEquals(0, exec("def x = (Byte)2; def y = (byte)2; return x % y")); - assertEquals(0, exec("def x = (Short)2; def y = (short)2; return x % y")); - assertEquals(0, exec("def x = (Character)2; def y = (char)2; return x % y")); - assertEquals(0, exec("def x = (Integer)2; def y = (int)2; return x % y")); - assertEquals(0L, exec("def x = (Long)2; def y = (long)2; return x % y")); - assertEquals(0F, exec("def x = (Float)2; def y = (float)2; return x % y")); - assertEquals(0D, exec("def x = (Double)2; def y = (double)2; return x % y")); + assertEquals(0, exec("def x = (byte)2; def y = (byte)2; return x % y")); + assertEquals(0, exec("def x = (short)2; def y = (short)2; return x % y")); + assertEquals(0, exec("def x = (char)2; def y = (char)2; return x % y")); + assertEquals(0, exec("def x = (int)2; def y = (int)2; return x % y")); + assertEquals(0L, exec("def x = (long)2; def y = (long)2; return x % y")); + assertEquals(0F, exec("def x = (float)2; def y = (float)2; return x % y")); + assertEquals(0D, exec("def x = (double)2; def y = (double)2; return x % y")); } public void testAdd() { @@ -301,13 +301,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2D, exec("def x = (float)1; def y = (double)1; return x + y")); assertEquals(2D, exec("def x = (double)1; def y = (double)1; return x + y")); - assertEquals(2, exec("def x = (Byte)1; def y = (byte)1; return x + y")); - assertEquals(2, exec("def x = (Short)1; def y = (short)1; return x + y")); - assertEquals(2, exec("def x = (Character)1; def y = (char)1; return x + y")); - assertEquals(2, exec("def x = (Integer)1; def y = (int)1; return x + y")); - assertEquals(2L, exec("def x = (Long)1; def y = (long)1; return x + y")); - assertEquals(2F, exec("def x = (Float)1; def y = (float)1; return x + y")); - assertEquals(2D, exec("def x = (Double)1; def y = (double)1; return x + y")); + assertEquals(2, exec("def x = (byte)1; def y = (byte)1; return x + y")); + assertEquals(2, exec("def x = (short)1; def y = (short)1; return x + y")); + assertEquals(2, exec("def x = (char)1; def y = (char)1; return x + y")); + assertEquals(2, exec("def x = (int)1; def y = (int)1; return x + y")); + assertEquals(2L, exec("def x = (long)1; def y = (long)1; return x + y")); + assertEquals(2F, exec("def x = (float)1; def y = (float)1; return x + y")); + assertEquals(2D, exec("def x = (double)1; def y = (double)1; return x + y")); } public void testSub() { @@ -367,13 +367,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0D, exec("def x = (float)1; def y = (double)1; return x - y")); assertEquals(0D, exec("def x = (double)1; def y = (double)1; return x - y")); - assertEquals(0, exec("def x = (Byte)1; def y = (byte)1; return x - y")); - assertEquals(0, exec("def x = (Short)1; def y = (short)1; return x - y")); - assertEquals(0, exec("def x = (Character)1; def y = (char)1; return x - y")); - assertEquals(0, exec("def x = (Integer)1; def y = (int)1; return 
x - y")); - assertEquals(0L, exec("def x = (Long)1; def y = (long)1; return x - y")); - assertEquals(0F, exec("def x = (Float)1; def y = (float)1; return x - y")); - assertEquals(0D, exec("def x = (Double)1; def y = (double)1; return x - y")); + assertEquals(0, exec("def x = (byte)1; def y = (byte)1; return x - y")); + assertEquals(0, exec("def x = (short)1; def y = (short)1; return x - y")); + assertEquals(0, exec("def x = (char)1; def y = (char)1; return x - y")); + assertEquals(0, exec("def x = (int)1; def y = (int)1; return x - y")); + assertEquals(0L, exec("def x = (long)1; def y = (long)1; return x - y")); + assertEquals(0F, exec("def x = (float)1; def y = (float)1; return x - y")); + assertEquals(0D, exec("def x = (double)1; def y = (double)1; return x - y")); } public void testLsh() { @@ -433,13 +433,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2L, exec("def x = (float)1; def y = (double)1; return x << y")); assertEquals(2L, exec("def x = (double)1; def y = (double)1; return x << y")); - assertEquals(2, exec("def x = (Byte)1; def y = (byte)1; return x << y")); - assertEquals(2, exec("def x = (Short)1; def y = (short)1; return x << y")); - assertEquals(2, exec("def x = (Character)1; def y = (char)1; return x << y")); - assertEquals(2, exec("def x = (Integer)1; def y = (int)1; return x << y")); - assertEquals(2L, exec("def x = (Long)1; def y = (long)1; return x << y")); - assertEquals(2L, exec("def x = (Float)1; def y = (float)1; return x << y")); - assertEquals(2L, exec("def x = (Double)1; def y = (double)1; return x << y")); + assertEquals(2, exec("def x = (byte)1; def y = (byte)1; return x << y")); + assertEquals(2, exec("def x = (short)1; def y = (short)1; return x << y")); + assertEquals(2, exec("def x = (char)1; def y = (char)1; return x << y")); + assertEquals(2, exec("def x = (int)1; def y = (int)1; return x << y")); + assertEquals(2L, exec("def x = (long)1; def y = (long)1; return x << y")); + assertEquals(2L, exec("def x = (float)1; def y = (float)1; return x << y")); + assertEquals(2L, exec("def x = (double)1; def y = (double)1; return x << y")); } public void testRsh() { @@ -499,13 +499,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2L, exec("def x = (float)4; def y = (double)1; return x >> y")); assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >> y")); - assertEquals(2, exec("def x = (Byte)4; def y = (byte)1; return x >> y")); - assertEquals(2, exec("def x = (Short)4; def y = (short)1; return x >> y")); - assertEquals(2, exec("def x = (Character)4; def y = (char)1; return x >> y")); - assertEquals(2, exec("def x = (Integer)4; def y = (int)1; return x >> y")); - assertEquals(2L, exec("def x = (Long)4; def y = (long)1; return x >> y")); - assertEquals(2L, exec("def x = (Float)4; def y = (float)1; return x >> y")); - assertEquals(2L, exec("def x = (Double)4; def y = (double)1; return x >> y")); + assertEquals(2, exec("def x = (byte)4; def y = (byte)1; return x >> y")); + assertEquals(2, exec("def x = (short)4; def y = (short)1; return x >> y")); + assertEquals(2, exec("def x = (char)4; def y = (char)1; return x >> y")); + assertEquals(2, exec("def x = (int)4; def y = (int)1; return x >> y")); + assertEquals(2L, exec("def x = (long)4; def y = (long)1; return x >> y")); + assertEquals(2L, exec("def x = (float)4; def y = (float)1; return x >> y")); + assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >> y")); } public void testUsh() { @@ -565,13 +565,13 @@ public class 
DefOperationTests extends ScriptTestCase { assertEquals(2L, exec("def x = (float)4; def y = (double)1; return x >>> y")); assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >>> y")); - assertEquals(2, exec("def x = (Byte)4; def y = (byte)1; return x >>> y")); - assertEquals(2, exec("def x = (Short)4; def y = (short)1; return x >>> y")); - assertEquals(2, exec("def x = (Character)4; def y = (char)1; return x >>> y")); - assertEquals(2, exec("def x = (Integer)4; def y = (int)1; return x >>> y")); - assertEquals(2L, exec("def x = (Long)4; def y = (long)1; return x >>> y")); - assertEquals(2L, exec("def x = (Float)4; def y = (float)1; return x >>> y")); - assertEquals(2L, exec("def x = (Double)4; def y = (double)1; return x >>> y")); + assertEquals(2, exec("def x = (byte)4; def y = (byte)1; return x >>> y")); + assertEquals(2, exec("def x = (short)4; def y = (short)1; return x >>> y")); + assertEquals(2, exec("def x = (char)4; def y = (char)1; return x >>> y")); + assertEquals(2, exec("def x = (int)4; def y = (int)1; return x >>> y")); + assertEquals(2L, exec("def x = (long)4; def y = (long)1; return x >>> y")); + assertEquals(2L, exec("def x = (float)4; def y = (float)1; return x >>> y")); + assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >>> y")); } public void testAnd() { @@ -631,13 +631,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0L, exec("def x = (float)4; def y = (double)1; return x & y")); assertEquals(0L, exec("def x = (double)4; def y = (double)1; return x & y")); - assertEquals(0, exec("def x = (Byte)4; def y = (byte)1; return x & y")); - assertEquals(0, exec("def x = (Short)4; def y = (short)1; return x & y")); - assertEquals(0, exec("def x = (Character)4; def y = (char)1; return x & y")); - assertEquals(0, exec("def x = (Integer)4; def y = (int)1; return x & y")); - assertEquals(0L, exec("def x = (Long)4; def y = (long)1; return x & y")); - assertEquals(0L, exec("def x = (Float)4; def y = (float)1; return x & y")); - assertEquals(0L, exec("def x = (Double)4; def y = (double)1; return x & y")); + assertEquals(0, exec("def x = (byte)4; def y = (byte)1; return x & y")); + assertEquals(0, exec("def x = (short)4; def y = (short)1; return x & y")); + assertEquals(0, exec("def x = (char)4; def y = (char)1; return x & y")); + assertEquals(0, exec("def x = (int)4; def y = (int)1; return x & y")); + assertEquals(0L, exec("def x = (long)4; def y = (long)1; return x & y")); + assertEquals(0L, exec("def x = (float)4; def y = (float)1; return x & y")); + assertEquals(0L, exec("def x = (double)4; def y = (double)1; return x & y")); } public void testXor() { @@ -697,13 +697,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(5L, exec("def x = (float)4; def y = (double)1; return x ^ y")); assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x ^ y")); - assertEquals(5, exec("def x = (Byte)4; def y = (byte)1; return x ^ y")); - assertEquals(5, exec("def x = (Short)4; def y = (short)1; return x ^ y")); - assertEquals(5, exec("def x = (Character)4; def y = (char)1; return x ^ y")); - assertEquals(5, exec("def x = (Integer)4; def y = (int)1; return x ^ y")); - assertEquals(5L, exec("def x = (Long)4; def y = (long)1; return x ^ y")); - assertEquals(5L, exec("def x = (Float)4; def y = (float)1; return x ^ y")); - assertEquals(5L, exec("def x = (Double)4; def y = (double)1; return x ^ y")); + assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y")); + assertEquals(5, exec("def x = 
(short)4; def y = (short)1; return x ^ y")); + assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y")); + assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y")); + assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y")); + assertEquals(5L, exec("def x = (float)4; def y = (float)1; return x ^ y")); + assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x ^ y")); } public void testOr() { @@ -763,13 +763,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(5L, exec("def x = (float)4; def y = (double)1; return x | y")); assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x | y")); - assertEquals(5, exec("def x = (Byte)4; def y = (byte)1; return x | y")); - assertEquals(5, exec("def x = (Short)4; def y = (short)1; return x | y")); - assertEquals(5, exec("def x = (Character)4; def y = (char)1; return x | y")); - assertEquals(5, exec("def x = (Integer)4; def y = (int)1; return x | y")); - assertEquals(5L, exec("def x = (Long)4; def y = (long)1; return x | y")); - assertEquals(5L, exec("def x = (Float)4; def y = (float)1; return x | y")); - assertEquals(5L, exec("def x = (Double)4; def y = (double)1; return x | y")); + assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x | y")); + assertEquals(5, exec("def x = (short)4; def y = (short)1; return x | y")); + assertEquals(5, exec("def x = (char)4; def y = (char)1; return x | y")); + assertEquals(5, exec("def x = (int)4; def y = (int)1; return x | y")); + assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x | y")); + assertEquals(5L, exec("def x = (float)4; def y = (float)1; return x | y")); + assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x | y")); } public void testEq() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java index 8043d9da915..7e4448495a9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java @@ -94,17 +94,8 @@ public class EqualsTests extends ScriptTestCase { } public void testEquals() { - assertEquals(true, exec("return new Long(3) == new Long(3);")); - assertEquals(false, exec("return new Long(3) === new Long(3);")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x === y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x == y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x === y;")); - assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); short y = 3; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); Short y = (short)3; return x == y;")); - assertEquals(false, exec("Integer x = new Integer(3); int y = 3; return x === y;")); - assertEquals(false, exec("Integer x = new Integer(3); double y = 3; return x === y;")); + assertEquals(true, exec("return 3 == 3;")); + assertEquals(false, exec("int x = 4; int y = 5; x == y")); assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x == y;")); assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x === y;")); assertEquals(false, exec("int[] x = new int[1]; Object y = 
new int[1]; return x == y;")); @@ -114,14 +105,8 @@ public class EqualsTests extends ScriptTestCase { } public void testNotEquals() { - assertEquals(false, exec("return new Long(3) != new Long(3);")); - assertEquals(true, exec("return new Long(3) !== new Long(3);")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x != y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x !== y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x != y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x !== y;")); - assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x !== y;")); - assertEquals(true, exec("Integer x = new Integer(3); double y = 3; return x !== y;")); + assertEquals(false, exec("return 3 != 3;")); + assertEquals(true, exec("int x = 4; int y = 5; x != y")); assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x != y;")); assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x !== y;")); assertEquals(true, exec("int[] x = new int[1]; Object y = new int[1]; return x != y;")); @@ -131,54 +116,36 @@ public class EqualsTests extends ScriptTestCase { } public void testBranchEquals() { - assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'b'; if (a == b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'a'; if (a == b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = new Integer(1); Integer b = 1; if (a === b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a === b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Object b = a; if (a === b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = 1; Character b = (char)'a'; if (a === (Object)b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'b'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'a'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; def b = 1; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'a'; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; Object b = a; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; Object b = new HashMap(); if (a === (Object)b) return 1; else return 0;")); } public void testBranchNotEquals() { - assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'b'; if (a != b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'a'; if (a != b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = new Integer(1); Integer b = 1; if (a !== b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a !== b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Object b = a; if (a !== b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); - assertEquals(1, 
exec("Integer a = 1; Character b = (char)'a'; if (a !== (Object)b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'b'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'a'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; def b = 1; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'a'; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; Object b = a; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; Object b = new HashMap(); if (a !== (Object)b) return 1; else return 0;")); } public void testRightHandNull() { - assertEquals(false, exec("Character a = (char)'a'; return a == null;")); - assertEquals(false, exec("Character a = (char)'a'; return a === null;")); - assertEquals(true, exec("Character a = (char)'a'; return a != null;")); - assertEquals(true, exec("Character a = (char)'a'; return a !== null;")); - assertEquals(true, exec("Character a = null; return a == null;")); - assertEquals(false, exec("Character a = null; return a != null;")); - assertEquals(false, exec("Character a = (char)'a'; Character b = null; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return a === b;")); - assertEquals(true, exec("Character a = (char)'a'; Character b = null; return a != b;")); - assertEquals(false, exec("Character a = null; Character b = null; return a !== b;")); - assertEquals(false, exec("Integer x = null; double y = 2.0; return x == y;")); - assertEquals(true, exec("Integer x = null; Short y = null; return x == y;")); + assertEquals(false, exec("HashMap a = new HashMap(); return a == null;")); + assertEquals(false, exec("HashMap a = new HashMap(); return a === null;")); + assertEquals(true, exec("HashMap a = new HashMap(); return a != null;")); + assertEquals(true, exec("HashMap a = new HashMap(); return a !== null;")); } public void testLeftHandNull() { - assertEquals(false, exec("Character a = (char)'a'; return null == a;")); - assertEquals(false, exec("Character a = (char)'a'; return null === a;")); - assertEquals(true, exec("Character a = (char)'a'; return null != a;")); - assertEquals(true, exec("Character a = (char)'a'; return null !== a;")); - assertEquals(true, exec("Character a = null; return null == a;")); - assertEquals(false, exec("Character a = null; return null != a;")); - assertEquals(false, exec("Character a = null; Character b = (char)'a'; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return b === a;")); - assertEquals(true, exec("Character a = null; Character b = (char)'a'; return a != b;")); - assertEquals(false, exec("Character a = null; Character b = null; return b != a;")); - assertEquals(false, exec("Character a = null; Character b = null; return b !== a;")); - assertEquals(false, exec("Integer x = null; double y = 2.0; return y == x;")); - assertEquals(true, exec("Integer x = null; Short y = null; return y == x;")); + assertEquals(false, exec("HashMap a = new HashMap(); return null == a;")); + assertEquals(false, exec("HashMap a = new HashMap(); return null === a;")); + assertEquals(true, exec("HashMap a = new HashMap(); return null != a;")); + assertEquals(true, exec("HashMap a = new 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java deleted file mode 100644 index 7bec0b110df..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import java.util.Collections; -import java.util.Map; - -/** Tests floating point overflow with numeric overflow disabled */ -public class FloatOverflowDisabledTests extends ScriptTestCase { - - /** wire overflow to false for all tests */ - @Override - public Object exec(String script, Map<String, Object> vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); - } - - public void testAssignmentAdditionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentSubtractionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentMultiplicationOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x =
3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentDivisionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1.0f; x /= 0.0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.0f; x /= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddition() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAdditionConst() throws Exception { - try { - exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 + 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtraction() throws Exception { - try { - exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractionConst() throws Exception { - try { - exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return -1.7976931348623157E308 - 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplication() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;"); - 
fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplicationConst() throws Exception { - try { - exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 * 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivision() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 1.401298464324817E-45f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1.0f; float y = 0.0f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.0; double y = 0.0; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionConst() throws Exception { - try { - exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0f / 0.0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 / 4.9E-324;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0 / 0.0;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionNaN() throws Exception { - // float division, constant division, and assignment - try { - exec("float x = 0f; float y = 0f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 0f / 0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 0f; x /= 0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double division, constant division, and assignment - try { - exec("double x = 0.0; double y = 0.0; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 0.0 / 0.0;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 0.0; x /= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderNaN() throws Exception { - // float division, constant division, and assignment - try { - exec("float x = 1f; float y = 0f; return x % y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1f % 0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1f; x %= 0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double division, constant division, and assignment - try { - exec("double x = 1.0; double y = 0.0; return x % y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0 % 0.0;"); - fail("didn't 
hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.0; x %= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java similarity index 94% rename from modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java index ccfd2232e88..4b3eb8f0e7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java @@ -19,17 +19,8 @@ package org.elasticsearch.painless; -import java.util.Collections; -import java.util.Map; - -/** Tests floating point overflow with numeric overflow enabled */ -public class FloatOverflowEnabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); - } +/** Tests floating point overflow cases */ +public class FloatOverflowTests extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // float diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java deleted file mode 100644 index f4adcfce878..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.painless; - -import java.util.Collections; -import java.util.Map; - -/** Tests integer overflow with numeric overflow disabled */ -public class IntegerOverflowDisabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map<String, Object> vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); - } - - public void testAssignmentAdditionOverflow() { - // byte - try { - exec("byte x = 0; x += 128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x += -129; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 0; x += 32768; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x += -32769; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 0; x += 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 0; x += -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 1; x += 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = -2; x += -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 1; x += 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -2; x += -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentSubtractionOverflow() { - // byte - try { - exec("byte x = 0; x -= -128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x -= 129; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 0; x -= -32768; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x -= 32769; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 0; x -= -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 0; x -= 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 1; x -= -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = -2; x -= 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 1; x -= -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -2; x -= 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - }
- - public void testAssignmentMultiplicationOverflow() { - // byte - try { - exec("byte x = 2; x *= 128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 2; x *= -128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 2; x *= 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 2; x *= -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 2; x *= 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = 2; x *= -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 2; x *= 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 2; x *= -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentDivisionOverflow() { - // byte - try { - exec("byte x = (byte) -128; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = (short) -32768; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // cannot happen for char: unsigned - - // int - try { - exec("int x = -2147483647 - 1; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testIncrementOverFlow() throws Exception { - // byte - try { - exec("byte x = 127; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 127; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = (byte) -128; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = (byte) -128; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 32767; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = 32767; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = (short) -32768; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = (short) -32768; x--; return x;"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 65535; ++x; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 65535; x++; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = (char) 0; --x; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = (char) 0; x--; return x;"); - } catch (ArithmeticException 
expected) {} - - // int - try { - exec("int x = 2147483647; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = 2147483647; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = (int) -2147483648L; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = (int) -2147483648L; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 9223372036854775807L; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775807L - 1L; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775807L - 1L; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddition() throws Exception { - try { - exec("int x = 2147483647; int y = 2147483647; return x + y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testAdditionConst() throws Exception { - try { - exec("return 2147483647 + 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return 9223372036854775807L + 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - - public void testSubtraction() throws Exception { - try { - exec("int x = -10; int y = 2147483647; return x - y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -10L; long y = 9223372036854775807L; return x - y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractionConst() throws Exception { - try { - exec("return -10 - 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return -10L - 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplication() throws Exception { - try { - exec("int x = 2147483647; int y = 2147483647; return x * y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplicationConst() throws Exception { - try { - exec("return 2147483647 * 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return 9223372036854775807L * 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivision() throws Exception { - try { - exec("int x = -2147483647 - 1; int y = -1; return x / y;"); - fail("should have hit 
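exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionConst() throws Exception { - try { - exec("return (-2147483648) / -1;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return (-9223372036854775808L) / -1L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflow() throws Exception { - try { - exec("int x = -2147483648; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflowConst() throws Exception { - try { - exec("int x = -(-2147483648); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -(-9223372036854775808L); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } -}

The deletion above removes the last tests for the checked-overflow mode, where integer overflow threw ArithmeticException instead of wrapping. Plain Java exposes the same checked behavior through the Math.*Exact family; a stand-alone sketch for comparison, not part of this change:

    /** Sketch only: Java's checked arithmetic, analogous to the removed NUMERIC_OVERFLOW=false mode. */
    public class CheckedOverflowSketch {
        public static void main(String[] args) {
            try {
                Math.addExact(Integer.MAX_VALUE, 1); // throws instead of wrapping to Integer.MIN_VALUE
                throw new AssertionError("did not get expected exception");
            } catch (ArithmeticException expected) {
                System.out.println("overflow detected");
            }
        }
    }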
exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionConst() throws Exception { - try { - exec("return (-2147483648) / -1;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return (-9223372036854775808L) / -1L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflow() throws Exception { - try { - exec("int x = -2147483648; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflowConst() throws Exception { - try { - exec("int x = -(-2147483648); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -(-9223372036854775808L); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java similarity index 95% rename from modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java index 41b3f857c0a..1165547bf5a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java @@ -19,17 +19,8 @@ package org.elasticsearch.painless; -import java.util.Collections; -import java.util.Map; - -/** Tests integer overflow with numeric overflow enabled */ -public class IntegerOverflowEnabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); - } +/** Tests integer overflow cases */ +public class IntegerOverflowTests extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // byte diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 4b56d9751b6..3fe071c5221 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -60,6 +60,7 @@ public class NeedsScoreTests extends ESSingleNodeTestCase { ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "painless", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); + service.close(); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java index f2e65fc680c..e4af7a2d166 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java @@ -35,7 +35,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(2.0, exec("double a = 2; return a")); assertEquals(false, exec("boolean a = false; return a")); assertEquals("string", exec("String a = \"string\"; return a")); - assertEquals(HashMap.class, exec("Map a = new HashMap<String,Object>(); return a").getClass()); + assertEquals(HashMap.class, exec("Map a = new HashMap(); return a").getClass()); assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a").getClass()); assertEquals(short[].class, exec("short[] a = new short[1]; return a").getClass()); @@ -46,7 +46,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(double[].class, exec("double[] a = new double[1]; return a").getClass()); assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a").getClass()); assertEquals(String[].class, exec("String[] a = new String[1]; return a").getClass()); - assertEquals(Map[].class, exec("Map<String,Object>[] a = new Map[1]; return a").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a").getClass()); assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a").getClass()); assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a").getClass()); @@ -57,7 +57,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a").getClass()); assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a").getClass()); assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a").getClass()); - assertEquals(Map[][][].class, exec("Map<String,Object>[][][] a = new Map[1][2][3]; return a").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a").getClass()); } public void testExpression() { @@ -73,6 +73,6 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(5, exec("int x = 5; return x")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1]")); assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s"))[1]); - assertEquals(10, ((Map)exec("Map s = new HashMap< String,Object>(); s.put(\"x\", 10); return s")).get("x")); + assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s")).get("x")); } }
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 2ccd2f1460a..c3ce127034e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -48,7 +48,7 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map<String, Object> vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); + return exec(script, vars, Collections.emptyMap()); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */
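With CompilerSettings.NUMERIC_OVERFLOW gone, the two-argument exec above no longer randomizes compiler settings on each run; every test now compiles with the defaults, which makes failures reproducible. A sketch of a call site under the simplified harness (illustrative test class, not part of this change):

    package org.elasticsearch.painless;

    import java.util.Collections;

    /** Sketch only: the simplified harness always compiles with default settings. */
    public class HarnessSketchTests extends ScriptTestCase {
        public void testDeterministicDefaults() {
            // Delegates to the three-argument overload with no compile-time parameters.
            assertEquals(2, exec("int x = 1; return x + 1;", Collections.emptyMap()));
        }
    }

diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index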
e61541bf371..b06199cf903 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -70,13 +70,13 @@ public class StringTests extends ScriptTestCase { public void testAppendMultiple() { assertEquals("cat" + true + "abc" + null, exec("String s = \"cat\"; return s + true + 'abc' + null;")); } - + public void testAppendMany() { for (int i = MAX_INDY_STRING_CONCAT_ARGS - 5; i < MAX_INDY_STRING_CONCAT_ARGS + 5; i++) { doTestAppendMany(i); } } - + private void doTestAppendMany(int count) { StringBuilder script = new StringBuilder("String s = \"cat\"; return s"); StringBuilder result = new StringBuilder("cat"); @@ -90,11 +90,11 @@ public class StringTests extends ScriptTestCase { Debugger.toString(s).contains(String.format(Locale.ROOT, "LDC \"%03d\"", count/2))); assertEquals(result.toString(), exec(s)); } - + public void testNestedConcats() { assertEquals("foo1010foo", exec("String s = 'foo'; String x = '10'; return s + Integer.parseInt(x + x) + s;")); } - + public void testStringAPI() { assertEquals("", exec("return new String();")); assertEquals('x', exec("String s = \"x\"; return s.charAt(0);")); @@ -166,14 +166,14 @@ public class StringTests extends ScriptTestCase { assertEquals("cc", exec("return (String)(char)\"cc\"")); fail(); } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char].")); + assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } try { assertEquals("cc", exec("return (String)(char)'cc'")); fail(); } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char].")); + assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } try { @@ -189,41 +189,5 @@ public class StringTests extends ScriptTestCase { } catch (final ClassCastException cce) { assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } - - assertEquals('c', exec("return (Character)\"c\"")); - assertEquals('c', exec("return (Character)'c'")); - assertEquals("c", exec("return (String)(Character)\"c\"")); - assertEquals("c", exec("return (String)(Character)'c'")); - - assertEquals('c', exec("String s = \"c\"; (Character)s")); - assertEquals('c', exec("String s = 'c'; (Character)s")); - - try { - assertEquals("cc", exec("return (String)(Character)\"cc\"")); - fail(); - } catch (final ClassCastException ise) { - assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals("cc", exec("return (String)(Character)'cc'")); - fail(); - } catch (final ClassCastException ise) { - assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals('c', exec("String s = \"cc\"; (Character)s")); - fail(); - } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals('c', exec("String s = 'cc'; (Character)s")); - fail(); - } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java deleted file mode 100644 index ba476fac7f2..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import org.elasticsearch.test.ESTestCase; - -/** - * Tests utility methods (typically built-ins) - */ -public class UtilityTests extends ESTestCase { - - public void testDivideWithoutOverflowInt() { - assertEquals(5 / 2, Utility.divideWithoutOverflow(5, 2)); - - try { - Utility.divideWithoutOverflow(Integer.MIN_VALUE, -1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5, 0); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowLong() { - assertEquals(5L / 2L, Utility.divideWithoutOverflow(5L, 2L)); - - try { - Utility.divideWithoutOverflow(Long.MIN_VALUE, -1L); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5L, 0L); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToByteExact() { - for (int b = Byte.MIN_VALUE; b < Byte.MAX_VALUE; b++) { - assertEquals((byte)b, Utility.toByteExact(b)); - } - - try { - Utility.toByteExact(Byte.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toByteExact(Byte.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToShortExact() { - for (int s = Short.MIN_VALUE; s < Short.MAX_VALUE; s++) { - assertEquals((short)s, Utility.toShortExact(s)); - } - - try { - Utility.toShortExact(Short.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toShortExact(Short.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToCharExact() { - for (int c = Character.MIN_VALUE; c < Character.MAX_VALUE; c++) { - assertEquals((char)c, Utility.toCharExact(c)); - } - - try { - Utility.toCharExact(Character.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toCharExact(Character.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddWithoutOverflowFloat() { - assertEquals(10F, Utility.addWithoutOverflow(5F, 5F), 0F); - assertTrue(Float.isNaN(Utility.addWithoutOverflow(5F, Float.NaN))); - 
assertTrue(Float.isNaN(Utility.addWithoutOverflow(Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY))); - - try { - Utility.addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.addWithoutOverflow(-Float.MAX_VALUE, -Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddWithoutOverflowDouble() { - assertEquals(10D, Utility.addWithoutOverflow(5D, 5D), 0D); - assertTrue(Double.isNaN(Utility.addWithoutOverflow(5D, Double.NaN))); - assertTrue(Double.isNaN(Utility.addWithoutOverflow(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY))); - - try { - Utility.addWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.addWithoutOverflow(-Double.MAX_VALUE, -Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractWithoutOverflowFloat() { - assertEquals(5F, Utility.subtractWithoutOverflow(10F, 5F), 0F); - assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(5F, Float.NaN))); - assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY))); - - try { - Utility.subtractWithoutOverflow(Float.MAX_VALUE, -Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.subtractWithoutOverflow(-Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractWithoutOverflowDouble() { - assertEquals(5D, Utility.subtractWithoutOverflow(10D, 5D), 0D); - assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(5D, Double.NaN))); - assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY))); - - try { - Utility.subtractWithoutOverflow(Double.MAX_VALUE, -Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.subtractWithoutOverflow(-Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplyWithoutOverflowFloat() { - assertEquals(25F, Utility.multiplyWithoutOverflow(5F, 5F), 0F); - assertTrue(Float.isNaN(Utility.multiplyWithoutOverflow(5F, Float.NaN))); - assertEquals(Float.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5F, Float.POSITIVE_INFINITY), 0F); - - try { - Utility.multiplyWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplyWithoutOverflowDouble() { - assertEquals(25D, Utility.multiplyWithoutOverflow(5D, 5D), 0D); - assertTrue(Double.isNaN(Utility.multiplyWithoutOverflow(5D, Double.NaN))); - assertEquals(Double.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5D, Double.POSITIVE_INFINITY), 0D); - - try { - Utility.multiplyWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowFloat() { - assertEquals(5F, Utility.divideWithoutOverflow(25F, 5F), 0F); - assertTrue(Float.isNaN(Utility.divideWithoutOverflow(5F, Float.NaN))); - assertEquals(Float.POSITIVE_INFINITY, 
Utility.divideWithoutOverflow(Float.POSITIVE_INFINITY, 5F), 0F); - - try { - Utility.divideWithoutOverflow(Float.MAX_VALUE, Float.MIN_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(0F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowDouble() { - assertEquals(5D, Utility.divideWithoutOverflow(25D, 5D), 0D); - assertTrue(Double.isNaN(Utility.divideWithoutOverflow(5D, Double.NaN))); - assertEquals(Double.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Double.POSITIVE_INFINITY, 5D), 0D); - - try { - Utility.divideWithoutOverflow(Double.MAX_VALUE, Double.MIN_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(0D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderWithoutOverflowFloat() { - assertEquals(1F, Utility.remainderWithoutOverflow(25F, 4F), 0F); - - try { - Utility.remainderWithoutOverflow(5F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderWithoutOverflowDouble() { - assertEquals(1D, Utility.remainderWithoutOverflow(25D, 4D), 0D); - - try { - Utility.remainderWithoutOverflow(5D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index f8561d17acd..ee2f5484737 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -75,7 +75,7 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort; * Abstract base for scrolling across a search and executing bulk actions on all results. All package private methods are package private so * their tests can use them. Most methods run in the listener thread pool because they are meant to be fast and don't expect to block. */ -public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBulkByScrollRequest<Request>, Response> { +public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBulkByScrollRequest<Request>> { /** * The request for this action. Named mainRequest because we create lots of request variables all representing child * requests of this mainRequest.
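The hunk above removes the Response type parameter: every scroll-driven bulk action now answers with a BulkIndexByScrollResponse, and buildResponse (changed further down in this file) becomes a concrete method instead of an abstract one. A minimal sketch of a concrete action after this change; the class name is hypothetical and it assumes buildBulk is the only abstract method left on the base class:

--------------------------------
package org.elasticsearch.index.reindex;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.threadpool.ThreadPool;

// Hypothetical subclass, shown only to illustrate the narrowed generic signature:
// the request type is the single remaining type parameter.
public class ExampleDeleteAction extends AbstractAsyncBulkByScrollAction<DeleteByQueryRequest> {

    public ExampleDeleteAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client,
            ThreadPool threadPool, DeleteByQueryRequest request, SearchRequest firstSearchRequest,
            ActionListener<BulkIndexByScrollResponse> listener) {
        super(task, logger, client, threadPool, request, firstSearchRequest, listener);
    }

    @Override
    protected BulkRequest buildBulk(Iterable<SearchHit> docs) {
        BulkRequest bulk = new BulkRequest();
        for (SearchHit doc : docs) {
            // a delete-by-query only needs the index, type and ID of each hit
            bulk.add(new DeleteRequest(doc.index(), doc.type(), doc.id()));
        }
        return bulk;
    }
}
--------------------------------

Since the response type no longer varies per action, buildResponse can live in the base class, which is exactly what the following hunks do.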
@@ -92,12 +92,13 @@ public abstract class AbstractAsyncBulkByScrollAction listener; + private final ActionListener listener; private final BackoffPolicy backoffPolicy; private final Retry bulkRetry; public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, ActionListener listener) { + ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ActionListener listener) { this.task = task; this.logger = logger; this.client = client; @@ -111,8 +112,13 @@ public abstract class AbstractAsyncBulkByScrollAction docs); - protected abstract Response buildResponse(TimeValue took, List indexingFailures, List searchFailures, - boolean timedOut); + /** + * Build the response for reindex actions. + */ + protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, + List searchFailures, boolean timedOut) { + return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); + } /** * Start the action by firing the initial search request. diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java index df9245346b3..0e3f3678851 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java @@ -20,15 +20,16 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; @@ -40,6 +41,7 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; @@ -47,9 +49,9 @@ import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.BiFunction; import static java.util.Collections.emptyMap; @@ -57,91 +59,106 @@ import static java.util.Collections.emptyMap; * Abstract base for scrolling across a search and executing bulk indexes on all * results. 
*/ -public abstract class AbstractAsyncBulkIndexByScrollAction> - extends AbstractAsyncBulkByScrollAction { +public abstract class AbstractAsyncBulkIndexByScrollAction> + extends AbstractAsyncBulkByScrollAction { - private final ScriptService scriptService; - private final CompiledScript script; + protected final ScriptService scriptService; + protected final ClusterState clusterState; - public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService, ClusterState state, - ParentTaskAssigningClient client, ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, - ActionListener listener) { + /** + * This BiFunction is used to apply various changes depending of the Reindex action and the search hit, + * from copying search hit metadata (parent, routing, etc) to potentially transforming the + * {@link RequestWrapper} completely. + */ + private final BiFunction, SearchHit, RequestWrapper> scriptApplier; + + public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, + ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { super(task, logger, client, threadPool, mainRequest, firstSearchRequest, listener); this.scriptService = scriptService; - if (mainRequest.getScript() == null) { - script = null; - } else { - script = scriptService.compile(mainRequest.getScript(), ScriptContext.Standard.UPDATE, emptyMap(), state); - } - } - - @Override - protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); + this.clusterState = clusterState; + this.scriptApplier = Objects.requireNonNull(buildScriptApplier(), "script applier must not be null"); } /** - * Build the IndexRequest for a single search hit. This shouldn't handle - * metadata or the script. That will be handled by copyMetadata and - * applyScript functions that can be overridden. + * Build the {@link BiFunction} to apply to all {@link RequestWrapper}. */ - protected abstract IndexRequest buildIndexRequest(SearchHit doc); + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + // The default script applier executes a no-op + return (request, searchHit) -> request; + } @Override protected BulkRequest buildBulk(Iterable docs) { BulkRequest bulkRequest = new BulkRequest(); - ExecutableScript executableScript = null; - Map scriptCtx = null; - for (SearchHit doc : docs) { - if (doc.hasSource()) { - /* - * Either the document didn't store _source or we didn't fetch it for some reason. Since we don't allow the user to - * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source. - * Thus the error message assumes that it wasn't stored. 
- */ - throw new IllegalArgumentException("[" + doc.index() + "][" + doc.type() + "][" + doc.id() + "] didn't store _source"); - } - IndexRequest index = buildIndexRequest(doc); - copyMetadata(index, doc); - if (script != null) { - if (executableScript == null) { - executableScript = scriptService.executable(script, mainRequest.getScript().getParams()); - scriptCtx = new HashMap<>(); - } - if (false == applyScript(index, doc, executableScript, scriptCtx)) { - continue; + if (accept(doc)) { + RequestWrapper request = scriptApplier.apply(copyMetadata(buildRequest(doc), doc), doc); + if (request != null) { + bulkRequest.add(request.self()); } } - bulkRequest.add(index); } - return bulkRequest; } /** - * Copies the metadata from a hit to the index request. + * Used to accept or ignore a search hit. Ignored search hits will be excluded + * from the bulk request. It is also where we fail on invalid search hits, like + * when the document has no source but it's required. */ - protected void copyMetadata(IndexRequest index, SearchHit doc) { - index.parent(fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(index, doc); - // Comes back as a Long but needs to be a string - Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); - if (timestamp != null) { - index.timestamp(timestamp.toString()); - } - Long ttl = fieldValue(doc, TTLFieldMapper.NAME); - if (ttl != null) { - index.ttl(ttl); + protected boolean accept(SearchHit doc) { + if (doc.hasSource()) { + /* + * Either the document didn't store _source or we didn't fetch it for some reason. Since we don't allow the user to + * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source. + * Thus the error message assumes that it wasn't stored. + */ + throw new IllegalArgumentException("[" + doc.index() + "][" + doc.type() + "][" + doc.id() + "] didn't store _source"); } + return true; } /** - * Part of copyMetadata but called out individual for easy overwriting. + * Build the {@link RequestWrapper} for a single search hit. This shouldn't handle + * metadata or scripting. That will be handled by copyMetadata and + * apply functions that can be overridden. */ - protected void copyRouting(IndexRequest index, SearchHit doc) { - index.routing(fieldValue(doc, RoutingFieldMapper.NAME)); + protected abstract RequestWrapper buildRequest(SearchHit doc); + + /** + * Copies the metadata from a hit to the request. + */ + protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { + copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); + copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + + // Comes back as a Long but needs to be a string + Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); + if (timestamp != null) { + request.setTimestamp(timestamp.toString()); + } + Long ttl = fieldValue(doc, TTLFieldMapper.NAME); + if (ttl != null) { + request.setTtl(ttl); + } + return request; + } + + /** + * Copy the parent from a search hit to the request. + */ + protected void copyParent(RequestWrapper request, String parent) { + request.setParent(parent); + } + + /** + * Copy the routing from a search hit to the request. 
+ */ + protected void copyRouting(RequestWrapper request, String routing) { + request.setRouting(routing); } protected T fieldValue(SearchHit doc, String fieldName) { @@ -150,106 +167,327 @@ public abstract class AbstractAsyncBulkIndexByScrollAction ctx) { - if (script == null) { - return true; - } - ctx.put(IndexFieldMapper.NAME, doc.index()); - ctx.put(TypeFieldMapper.NAME, doc.type()); - ctx.put(IdFieldMapper.NAME, doc.id()); - Long oldVersion = doc.getVersion(); - ctx.put(VersionFieldMapper.NAME, oldVersion); - String oldParent = fieldValue(doc, ParentFieldMapper.NAME); - ctx.put(ParentFieldMapper.NAME, oldParent); - String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); - ctx.put(RoutingFieldMapper.NAME, oldRouting); - Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); - ctx.put(TimestampFieldMapper.NAME, oldTimestamp); - Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); - ctx.put(TTLFieldMapper.NAME, oldTTL); - ctx.put(SourceFieldMapper.NAME, index.sourceAsMap()); - ctx.put("op", "update"); - script.setNextVar("ctx", ctx); - script.run(); - Map resultCtx = (Map) script.unwrap(ctx); - String newOp = (String) resultCtx.remove("op"); - if (newOp == null) { - throw new IllegalArgumentException("Script cleared op!"); - } - if ("noop".equals(newOp)) { - task.countNoop(); - return false; - } - if (false == "update".equals(newOp)) { - throw new IllegalArgumentException("Invalid op [" + newOp + ']'); - } + interface RequestWrapper> { - /* - * It'd be lovely to only set the source if we know its been modified - * but it isn't worth keeping two copies of it around just to check! - */ - index.source((Map) resultCtx.remove(SourceFieldMapper.NAME)); + void setIndex(String index); - Object newValue = ctx.remove(IndexFieldMapper.NAME); - if (false == doc.index().equals(newValue)) { - scriptChangedIndex(index, newValue); - } - newValue = ctx.remove(TypeFieldMapper.NAME); - if (false == doc.type().equals(newValue)) { - scriptChangedType(index, newValue); - } - newValue = ctx.remove(IdFieldMapper.NAME); - if (false == doc.id().equals(newValue)) { - scriptChangedId(index, newValue); - } - newValue = ctx.remove(VersionFieldMapper.NAME); - if (false == Objects.equals(oldVersion, newValue)) { - scriptChangedVersion(index, newValue); - } - newValue = ctx.remove(ParentFieldMapper.NAME); - if (false == Objects.equals(oldParent, newValue)) { - scriptChangedParent(index, newValue); - } - /* - * Its important that routing comes after parent in case you want to - * change them both. 
- */ - newValue = ctx.remove(RoutingFieldMapper.NAME); - if (false == Objects.equals(oldRouting, newValue)) { - scriptChangedRouting(index, newValue); - } - newValue = ctx.remove(TimestampFieldMapper.NAME); - if (false == Objects.equals(oldTimestamp, newValue)) { - scriptChangedTimestamp(index, newValue); - } - newValue = ctx.remove(TTLFieldMapper.NAME); - if (false == Objects.equals(oldTTL, newValue)) { - scriptChangedTTL(index, newValue); - } - if (false == ctx.isEmpty()) { - throw new IllegalArgumentException("Invalid fields added to ctx [" + String.join(",", ctx.keySet()) + ']'); - } - return true; + void setType(String type); + + void setId(String id); + + void setVersion(long version); + + void setVersionType(VersionType versionType); + + void setParent(String parent); + + void setRouting(String routing); + + void setTimestamp(String timestamp); + + void setTtl(Long ttl); + + void setSource(Map source); + + Map getSource(); + + Self self(); } - protected abstract void scriptChangedIndex(IndexRequest index, Object to); + /** + * {@link RequestWrapper} for {@link IndexRequest} + */ + public static class IndexRequestWrapper implements RequestWrapper { - protected abstract void scriptChangedType(IndexRequest index, Object to); + private final IndexRequest request; - protected abstract void scriptChangedId(IndexRequest index, Object to); + IndexRequestWrapper(IndexRequest request) { + this.request = Objects.requireNonNull(request, "Wrapped IndexRequest can not be null"); + } - protected abstract void scriptChangedVersion(IndexRequest index, Object to); + @Override + public void setIndex(String index) { + request.index(index); + } - protected abstract void scriptChangedRouting(IndexRequest index, Object to); + @Override + public void setType(String type) { + request.type(type); + } - protected abstract void scriptChangedParent(IndexRequest index, Object to); + @Override + public void setId(String id) { + request.id(id); + } - protected abstract void scriptChangedTimestamp(IndexRequest index, Object to); + @Override + public void setVersion(long version) { + request.version(version); + } - protected abstract void scriptChangedTTL(IndexRequest index, Object to); + @Override + public void setVersionType(VersionType versionType) { + request.versionType(versionType); + } + + @Override + public void setParent(String parent) { + request.parent(parent); + } + + @Override + public void setRouting(String routing) { + request.routing(routing); + } + + @Override + public void setTimestamp(String timestamp) { + request.timestamp(timestamp); + } + + @Override + public void setTtl(Long ttl) { + if (ttl == null) { + request.ttl((TimeValue) null); + } else { + request.ttl(ttl); + } + } + + @Override + public Map getSource() { + return request.sourceAsMap(); + } + + @Override + public void setSource(Map source) { + request.source(source); + } + + @Override + public IndexRequest self() { + return request; + } + } + + /** + * Wraps a {@link IndexRequest} in a {@link RequestWrapper} + */ + static RequestWrapper wrap(IndexRequest request) { + return new IndexRequestWrapper(request); + } + + /** + * {@link RequestWrapper} for {@link DeleteRequest} + */ + public static class DeleteRequestWrapper implements RequestWrapper { + + private final DeleteRequest request; + + DeleteRequestWrapper(DeleteRequest request) { + this.request = Objects.requireNonNull(request, "Wrapped DeleteRequest can not be null"); + } + + @Override + public void setIndex(String index) { + request.index(index); + } + + @Override + public void 
setType(String type) { + request.type(type); + } + + @Override + public void setId(String id) { + request.id(id); + } + + @Override + public void setVersion(long version) { + request.version(version); + } + + @Override + public void setVersionType(VersionType versionType) { + request.versionType(versionType); + } + + @Override + public void setParent(String parent) { + request.parent(parent); + } + + @Override + public void setRouting(String routing) { + request.routing(routing); + } + + @Override + public void setTimestamp(String timestamp) { + throw new UnsupportedOperationException("unable to set [timestamp] on action request [" + request.getClass() + "]"); + } + + @Override + public void setTtl(Long ttl) { + throw new UnsupportedOperationException("unable to set [ttl] on action request [" + request.getClass() + "]"); + } + + @Override + public Map getSource() { + throw new UnsupportedOperationException("unable to get source from action request [" + request.getClass() + "]"); + } + + @Override + public void setSource(Map source) { + throw new UnsupportedOperationException("unable to set [source] on action request [" + request.getClass() + "]"); + } + + @Override + public DeleteRequest self() { + return request; + } + } + + /** + * Wraps a {@link DeleteRequest} in a {@link RequestWrapper} + */ + static RequestWrapper wrap(DeleteRequest request) { + return new DeleteRequestWrapper(request); + } + + /** + * Apply a {@link Script} to a {@link RequestWrapper} + */ + public abstract class ScriptApplier implements BiFunction, SearchHit, RequestWrapper> { + + private final BulkByScrollTask task; + private final ScriptService scriptService; + private final ClusterState state; + private final Script script; + private final Map params; + + private ExecutableScript executable; + private Map context; + + public ScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + Map params) { + this.task = task; + this.scriptService = scriptService; + this.script = script; + this.state = state; + this.params = params; + } + + @Override + @SuppressWarnings("unchecked") + public RequestWrapper apply(RequestWrapper request, SearchHit doc) { + if (script == null) { + return request; + } + if (executable == null) { + CompiledScript compiled = scriptService.compile(script, ScriptContext.Standard.UPDATE, emptyMap(), state); + executable = scriptService.executable(compiled, params); + } + if (context == null) { + context = new HashMap<>(); + } + + context.put(IndexFieldMapper.NAME, doc.index()); + context.put(TypeFieldMapper.NAME, doc.type()); + context.put(IdFieldMapper.NAME, doc.id()); + Long oldVersion = doc.getVersion(); + context.put(VersionFieldMapper.NAME, oldVersion); + String oldParent = fieldValue(doc, ParentFieldMapper.NAME); + context.put(ParentFieldMapper.NAME, oldParent); + String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); + context.put(RoutingFieldMapper.NAME, oldRouting); + Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); + context.put(TimestampFieldMapper.NAME, oldTimestamp); + Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); + context.put(TTLFieldMapper.NAME, oldTTL); + context.put(SourceFieldMapper.NAME, request.getSource()); + context.put("op", "update"); + executable.setNextVar("ctx", context); + executable.run(); + + Map resultCtx = (Map) executable.unwrap(context); + String newOp = (String) resultCtx.remove("op"); + if (newOp == null) { + throw new IllegalArgumentException("Script cleared op!"); + } + if 
("noop".equals(newOp)) { + task.countNoop(); + return null; + } + if (false == "update".equals(newOp)) { + throw new IllegalArgumentException("Invalid op [" + newOp + ']'); + } + + /* + * It'd be lovely to only set the source if we know its been modified + * but it isn't worth keeping two copies of it around just to check! + */ + request.setSource((Map) resultCtx.remove(SourceFieldMapper.NAME)); + + Object newValue = context.remove(IndexFieldMapper.NAME); + if (false == doc.index().equals(newValue)) { + scriptChangedIndex(request, newValue); + } + newValue = context.remove(TypeFieldMapper.NAME); + if (false == doc.type().equals(newValue)) { + scriptChangedType(request, newValue); + } + newValue = context.remove(IdFieldMapper.NAME); + if (false == doc.id().equals(newValue)) { + scriptChangedId(request, newValue); + } + newValue = context.remove(VersionFieldMapper.NAME); + if (false == Objects.equals(oldVersion, newValue)) { + scriptChangedVersion(request, newValue); + } + newValue = context.remove(ParentFieldMapper.NAME); + if (false == Objects.equals(oldParent, newValue)) { + scriptChangedParent(request, newValue); + } + /* + * Its important that routing comes after parent in case you want to + * change them both. + */ + newValue = context.remove(RoutingFieldMapper.NAME); + if (false == Objects.equals(oldRouting, newValue)) { + scriptChangedRouting(request, newValue); + } + newValue = context.remove(TimestampFieldMapper.NAME); + if (false == Objects.equals(oldTimestamp, newValue)) { + scriptChangedTimestamp(request, newValue); + } + newValue = context.remove(TTLFieldMapper.NAME); + if (false == Objects.equals(oldTTL, newValue)) { + scriptChangedTTL(request, newValue); + } + if (false == context.isEmpty()) { + throw new IllegalArgumentException("Invalid fields added to context [" + String.join(",", context.keySet()) + ']'); + } + return request; + } + + protected abstract void scriptChangedIndex(RequestWrapper request, Object to); + + protected abstract void scriptChangedType(RequestWrapper request, Object to); + + protected abstract void scriptChangedId(RequestWrapper request, Object to); + + protected abstract void scriptChangedVersion(RequestWrapper request, Object to); + + protected abstract void scriptChangedRouting(RequestWrapper request, Object to); + + protected abstract void scriptChangedParent(RequestWrapper request, Object to); + + protected abstract void scriptChangedTimestamp(RequestWrapper request, Object to); + + protected abstract void scriptChangedTTL(RequestWrapper request, Object to); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index d98735d3fb4..e78a6a9c350 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; @@ -45,32 +46,6 @@ public abstract class AbstractBaseReindexRestHandler< TA extends TransportAction > extends BaseRestHandler { - /** - * @return requests_per_second from the request as a float if it was on 
the request, null otherwise - */ - public static Float parseRequestsPerSecond(RestRequest request) { - String requestsPerSecondString = request.param("requests_per_second"); - if (requestsPerSecondString == null) { - return null; - } - if ("unlimited".equals(requestsPerSecondString)) { - return Float.POSITIVE_INFINITY; - } - float requestsPerSecond; - try { - requestsPerSecond = Float.parseFloat(requestsPerSecondString); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling.", e); - } - if (requestsPerSecond <= 0) { - // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of "unlimited" - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling."); - } - return requestsPerSecond; - } - protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final AggregatorParsers aggParsers; protected final Suggesters suggesters; @@ -88,41 +63,95 @@ public abstract class AbstractBaseReindexRestHandler< this.action = action; } - protected void execute(RestRequest request, Request internalRequest, RestChannel channel, - boolean includeCreated, boolean includeUpdated, boolean includeDeleted) throws IOException { - Float requestsPerSecond = parseRequestsPerSecond(request); - if (requestsPerSecond != null) { - internalRequest.setRequestsPerSecond(requestsPerSecond); - } + protected void handleRequest(RestRequest request, RestChannel channel, + boolean includeCreated, boolean includeUpdated, boolean includeDeleted) throws IOException { + // Build the internal request + Request internal = setCommonOptions(request, buildRequest(request)); + + // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { Map params = new HashMap<>(); params.put(BulkByScrollTask.Status.INCLUDE_CREATED, Boolean.toString(includeCreated)); params.put(BulkByScrollTask.Status.INCLUDE_UPDATED, Boolean.toString(includeUpdated)); params.put(BulkByScrollTask.Status.INCLUDE_DELETED, Boolean.toString(includeDeleted)); - action.execute(internalRequest, new BulkIndexByScrollResponseContentListener<>(channel, params)); + action.execute(internal, new BulkIndexByScrollResponseContentListener<>(channel, params)); return; } + /* * Lets try and validate before forking so the user gets some error. The * task can't totally validate until it starts but this is better than * nothing. */ - ActionRequestValidationException validationException = internalRequest.validate(); + ActionRequestValidationException validationException = internal.validate(); if (validationException != null) { channel.sendResponse(new BytesRestResponse(channel, validationException)); return; } - Task task = action.execute(internalRequest, LoggingTaskListener.instance()); - sendTask(channel, task); + sendTask(channel, action.execute(internal, LoggingTaskListener.instance())); + } + + /** + * Build the Request based on the RestRequest. + */ + protected abstract Request buildRequest(RestRequest request) throws IOException; + + /** + * Sets common options of {@link AbstractBulkByScrollRequest} requests. 
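+ * <p>
+ * The options read from the REST request are {@code refresh}, {@code timeout},
+ * {@code consistency} and {@code requests_per_second}. For example (hypothetical
+ * index name, using the delete-by-query endpoint registered further below):
+ * <pre>
+ * POST /twitter/_delete_by_query?refresh=true&amp;requests_per_second=500
+ * { "query": { "match_all": {} } }
+ * </pre>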
+ */ + protected Request setCommonOptions(RestRequest restRequest, Request request) { + assert restRequest != null : "RestRequest should not be null"; + assert request != null : "Request should not be null"; + + request.setRefresh(restRequest.paramAsBoolean("refresh", request.isRefresh())); + request.setTimeout(restRequest.paramAsTime("timeout", request.getTimeout())); + + String consistency = restRequest.param("consistency"); + if (consistency != null) { + request.setConsistency(WriteConsistencyLevel.fromString(consistency)); + } + + Float requestsPerSecond = parseRequestsPerSecond(restRequest); + if (requestsPerSecond != null) { + request.setRequestsPerSecond(requestsPerSecond); + } + return request; } private void sendTask(RestChannel channel, Task task) throws IOException { - XContentBuilder builder = channel.newBuilder(); - builder.startObject(); - builder.field("task", clusterService.localNode().getId() + ":" + task.getId()); - builder.endObject(); - channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + builder.field("task", clusterService.localNode().getId() + ":" + task.getId()); + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + } + + /** + * @return requests_per_second from the request as a float if it was on the request, null otherwise + */ + public static Float parseRequestsPerSecond(RestRequest request) { + String requestsPerSecondString = request.param("requests_per_second"); + if (requestsPerSecondString == null) { + return null; + } + if ("unlimited".equals(requestsPerSecondString)) { + return Float.POSITIVE_INFINITY; + } + float requestsPerSecond; + try { + requestsPerSecond = Float.parseFloat(requestsPerSecondString); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling.", e); + } + if (requestsPerSecond <= 0) { + // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of "unlimited" + throw new IllegalArgumentException( + "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling."); + } + return requestsPerSecond; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java new file mode 100644 index 00000000000..926da3befdd --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.rest.action.support.RestActions; +import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.suggest.Suggesters; + +import java.io.IOException; +import java.util.Map; +import java.util.function.Consumer; + +import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; + +/** + * Rest handler for reindex actions that accepts a search request like Update-By-Query or Delete-By-Query + */ +public abstract class AbstractBulkByQueryRestHandler< + Request extends AbstractBulkByScrollRequest, + TA extends TransportAction> extends AbstractBaseReindexRestHandler { + + protected AbstractBulkByQueryRestHandler(Settings settings, Client client, IndicesQueriesRegistry indicesQueriesRegistry, + AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, + TA action) { + super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + } + + protected void parseInternalRequest(Request internal, RestRequest restRequest, + Map> consumers) throws IOException { + assert internal != null : "Request should not be null"; + assert restRequest != null : "RestRequest should not be null"; + + SearchRequest searchRequest = internal.getSearchRequest(); + int scrollSize = searchRequest.source().size(); + searchRequest.source().size(SIZE_ALL_MATCHES); + + parseSearchRequest(searchRequest, restRequest, consumers); + + internal.setSize(searchRequest.source().size()); + searchRequest.source().size(restRequest.paramAsInt("scroll_size", scrollSize)); + + String conflicts = restRequest.param("conflicts"); + if (conflicts != null) { + internal.setConflicts(conflicts); + } + + // Let the requester set search timeout. It is probably only going to be useful for testing but who knows. + if (restRequest.hasParam("search_timeout")) { + searchRequest.source().timeout(restRequest.paramAsTime("search_timeout", null)); + } + } + + protected void parseSearchRequest(SearchRequest searchRequest, RestRequest restRequest, + Map> consumers) throws IOException { + assert searchRequest != null : "SearchRequest should not be null"; + assert restRequest != null : "RestRequest should not be null"; + + /* + * We can't send parseSearchRequest REST content that it doesn't support + * so we will have to remove the content that is valid in addition to + * what it supports from the content first. This is a temporary hack and + * should get better when SearchRequest has full ObjectParser support + * then we can delegate and stuff. + */ + BytesReference content = RestActions.hasBodyContent(restRequest) ? 
RestActions.getRestContent(restRequest) : null; + if ((content != null) && (consumers != null && consumers.size() > 0)) { + Tuple> body = XContentHelper.convertToMap(content, false); + boolean modified = false; + for (Map.Entry> consumer : consumers.entrySet()) { + Object value = body.v2().remove(consumer.getKey()); + if (value != null) { + consumer.getValue().accept(value); + modified = true; + } + } + + if (modified) { + try (XContentBuilder builder = XContentFactory.contentBuilder(body.v1())) { + content = builder.map(body.v2()).bytes(); + } + } + } + + RestSearchAction.parseSearchRequest(searchRequest, indicesQueriesRegistry, restRequest, parseFieldMatcher, aggParsers, + suggesters, content); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java new file mode 100644 index 00000000000..c789e9c77b4 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeleteByQueryAction extends Action { + + public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction(); + public static final String NAME = "indices:data/write/delete/byquery"; + + private DeleteByQueryAction() { + super(NAME); + } + + @Override + public DeleteByQueryRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new DeleteByQueryRequestBuilder(client, this); + } + + @Override + public BulkIndexByScrollResponse newResponse() { + return new BulkIndexByScrollResponse(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java new file mode 100644 index 00000000000..327459bd339 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.search.SearchRequest; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Creates a new {@link DeleteByQueryRequest} that uses scrolling and bulk requests to delete all documents matching + * the query. This can have performance as well as visibility implications. + * + * Delete-by-query now has the following semantics: + *
+ * <ul>
+ * <li>it's non-atomic, a delete-by-query may fail at any time while some documents matching the query have already been
+ * deleted</li>
+ * <li>it's syntactic sugar, a delete-by-query is equivalent to a scroll search and corresponding bulk-deletes by ID</li>
+ * <li>it's executed on a point-in-time snapshot, a delete-by-query will only delete the documents that are visible at the
+ * point in time the delete-by-query was started, equivalent to the scroll API</li>
+ * <li>it's consistent, a delete-by-query will yield consistent results across all replicas of a shard</li>
+ * <li>it's forward-compatible, a delete-by-query will only send IDs to the shards as deletes such that no queries are
+ * stored in the transaction logs that might not be supported in the future.</li>
+ * <li>its results won't be visible until the index is refreshed.</li>
+ * </ul>
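+ *
+ * A short usage sketch (index name and query are illustrative only):
+ * <pre>
+ * SearchRequest search = new SearchRequest("index");
+ * search.source(new SearchSourceBuilder().query(QueryBuilders.termQuery("user", "kimchy")));
+ * DeleteByQueryRequest request = new DeleteByQueryRequest(search);
+ * </pre>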
+ */ +public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQueryRequest> { + + public DeleteByQueryRequest() { + } + + public DeleteByQueryRequest(SearchRequest search) { + super(search); + // Delete-By-Query does not require the source + search.source().fetchSource(false); + } + + @Override + protected DeleteByQueryRequest self() { + return this; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException e = super.validate(); + // check the search request and its source for null before dereferencing them + if (getSearchRequest() == null || getSearchRequest().source() == null) { + e = addValidationError("source is missing", e); + } else if (getSearchRequest().indices() == null || getSearchRequest().indices().length == 0) { + e = addValidationError("use _all if you really want to delete from all existing indexes", e); + } + return e; + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + b.append("delete-by-query "); + searchToString(b); + return b.toString(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java new file mode 100644 index 00000000000..f4d8a91f4cb --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeleteByQueryRequestBuilder extends + AbstractBulkByScrollRequestBuilder { + + public DeleteByQueryRequestBuilder(ElasticsearchClient client, + Action action) { + this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); + } + + private DeleteByQueryRequestBuilder(ElasticsearchClient client, + Action action, + SearchRequestBuilder search) { + super(client, action, search, new DeleteByQueryRequest(search.request())); + } + + @Override + protected DeleteByQueryRequestBuilder self() { + return this; + } + + @Override + public DeleteByQueryRequestBuilder abortOnVersionConflict(boolean abortOnVersionConflict) { + request.setAbortOnVersionConflict(abortOnVersionConflict); + return this; + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index 6e42f56dece..e3a826dbdad 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -39,12 +39,14 @@ public class ReindexPlugin extends Plugin { public void onModule(ActionModule actionModule) { actionModule.registerAction(ReindexAction.INSTANCE, TransportReindexAction.class); actionModule.registerAction(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class); + actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); actionModule.registerAction(RethrottleAction.INSTANCE, TransportRethrottleAction.class); } public void onModule(NetworkModule networkModule) { networkModule.registerRestHandler(RestReindexAction.class); networkModule.registerRestHandler(RestUpdateByQueryAction.class); + networkModule.registerRestHandler(RestDeleteByQueryAction.class); networkModule.registerRestHandler(RestRethrottleAction.class); networkModule.registerTaskStatus(BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java new file mode 100644 index 00000000000..4750fe22313 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.suggest.Suggesters; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Consumer; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { + + @Inject + public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, + IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, TransportDeleteByQueryAction action) { + super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + controller.registerHandler(POST, "/{index}/_delete_by_query", this); + controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); + } + + @Override + protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { + if (false == request.hasContent()) { + throw new ElasticsearchException("_delete_by_query requires a request body"); + } + handleRequest(request, channel, false, false, true); + } + + @Override + protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException { + /* + * Passing the search request through DeleteByQueryRequest first allows + * it to set its own defaults which differ from SearchRequest's + * defaults. Then the parseInternalRequest can override them. 
+ */ + DeleteByQueryRequest internal = new DeleteByQueryRequest(new SearchRequest()); + + Map> consumers = new HashMap<>(); + consumers.put("conflicts", o -> internal.setConflicts((String) o)); + + parseInternalRequest(internal, request, consumers); + + return internal; + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index d61980fb8ce..267994672d4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.WriteConsistencyLevel; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -39,7 +38,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -53,13 +51,14 @@ import java.util.Map; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; /** * Expose IndexBySearchRequest over rest. 
*/ public class RestReindexAction extends AbstractBaseReindexRestHandler { + private static final ObjectParser PARSER = new ObjectParser<>("reindex"); + static { ObjectParser.Parser sourceParser = (parser, search, context) -> { /* @@ -114,41 +113,18 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler internalRequest, RestRequest request) { - internalRequest.setRefresh(request.paramAsBoolean("refresh", internalRequest.isRefresh())); - internalRequest.setTimeout(request.paramAsTime("timeout", internalRequest.getTimeout())); - String consistency = request.param("consistency"); - if (consistency != null) { - internalRequest.setConsistency(WriteConsistencyLevel.fromString(consistency)); + PARSER.parse(xcontent, internal, new ReindexParseContext(indicesQueriesRegistry, aggParsers, suggesters, parseFieldMatcher)); } + return internal; } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java index a7c29d40a7d..382f5b51726 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java @@ -39,6 +39,7 @@ public class RestRethrottleAction extends BaseRestHandler { super(settings, client); this.action = action; controller.registerHandler(POST, "/_update_by_query/{taskId}/_rethrottle", this); + controller.registerHandler(POST, "/_delete_by_query/{taskId}/_rethrottle", this); controller.registerHandler(POST, "/_reindex/{taskId}/_rethrottle", this); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 9f76be3f0ff..f7dbbf893a8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -22,31 +22,24 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; +import java.io.IOException; +import java.util.HashMap; import java.util.Map; +import java.util.function.Consumer; -import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; -import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon; import static org.elasticsearch.rest.RestRequest.Method.POST; -public 
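The extra route registered above makes running delete-by-query tasks rethrottlable the same way reindex and update-by-query already are. A sketch of the transport-level equivalent, assuming the rethrottle builder chains setTaskId and setRequestsPerSecond (taskId would come from the tasks list API):

--------------------------------
RethrottleAction.INSTANCE.newRequestBuilder(client)
        .setTaskId(taskId)          // the running delete-by-query task, e.g. "nodeId:123"
        .setRequestsPerSecond(0.5f) // the new throttle
        .get();
--------------------------------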
class RestUpdateByQueryAction extends AbstractBaseReindexRestHandler { +public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { @Inject public RestUpdateByQueryAction(Settings settings, RestController controller, Client client, @@ -59,60 +52,26 @@ public class RestUpdateByQueryAction extends AbstractBaseReindexRestHandler> body = XContentHelper.convertToMap(bodyContent, false); - boolean modified = false; - String conflicts = (String) body.v2().remove("conflicts"); - if (conflicts != null) { - internalRequest.setConflicts(conflicts); - modified = true; - } - @SuppressWarnings("unchecked") - Map script = (Map) body.v2().remove("script"); - if (script != null) { - internalRequest.setScript(Script.parse(script, false, parseFieldMatcher)); - modified = true; - } - if (modified) { - XContentBuilder builder = XContentFactory.contentBuilder(body.v1()); - builder.map(body.v2()); - bodyContent = builder.bytes(); - } - } - RestSearchAction.parseSearchRequest(internalRequest.getSearchRequest(), indicesQueriesRegistry, request, - parseFieldMatcher, aggParsers, suggesters, bodyContent); + UpdateByQueryRequest internal = new UpdateByQueryRequest(new SearchRequest()); - String conflicts = request.param("conflicts"); - if (conflicts != null) { - internalRequest.setConflicts(conflicts); - } - parseCommon(internalRequest, request); + Map> consumers = new HashMap<>(); + consumers.put("conflicts", o -> internal.setConflicts((String) o)); + consumers.put("script", o -> internal.setScript(Script.parse((Map)o, false, parseFieldMatcher))); - internalRequest.setSize(internalRequest.getSearchRequest().source().size()); - internalRequest.setPipeline(request.param("pipeline")); - internalRequest.getSearchRequest().source().size(request.paramAsInt("scroll_size", scrollSize)); - // Let the requester set search timeout. It is probably only going to be useful for testing but who knows. - if (request.hasParam("search_timeout")) { - internalRequest.getSearchRequest().source().timeout(request.paramAsTime("search_timeout", null)); - } + parseInternalRequest(internal, request, consumers); - execute(request, internalRequest, channel, false, true, false); + internal.setPipeline(request.param("pipeline")); + return internal; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java new file mode 100644 index 00000000000..471bd066f94 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
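The "script" consumer above is the REST-side counterpart of setting the script directly on the request; a sketch using the single-argument Script constructor that the tests below also use (index name and script body are placeholders):

--------------------------------
UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
updateByQuery.source("tweets").refresh(true);
// an inline script, the same shape the REST layer parses out of the request body
updateByQuery.request().setScript(new Script("ctx._source.likes = 0"));
BulkIndexByScrollResponse response = updateByQuery.get();
--------------------------------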
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +public class TransportDeleteByQueryAction extends HandledTransportAction { + private final Client client; + private final ScriptService scriptService; + private final ClusterService clusterService; + + @Inject + public TransportDeleteByQueryAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver resolver, Client client, TransportService transportService, + ScriptService scriptService, ClusterService clusterService) { + super(settings, DeleteByQueryAction.NAME, threadPool, transportService, actionFilters, resolver, DeleteByQueryRequest::new); + this.client = client; + this.scriptService = scriptService; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, DeleteByQueryRequest request, ActionListener listener) { + ClusterState state = clusterService.state(); + ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); + new AsyncDeleteBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); + } + + @Override + protected void doExecute(DeleteByQueryRequest request, ActionListener listener) { + throw new UnsupportedOperationException("task required"); + } + + /** + * Implementation of delete-by-query using scrolling and bulk. 
+ */
+    static class AsyncDeleteBySearchAction extends AbstractAsyncBulkIndexByScrollAction<DeleteByQueryRequest> {
+
+        public AsyncDeleteBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
+                                         DeleteByQueryRequest request, ActionListener<BulkIndexByScrollResponse> listener,
+                                         ScriptService scriptService, ClusterState clusterState) {
+            super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState);
+        }
+
+        @Override
+        protected boolean accept(SearchHit doc) {
+            // Delete-by-query does not require the source to delete a document
+            // and the default implementation checks for it
+            return true;
+        }
+
+        @Override
+        protected RequestWrapper<DeleteRequest> buildRequest(SearchHit doc) {
+            DeleteRequest delete = new DeleteRequest();
+            delete.index(doc.index());
+            delete.type(doc.type());
+            delete.id(doc.id());
+            delete.version(doc.version());
+            return wrap(delete);
+        }
+
+        /**
+         * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, SearchHit)}
+         * method that is much more Update/Reindex oriented and so also copies things like timestamp/ttl, which we
+         * don't care about when deleting.
+         */
+        @Override
+        protected RequestWrapper<?> copyMetadata(RequestWrapper<?> request, SearchHit doc) {
+            copyParent(request, fieldValue(doc, ParentFieldMapper.NAME));
+            copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME));
+            return request;
+        }
+    }
+}
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
index 0f07cc560c8..a49ba0a3b32 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
@@ -35,16 +35,18 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
 import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
+import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Map;
 import java.util.Objects;
+import java.util.function.BiFunction;
 
 import static java.util.Objects.requireNonNull;
 import static org.elasticsearch.index.VersionType.INTERNAL;
@@ -72,7 +74,7 @@ public class TransportReindexAction extends HandledTransportAction {
-        public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService,
-                ParentTaskAssigningClient client, ClusterState state, ThreadPool threadPool, ReindexRequest request,
-                ActionListener<BulkIndexByScrollResponse> listener) {
-            super(task, logger, scriptService, state, client, threadPool, request, request.getSearchRequest(), listener);
+
+        public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
+                                        ReindexRequest request, ActionListener<BulkIndexByScrollResponse> listener,
+                                        ScriptService scriptService, ClusterState clusterState) {
+            super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState);
         }
 
         @Override
-        protected
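buildRequest above carries each scroll hit's version onto its DeleteRequest, so a document that changes between the search and the bulk round-trip fails with a version conflict rather than being deleted blindly; illustrative values only:

--------------------------------
DeleteRequest delete = new DeleteRequest("tweets", "tweet", "1");
delete.version(5L); // the version the scroll observed; a concurrent update to version 6 makes this delete fail
--------------------------------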
IndexRequest buildIndexRequest(SearchHit doc) { + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + Script script = mainRequest.getScript(); + if (script != null) { + return new ReindexScriptApplier(task, scriptService, script, clusterState, script.getParams()); + } + return super.buildScriptApplier(); + } + + @Override + protected RequestWrapper buildRequest(SearchHit doc) { IndexRequest index = new IndexRequest(); // Copy the index from the request so we always write where it asked to write @@ -161,109 +174,120 @@ public class TransportReindexAction extends HandledTransportAction request, String routing) { String routingSpec = mainRequest.getDestination().routing(); if (routingSpec == null) { - super.copyRouting(index, doc); + super.copyRouting(request, routing); return; } if (routingSpec.startsWith("=")) { - index.routing(mainRequest.getDestination().routing().substring(1)); + super.copyRouting(request, mainRequest.getDestination().routing().substring(1)); return; } switch (routingSpec) { case "keep": - super.copyRouting(index, doc); + super.copyRouting(request, routing); break; case "discard": - index.routing(null); + super.copyRouting(request, null); break; default: throw new IllegalArgumentException("Unsupported routing command"); } } - /* - * Methods below here handle script updating the index request. They try - * to be pretty liberal with regards to types because script are often - * dynamically typed. - */ - @Override - protected void scriptChangedIndex(IndexRequest index, Object to) { - requireNonNull(to, "Can't reindex without a destination index!"); - index.index(to.toString()); - } + class ReindexScriptApplier extends ScriptApplier { - @Override - protected void scriptChangedType(IndexRequest index, Object to) { - requireNonNull(to, "Can't reindex without a destination type!"); - index.type(to.toString()); - } - - @Override - protected void scriptChangedId(IndexRequest index, Object to) { - index.id(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedVersion(IndexRequest index, Object to) { - if (to == null) { - index.version(Versions.MATCH_ANY).versionType(INTERNAL); - return; + ReindexScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + Map params) { + super(task, scriptService, script, state, params); } - index.version(asLong(to, VersionFieldMapper.NAME)); - } - @Override - protected void scriptChangedParent(IndexRequest index, Object to) { - // Have to override routing with parent just in case its changed - String routing = Objects.toString(to, null); - index.parent(routing).routing(routing); - } - - @Override - protected void scriptChangedRouting(IndexRequest index, Object to) { - index.routing(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTimestamp(IndexRequest index, Object to) { - index.timestamp(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTTL(IndexRequest index, Object to) { - if (to == null) { - index.ttl((TimeValue) null); - return; - } - index.ttl(asLong(to, TTLFieldMapper.NAME)); - } - - private long asLong(Object from, String name) { /* - * Stuffing a number into the map will have converted it to - * some Number. + * Methods below here handle script updating the index request. They try + * to be pretty liberal with regards to types because script are often + * dynamically typed. 
*/ - Number fromNumber; - try { - fromNumber = (Number) from; - } catch (ClassCastException e) { - throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]", e); + + @Override + protected void scriptChangedIndex(RequestWrapper request, Object to) { + requireNonNull(to, "Can't reindex without a destination index!"); + request.setIndex(to.toString()); } - long l = fromNumber.longValue(); - // Check that we didn't round when we fetched the value. - if (fromNumber.doubleValue() != l) { - throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]"); + + @Override + protected void scriptChangedType(RequestWrapper request, Object to) { + requireNonNull(to, "Can't reindex without a destination type!"); + request.setType(to.toString()); + } + + @Override + protected void scriptChangedId(RequestWrapper request, Object to) { + request.setId(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedVersion(RequestWrapper request, Object to) { + if (to == null) { + request.setVersion(Versions.MATCH_ANY); + request.setVersionType(INTERNAL); + } else { + request.setVersion(asLong(to, VersionFieldMapper.NAME)); + } + } + + @Override + protected void scriptChangedParent(RequestWrapper request, Object to) { + // Have to override routing with parent just in case its changed + String routing = Objects.toString(to, null); + request.setParent(routing); + request.setRouting(routing); + } + + @Override + protected void scriptChangedRouting(RequestWrapper request, Object to) { + request.setRouting(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedTimestamp(RequestWrapper request, Object to) { + request.setTimestamp(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedTTL(RequestWrapper request, Object to) { + if (to == null) { + request.setTtl(null); + } else { + request.setTtl(asLong(to, TTLFieldMapper.NAME)); + } + } + + private long asLong(Object from, String name) { + /* + * Stuffing a number into the map will have converted it to + * some Number. + * */ + Number fromNumber; + try { + fromNumber = (Number) from; + } catch (ClassCastException e) { + throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]", e); + } + long l = fromNumber.longValue(); + // Check that we didn't round when we fetched the value. 
+ if (fromNumber.doubleValue() != l) { + throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]"); + } + return l; } - return l; } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 4d702be4e9d..7459972ce64 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -39,12 +39,16 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Map; +import java.util.function.BiFunction; + public class TransportUpdateByQueryAction extends HandledTransportAction { private final Client client; private final ScriptService scriptService; @@ -65,8 +69,7 @@ public class TransportUpdateByQueryAction extends HandledTransportAction listener) { ClusterState state = clusterService.state(); ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); - new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, scriptService, client, threadPool, state, request, listener) - .start(); + new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); } @Override @@ -78,14 +81,24 @@ public class TransportUpdateByQueryAction extends HandledTransportAction { - public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService, - ParentTaskAssigningClient client, ThreadPool threadPool, ClusterState clusterState, UpdateByQueryRequest request, - ActionListener listener) { - super(task, logger, scriptService, clusterState, client, threadPool, request, request.getSearchRequest(), listener); + + public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool, + UpdateByQueryRequest request, ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { + super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); } @Override - protected IndexRequest buildIndexRequest(SearchHit doc) { + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + Script script = mainRequest.getScript(); + if (script != null) { + return new UpdateByQueryScriptApplier(task, scriptService, script, clusterState, script.getParams()); + } + return super.buildScriptApplier(); + } + + @Override + protected RequestWrapper buildRequest(SearchHit doc) { IndexRequest index = new IndexRequest(); index.index(doc.index()); index.type(doc.type()); @@ -94,47 +107,55 @@ public class TransportUpdateByQueryAction extends HandledTransportAction params) { + super(task, scriptService, script, state, params); + } - @Override - protected void scriptChangedId(IndexRequest index, Object to) { - throw new 
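The asLong guard above tolerates whatever Number a dynamically typed script stuffed into ctx but refuses lossy conversions; a self-contained restatement with illustrative inputs:

--------------------------------
Number fromNumber = 42;               // an Integer from a script: accepted
long l = fromNumber.longValue();      // 42L
assert fromNumber.doubleValue() == l; // exact round-trip, nothing was rounded away

// a Double such as 42.5 fails the check (doubleValue() != longValue()), so the
// caller reports: _version may only be set to an int or a long but was [42.5]
--------------------------------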
IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedIndex(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + IndexFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedVersion(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [_version] not allowed"); - } + @Override + protected void scriptChangedType(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedRouting(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + RoutingFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedId(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedParent(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedVersion(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [_version] not allowed"); + } - @Override - protected void scriptChangedTimestamp(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedRouting(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + RoutingFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedTTL(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); + @Override + protected void scriptChangedParent(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed"); + } + + @Override + protected void scriptChangedTimestamp(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); + } + + @Override + protected void scriptChangedTTL(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); + } } } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java index b8f389d171a..27a8a42d5e6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java @@ -22,10 +22,15 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.Index; +import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchHit; +import org.junit.Before; +import 
org.mockito.Matchers; import java.util.HashMap; import java.util.Map; @@ -33,18 +38,35 @@ import java.util.function.Consumer; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< Request extends AbstractBulkIndexByScrollRequest, Response extends BulkIndexByScrollResponse> extends AbstractAsyncBulkIndexByScrollActionTestCase { + + private static final Script EMPTY_SCRIPT = new Script(""); + + protected ScriptService scriptService; + + @Before + public void setupScriptService() { + scriptService = mock(ScriptService.class); + } + protected IndexRequest applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); Map fields = new HashMap<>(); InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), fields); doc.shardTarget(new SearchShardTarget("nodeid", new Index("index", "uuid"), 1)); - ExecutableScript script = new SimpleExecutableScript(scriptBody); - action().applyScript(index, doc, script, new HashMap<>()); + ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); + + when(scriptService.executable(any(CompiledScript.class), Matchers.>any())) + .thenReturn(executableScript); + AbstractAsyncBulkIndexByScrollAction action = action(scriptService, request().setScript(EMPTY_SCRIPT)); + action.buildScriptApplier().apply(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc); return index; } @@ -53,7 +75,7 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< applyScript((Map ctx) -> ctx.put("junk", "junk")); fail("Expected error"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Invalid fields added to ctx [junk]")); + assertThat(e.getMessage(), equalTo("Invalid fields added to context [junk]")); } } @@ -65,4 +87,6 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< }); assertEquals("cat", index.sourceAsMap().get("bar")); } + + protected abstract AbstractAsyncBulkIndexByScrollAction action(ScriptService scriptService, Request request); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java index ab5f8f0d748..b9489e9f5d9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java @@ -46,8 +46,6 @@ public abstract class AbstractAsyncBulkIndexByScrollActionTestCase< threadPool.shutdown(); } - protected abstract AbstractAsyncBulkIndexByScrollAction action(); - protected abstract Request request(); protected PlainActionFuture listener() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 37386abf12e..5a9976fc005 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -49,13 +49,15 @@ public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< public void testTimestampIsCopied() { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(TimestampFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TimestampFieldMapper.NAME, 10L)); assertEquals("10", index.timestamp()); } public void testTTL() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(TTLFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TTLFieldMapper.NAME, 10L)); assertEquals(timeValueMillis(10), index.ttl()); } + + protected abstract AbstractAsyncBulkIndexByScrollAction action(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index c9bd22d3552..d78fcfe69ca 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -665,7 +665,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { } private class DummyAbstractAsyncBulkByScrollAction - extends AbstractAsyncBulkByScrollAction { + extends AbstractAsyncBulkByScrollAction { public DummyAbstractAsyncBulkByScrollAction() { super(testTask, logger, new ParentTaskAssigningClient(client, localNode, testTask), threadPool, testRequest, firstSearchRequest, listener); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java index 5bce3260929..4ef16c59141 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java @@ -30,6 +30,8 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher createdMatcher = equalTo(0L); private Matcher updatedMatcher = equalTo(0L); + private Matcher deletedMatcher = equalTo(0L); + /** * Matches for number of batches. Optional. */ @@ -56,6 +58,15 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher deletedMatcher) { + this.deletedMatcher = deletedMatcher; + return this; + } + + public BulkIndexByScrollResponseMatcher deleted(long deleted) { + return deleted(equalTo(deleted)); + } + /** * Set the matches for the number of batches. 
Defaults to matching any * integer because we usually don't care about how many batches the job @@ -110,6 +121,7 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("fields1", 1)); + } + indexRandom(true, true, true, builders); + + assertThat(deleteByQuery().source("t*").refresh(true).get(), matcher().deleted(docs)); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); + } + + public void testDeleteByQueryWithMultipleIndices() throws Exception { + final int indices = randomIntBetween(2, 5); + final int docs = randomIntBetween(2, 10) * 2; + long[] candidates = new long[indices]; + + // total number of expected deletions + long deletions = 0; + + List builders = new ArrayList<>(); + for (int i = 0; i < indices; i++) { + // number of documents to be deleted with the upcoming delete-by-query + // (this number differs for each index) + candidates[i] = randomIntBetween(1, docs); + deletions = deletions + candidates[i]; + + for (int j = 0; j < docs; j++) { + boolean candidate = (j < candidates[i]); + builders.add(client().prepareIndex("test-" + i, "doc", String.valueOf(j)).setSource("candidate", candidate)); + } + } + indexRandom(true, true, true, builders); + + // Deletes all the documents with candidate=true + assertThat(deleteByQuery().source("test-*").filter(termQuery("candidate", true)).refresh(true).get(), + matcher().deleted(deletions)); + + for (int i = 0; i < indices; i++) { + long remaining = docs - candidates[i]; + assertHitCount(client().prepareSearch("test-" + i).setSize(0).get(), remaining); + } + + assertHitCount(client().prepareSearch().setSize(0).get(), (indices * docs) - deletions); + } + + public void testDeleteByQueryWithMissingIndex() throws Exception { + indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a")); + assertHitCount(client().prepareSearch().setSize(0).get(), 1); + + try { + deleteByQuery().source("missing").get(); + fail("should have thrown an exception because of a missing index"); + } catch (IndexNotFoundException e) { + // Ok + } + } + + public void testDeleteByQueryWithRouting() throws Exception { + assertAcked(prepareCreate("test").setSettings("number_of_shards", 2)); + ensureGreen("test"); + + final int docs = randomIntBetween(2, 10); + logger.info("--> indexing [{}] documents with routing", docs); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); + } + indexRandom(true, true, true, builders); + + logger.info("--> counting documents with no routing, should be equal to [{}]", docs); + assertHitCount(client().prepareSearch().setSize(0).get(), docs); + + String routing = String.valueOf(randomIntBetween(2, docs)); + + logger.info("--> counting documents with routing [{}]", routing); + long expected = client().prepareSearch().setSize(0).setRouting(routing).get().getHits().totalHits(); + + logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); + DeleteByQueryRequestBuilder delete = deleteByQuery().source("test"); + delete.source().setRouting(routing); + assertThat(delete.refresh(true).get(), matcher().deleted(expected)); + + assertHitCount(client().prepareSearch().setSize(0).get(), docs - expected); + } + + public void testDeleteByMatchQuery() throws Exception { + 
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); + + final int docs = scaledRandomIntBetween(10, 100); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", Integer.toString(i)) + .setRouting(randomAsciiOfLengthBetween(1, 5)) + .setSource("foo", "bar")); + } + indexRandom(true, true, true, builders); + + int n = between(0, docs - 1); + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))).get(), 1); + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), docs); + + DeleteByQueryRequestBuilder delete = deleteByQuery().source("alias").filter(matchQuery("_id", Integer.toString(n))); + assertThat(delete.refresh(true).get(), matcher().deleted(1L)); + + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), docs - 1); + } + + public void testDeleteByQueryWithDateMath() throws Exception { + indexRandom(true, client().prepareIndex("test", "type", "1").setSource("d", "2013-01-01")); + + DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h")); + assertThat(delete.refresh(true).get(), matcher().deleted(1L)); + + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); + } + + public void testDeleteByQueryOnReadOnlyIndex() throws Exception { + createIndex("test"); + + final int docs = randomIntBetween(1, 50); + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", 1)); + } + indexRandom(true, true, true, builders); + + try { + enableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); + assertThat(deleteByQuery().source("test").refresh(true).get(), matcher().deleted(0).failures(docs)); + } finally { + disableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); + } + + assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java new file mode 100644 index 00000000000..6007b646429 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java @@ -0,0 +1,184 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.index.reindex;
+
+import org.elasticsearch.action.ListenableActionFuture;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
+import org.elasticsearch.common.util.concurrent.CountDown;
+import org.elasticsearch.index.IndexModule;
+import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.shard.IndexingOperationListener;
+import org.elasticsearch.plugins.Plugin;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
+/**
+ * Tests that you can actually cancel a delete-by-query request and all the plumbing works. Doesn't test all of the different
+ * cancellation places - that is the responsibility of {@link AsyncBulkByScrollActionTests}, which has more precise control to
+ * simulate failures but does not exercise important portions of the stack like transport and task management.
+ */
+public class DeleteByQueryCancelTests extends ReindexTestCase {
+
+    private static final String INDEX = "test-delete-by-query";
+    private static final String TYPE = "test";
+
+    private static final int MAX_DELETIONS = 10;
+    private static final CyclicBarrier barrier = new CyclicBarrier(2);
+
+    @Override
+    protected int numberOfShards() {
+        // Only 1 shard and no replica so that test execution
+        // can be easily controlled within a {@link IndexingOperationListener#preDelete}
+        return 1;
+    }
+
+    @Override
+    protected int numberOfReplicas() {
+        // Only 1 shard and no replica so that test execution
+        // can be easily controlled within a {@link IndexingOperationListener#preDelete}
+        return 0;
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        Collection<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
+        plugins.add(DeleteByQueryCancellationPlugin.class);
+        return plugins;
+    }
+
+    public void testCancel() throws Exception {
+        createIndex(INDEX);
+
+        int totalNumShards = getNumShards(INDEX).totalNumShards;
+
+        // Number of documents to be deleted in this test
+        final int nbDocsToDelete = totalNumShards * MAX_DELETIONS;
+
+        // Total number of documents that will be created in this test
+        final int nbDocs = nbDocsToDelete * randomIntBetween(1, 5);
+        for (int i = 0; i < nbDocs; i++) {
+            indexRandom(false, client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i));
+        }
+
+        refresh(INDEX);
+        assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), nbDocs);
+
+        // Executes the delete by query; each shard will block after MAX_DELETIONS
+        DeleteByQueryRequestBuilder deleteByQuery = deleteByQuery().source("_all");
+        deleteByQuery.source().setSize(1);
+
+        ListenableActionFuture<BulkIndexByScrollResponse> future = deleteByQuery.execute();
+
+        // Waits for the indexing operation listener to block
+        barrier.await(30, TimeUnit.SECONDS);
+
+        // Status should show running
+        ListTasksResponse tasksList = client().admin().cluster().prepareListTasks()
+                .setActions(DeleteByQueryAction.NAME).setDetailed(true).get();
+        assertThat(tasksList.getNodeFailures(), empty());
+        assertThat(tasksList.getTaskFailures(), empty());
+        assertThat(tasksList.getTasks(), hasSize(1));
+        BulkByScrollTask.Status status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus();
+        assertNull(status.getReasonCancelled());
+
+        // Cancel the request while the deletions are blocked. This will prevent further delete requests from being sent.
+        List<TaskInfo> cancelledTasks = client().admin().cluster().prepareCancelTasks()
+                .setActions(DeleteByQueryAction.NAME).get().getTasks();
+        assertThat(cancelledTasks, hasSize(1));
+
+        // The status should now show cancelled. The request will still be in the list because the listener is still blocked.
+        tasksList = client().admin().cluster().prepareListTasks().setActions(DeleteByQueryAction.NAME).setDetailed(true).get();
+        assertThat(tasksList.getNodeFailures(), empty());
+        assertThat(tasksList.getTaskFailures(), empty());
+        assertThat(tasksList.getTasks(), hasSize(1));
+        status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus();
+        assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled());
+
+        // Now unblock the listener so that it can proceed
+        barrier.await();
+
+        // And check the status of the response
+        BulkIndexByScrollResponse response = future.get();
+        assertThat(response, matcher()
+                .deleted(lessThanOrEqualTo((long) MAX_DELETIONS)).batches(MAX_DELETIONS).reasonCancelled(equalTo("by user request")));
+    }
+
+
+    public static class DeleteByQueryCancellationPlugin extends Plugin {
+
+        @Override
+        public String name() {
+            return "delete-by-query-cancellation";
+        }
+
+        @Override
+        public String description() {
+            return "See " + DeleteByQueryCancellationPlugin.class.getName();
+        }
+
+        @Override
+        public void onIndexModule(IndexModule indexModule) {
+            indexModule.addIndexOperationListener(new BlockingDeleteListener());
+        }
+    }
+
+    /**
+     * An {@link IndexingOperationListener} that allows a given number of documents to be deleted
+     * and then blocks until it is notified to proceed.
+     */
+    public static class BlockingDeleteListener implements IndexingOperationListener {
+
+        private final CountDown blockAfter = new CountDown(MAX_DELETIONS);
+
+        @Override
+        public Engine.Delete preDelete(Engine.Delete delete) {
+            if (blockAfter.isCountedDown() || (TYPE.equals(delete.type()) == false)) {
+                return delete;
+            }
+
+            if (blockAfter.countDown()) {
+                try {
+                    // Tell the test we've deleted enough documents.
+                    barrier.await(30, TimeUnit.SECONDS);
+
+                    // Wait for the test to tell us to proceed.
+                    barrier.await(30, TimeUnit.SECONDS);
+                } catch (InterruptedException | BrokenBarrierException | TimeoutException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+            return delete;
+        }
+    }
+}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java
new file mode 100644
index 00000000000..40a776626a3
--- /dev/null
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership.
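The test drives cancellation through the generic task management API; condensed from the calls above, the client-side sequence is:

--------------------------------
// locate the running delete-by-query task
ListTasksResponse tasks = client.admin().cluster().prepareListTasks()
        .setActions(DeleteByQueryAction.NAME).setDetailed(true).get();

// cancel every task running the delete-by-query action
client.admin().cluster().prepareCancelTasks()
        .setActions(DeleteByQueryAction.NAME).get();

// the original request's response then reports getReasonCancelled() == "by user request"
--------------------------------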
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicLong; + +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; + +public class DeleteByQueryConcurrentTests extends ReindexTestCase { + + public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; + final long docs = randomIntBetween(1, 50); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + for (int t = 0; t < threads.length; t++) { + builders.add(client().prepareIndex("test", "doc").setSource("field", t)); + } + } + indexRandom(true, true, true, builders); + + final CountDownLatch start = new CountDownLatch(1); + for (int t = 0; t < threads.length; t++) { + final int threadNum = t; + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)).get(), docs); + + Runnable r = () -> { + try { + start.await(); + + assertThat(deleteByQuery().source("_all").filter(termQuery("field", threadNum)).refresh(true).get(), + matcher().deleted(docs)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + threads[t] = new Thread(r); + threads[t].start(); + } + + start.countDown(); + for (Thread thread : threads) { + thread.join(); + } + + for (int t = 0; t < threads.length; t++) { + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)).get(), 0); + } + } + + public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { + final long docs = randomIntBetween(50, 100); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("foo", "bar")); + } + indexRandom(true, true, true, builders); + + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 9)]; + + final CountDownLatch start = new CountDownLatch(1); + final MatchQueryBuilder query = matchQuery("foo", "bar"); + final AtomicLong deleted = new AtomicLong(0); + + for (int t = 0; t < threads.length; t++) { + Runnable r = () -> { + try { + start.await(); + + BulkIndexByScrollResponse response = deleteByQuery().source("test").filter(query).refresh(true).get(); + // Some deletions might fail due to version conflict, but + // what matters here is the total of successful deletions + deleted.addAndGet(response.getDeleted()); + } catch 
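As the comment above notes, concurrent delete-by-queries race on document versions, and a conflict aborts the request by default. A hedged sketch of opting out via the abortOnVersionConflict override added earlier in this patch (the Java counterpart of sending conflicts=proceed in the REST body):

--------------------------------
BulkIndexByScrollResponse response = DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
        .source("test")
        .filter(QueryBuilders.matchQuery("foo", "bar"))
        .abortOnVersionConflict(false) // record version conflicts and carry on instead of failing
        .refresh(true)
        .get();
long deleted = response.getDeleted(); // successful deletions only
--------------------------------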
(InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + threads[t] = new Thread(r); + threads[t].start(); + } + + start.countDown(); + for (Thread thread : threads) { + thread.join(); + } + + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0L); + assertThat(deleted.get(), equalTo(docs)); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index e82dd12a407..670fcefbf55 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingCopiedByDefault() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @@ -37,7 +37,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("keep"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @@ -45,7 +45,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("discard"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals(null, index.routing()); } @@ -53,7 +53,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("=cat"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("cat", index.routing()); } @@ -61,13 +61,13 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("==]"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("=]", index.routing()); } @Override protected TransportReindexAction.AsyncIndexBySearchAction action() { - return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, null, null, threadPool, request(), listener()); + return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request(), listener(), null, null); } @Override diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index b805dbd2772..74b7548cd63 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -20,7 +20,10 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -31,6 +34,7 @@ import static org.hamcrest.Matchers.containsString; * Tests index-by-search with a script modifying the documents. */ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScriptTestCase { + public void testSetIndex() throws Exception { Object dest = randomFrom(new Object[] {234, 234L, "pancake"}); IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_index", dest)); @@ -129,11 +133,12 @@ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScri @Override protected ReindexRequest request() { - return new ReindexRequest(); + return new ReindexRequest(new SearchRequest(), new IndexRequest()); } @Override - protected AbstractAsyncBulkIndexByScrollAction<ReindexRequest> action() { - return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, null, null, threadPool, request(), listener()); + protected AbstractAsyncBulkIndexByScrollAction<ReindexRequest> action(ScriptService scriptService, ReindexRequest request) { + return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, listener(), scriptService, + null); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java index 33c72baa7cb..f4a777a1973 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java @@ -43,6 +43,10 @@ public abstract class ReindexTestCase extends ESIntegTestCase { return UpdateByQueryAction.INSTANCE.newRequestBuilder(client()); } + protected DeleteByQueryRequestBuilder deleteByQuery() { + return DeleteByQueryAction.INSTANCE.newRequestBuilder(client()); + } + protected RethrottleRequestBuilder rethrottle() { return RethrottleAction.INSTANCE.newRequestBuilder(client()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 0b557898552..ff821724316 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -43,6 +44,7 @@ import static org.hamcrest.Matchers.greaterThan; * Integration test for retry behavior. Useful because retrying relies on the way that the rest of Elasticsearch throws exceptions and unit * tests won't verify that.
*/ +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/18456") public class RetryTests extends ReindexTestCase { /** * The number of concurrent requests to test. diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index 5c5e45993b9..bb6a33b593a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -27,14 +27,13 @@ public class UpdateByQueryMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingIsCopied() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @Override protected TransportUpdateByQueryAction.AsyncIndexBySearchAction action() { - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, null, threadPool, null, request(), - listener()); + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request(), listener(), null, null); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 62f405cb0cd..1c57c202766 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.script.ScriptService; + import java.util.Date; import java.util.Map; @@ -26,6 +28,7 @@ import static org.hamcrest.Matchers.containsString; public class UpdateByQueryWithScriptTests extends AbstractAsyncBulkIndexByScrollActionScriptTestCase { + public void testModifyingCtxNotAllowed() { /* * It's important that none of these actually match any of the fields.
@@ -49,7 +52,8 @@ public class UpdateByQueryWithScriptTests } @Override - protected AbstractAsyncBulkIndexByScrollAction<UpdateByQueryRequest> action() { - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, null, threadPool, null, request(), listener()); + protected AbstractAsyncBulkIndexByScrollAction<UpdateByQueryRequest> action(ScriptService scriptService, UpdateByQueryRequest request) { + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, listener(), + scriptService, null); + } } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml new file mode 100644 index 00000000000..bdad5f581bc --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -0,0 +1,304 @@ +--- +"Basic response": + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + body: + query: + match_all: {} + + - is_false: timed_out + - match: {deleted: 1} + - is_false: created + - is_false: updated + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {noops: 0} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} + +--- +"wait_for_completion=false": + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + wait_for_completion: false + index: test + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + - is_false: version_conflicts + - is_false: batches + - is_false: failures + - is_false: noops + - is_false: took + - is_false: throttled_millis + - is_false: created + - is_false: updated + - is_false: deleted + + - do: + tasks.list: + wait_for_completion: true + task_id: $task + - is_false: node_failures + +--- +"Response for version conflict": + - do: + indices.create: + index: test + body: + settings: + index.refresh_interval: -1 + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + # Creates a new version for reindex to miss on scan. + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test2" } + + - do: + catch: conflict + delete_by_query: + index: test + body: + query: + match_all: {} + + - match: {deleted: 0} + - match: {version_conflicts: 1} + - match: {batches: 1} + - match: {failures.0.index: test} + - match: {failures.0.type: foo} + - match: {failures.0.id: "1"} + - match: {failures.0.status: 409} + - match: {failures.0.cause.type: version_conflict_engine_exception} + # Use a regex so we don't mind if the current version isn't always 1. Sometimes it comes out 2.
+ - match: {failures.0.cause.reason: "/\\[foo\\]\\[1\\]:.version.conflict,.current.version.\\[\\d+\\].is.different.than.the.one.provided.\\[\\d+\\]/"} + - match: {failures.0.cause.shard: /\d+/} + - match: {failures.0.cause.index: test} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + +--- +"Response for version conflict with conflicts=proceed": + - do: + indices.create: + index: test + body: + settings: + index.refresh_interval: -1 + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + # Creates a new version for reindex to miss on scan. + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test2" } + + - do: + delete_by_query: + index: test + conflicts: proceed + body: + query: + match_all: {} + + - match: {deleted: 0} + - match: {version_conflicts: 1} + - match: {batches: 1} + - match: {noops: 0} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + +--- +"Limit by query": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "user": "junk" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: twitter + refresh: true + body: + query: + match: + user: kimchy + - match: {deleted: 1} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - gte: { took: 0 } + + - do: + count: + index: twitter + + - match: {count: 1} + +--- +"Limit by size": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "user": "kimchy" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: twitter + size: 1 + body: + query: + match_all: {} + + - match: {deleted: 1} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: twitter + + - match: {count: 1} + +--- +"Can override scroll_size": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + refresh: true + scroll_size: 1 + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + + - do: + count: + index: test + + - match: {count: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml new file mode 100644 index 00000000000..8648c9034ee --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml @@ -0,0 +1,99 @@ +--- +"no body fails": + - do: + catch: param + delete_by_query: + index: _all + +--- +"invalid conflicts fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /conflicts may only be .* but was \[cat\]/ + delete_by_query: + index: test + conflicts: cat + body: + query: + match_all: {} + +--- +"invalid size 
fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /size should be greater than 0 if the request is limited to some number of documents or -1 if it isn't but it was \[-4\]/ + delete_by_query: + index: test + size: -4 + body: + query: + match_all: {} + +--- +"invalid scroll_size fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ + delete_by_query: + index: test + scroll_size: asdf + body: + query: + match_all: {} + +--- +"source fields may not be modified": + - do: + catch: /fields is not supported in this context/ + delete_by_query: + index: test + body: + fields: [_id] + +--- +"requests_per_second cannot be an empty string": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: "" + index: test + body: + query: + match_all: {} + +--- +"requests_per_second cannot be negative": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: -12 + index: test + body: + query: + match_all: {} + +--- +"requests_per_second cannot be zero": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: 0 + index: test + body: + query: + match_all: {} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml new file mode 100644 index 00000000000..1ee249fc6bb --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml @@ -0,0 +1,72 @@ +--- +"Delete by type": + - do: + index: + index: test + type: t1 + id: 1 + body: { foo: bar } + - do: + index: + index: test + type: t1 + id: 2 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 1 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 2 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 3 + body: { foo: baz } + - do: + indices.refresh: {} + - do: + count: + index: test + type: t2 + + - match: {count: 3} + + - do: + delete_by_query: + index: test + type: t2 + body: + query: + match: + foo: bar + + - is_false: timed_out + - match: {deleted: 2} + - is_false: created + - is_false: updated + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {noops: 0} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + + - do: + indices.refresh: {} + + - do: + count: + index: test + type: t2 + + - match: {count: 1} + diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml new file mode 100644 index 00000000000..a69dfdfac04 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml @@ -0,0 +1,62 @@ +--- +"can override consistency": + - do: + indices.create: + index: test + body: + settings: + number_of_replicas: 5 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: test + id: 1 + body: {"text": "test"} + consistency: one + - do: + indices.refresh: {} + + - do: + catch: unavailable + delete_by_query: + index: 
test + timeout: 1s + body: + query: + match_all: {} + + - match: + failures.0.cause.reason: /Not.enough.active.copies.to.meet.write.consistency.of.\[QUORUM\].\(have.1,.needed.4\)..Timeout\:.\[1s\],.request:.\[BulkShardRequest.to.\[test\].containing.\[1\].requests\]/ + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + + - do: + delete_by_query: + index: test + consistency: one + body: + query: + match_all: {} + + - match: {failures: []} + - match: {deleted: 1} + - match: {version_conflicts: 0} + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml new file mode 100644 index 00000000000..0ff382ff751 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml @@ -0,0 +1,202 @@ +"Throttle the request": + # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + scroll_size: 1 + requests_per_second: 1 + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + - gt: {throttled_millis: 1000} + - lt: {throttled_millis: 4000} + +--- +"requests_per_second supports unlimited which turns off throttling": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + scroll_size: 1 + requests_per_second: unlimited + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + - match: {throttled_millis: 0} + +--- +"Rethrottle": + # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + requests_per_second: .00000001 # About 9.5 years to complete the request + wait_for_completion: false + index: test + scroll_size: 1 + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + + - do: + reindex.rethrottle: + requests_per_second: unlimited + task_id: $task + + - do: + tasks.list: + wait_for_completion: true + task_id: $task + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} + +--- +"Rethrottle but not unlimited": + # Throttling happens 
between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + requests_per_second: .00000001 # About 9.5 years to complete the request + wait_for_completion: false + index: test + scroll_size: 1 + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + + - do: + reindex.rethrottle: + requests_per_second: 1 + task_id: $task + + - do: + tasks.list: + wait_for_completion: true + task_id: $task diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index 63d0edefe14..f4025383321 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -24,6 +24,7 @@ - match: {throttled_millis: 0} - gte: { took: 0 } - is_false: task + - is_false: deleted --- "Response format for updated": @@ -57,6 +58,7 @@ - match: {throttled_millis: 0} - gte: { took: 0 } - is_false: task + - is_false: deleted --- "wait_for_completion=false": @@ -88,6 +90,7 @@ - is_false: took - is_false: throttled_millis - is_false: created + - is_false: deleted - do: tasks.list: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index dc54c0d91cf..a7a5198e430 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -59,7 +59,7 @@ --- "search size fails if not a number": - do: - catch: '/NumberFormatException: For input string: "cat"/' + catch: '/number_format_exception.*For input string: \"cat\"/' reindex: body: source: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml index 89266198f2c..843bb9b6eb5 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml @@ -23,6 +23,7 @@ # Update by query can't create - is_false: created - is_false: task + - is_false: deleted --- "wait_for_completion=false": @@ -49,6 +50,7 @@ - is_false: took - is_false: throttled_millis - is_false: created + - is_false: deleted - do: tasks.list: diff --git a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml index 063e959a807..124122a44bf 100644 --- a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml +++ b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -25,6 +25,10 @@ setup: --- "Basic delete_by_query": + - skip: + version: all + reason: "Test muted because of a REST test namespace conflict, see 
https://github.com/elastic/elasticsearch/issues/18469" + - do: delete_by_query: index: test_1 diff --git a/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 
5d9c0e26c09..00000000000 --- a/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle new file mode 100644 index 00000000000..9968d4408e4 --- /dev/null +++ b/plugins/repository-gcs/build.gradle @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'The GCS repository plugin adds Google Cloud Storage support for repositories.' + classname 'org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin' +} + +versions << [ + 'google': '1.21.0', +] + +dependencies { + compile "com.google.apis:google-api-services-storage:v1-rev66-${versions.google}" + compile "com.google.api-client:google-api-client:${versions.google}" + compile "com.google.oauth-client:google-oauth-client:${versions.google}" + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "com.google.http-client:google-http-client:${versions.google}" + compile "com.google.http-client:google-http-client-jackson2:${versions.google}" +} + +dependencyLicenses { + mapping from: /google-.*/, to: 'google' +} + +thirdPartyAudit.excludes = [ + // classes are missing + 'com.google.common.base.Splitter', + 'com.google.common.collect.Lists', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 new file mode 100644 index 00000000000..3fe8682a1b0 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 @@ -0,0 +1 @@ +4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt new file mode 100644 index 00000000000..56916449bbe --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt @@ -0,0 +1,17 @@ +Apache Commons Codec +Copyright 2002-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java +contains test data from http://aspell.net/test/orig/batch0.tab. +Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org) + +=============================================================================== + +The content of package org.apache.commons.codec.language.bm has been translated +from the original php source code available at http://stevemorse.org/phoneticinfo.htm +with permission from the original authors. +Original source copyright: +Copyright (c) 2008 Alexander Beider & Stephen P. Morse. diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 00000000000..5b8f029e582 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt new file mode 100644 index 00000000000..57bc88a15a0 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt new file mode 100644 index 00000000000..72eb32a9024 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons CLI +Copyright 2001-2009 The Apache Software Foundation + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-gcs/licenses/google-LICENSE.txt b/plugins/repository-gcs/licenses/google-LICENSE.txt new file mode 100644 index 00000000000..980a15ac24e --- /dev/null +++ b/plugins/repository-gcs/licenses/google-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-NOTICE.txt b/plugins/repository-gcs/licenses/google-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..56988521028 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +16a6b3c680f3bf7b81bb42790ff5c1b72c5bbedc \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 new file mode 100644 index 00000000000..2a97aed2b79 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 @@ -0,0 +1 @@ +eb753d716e4f8dec203deb0f8fdca86913a79029 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..401abd444ce --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +42631630fe1276d4d6d6397bb07d53a4e4fec278 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 new file mode 100644 index 00000000000..e7ca5887412 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 @@ -0,0 +1 @@ +8ce17bdd15fff0fd8cf359757f29e778fc7191ad \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..7e3de94a9bc --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +61ec42bbfc51aafde5eb8b4923c602c5b5965bc2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/httpclient-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from +<https://publicsuffix.org/list/effective_tld_names.dat> +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. 
"Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. 
Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. 
Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. 
+ +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/plugins/repository-gcs/licenses/httpclient-NOTICE.txt b/plugins/repository-gcs/licenses/httpclient-NOTICE.txt new file mode 100644 index 00000000000..4f6058178b2 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-NOTICE.txt @@ -0,0 +1,5 @@ +Apache HttpComponents Client +Copyright 1999-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/httpcore-LICENSE.txt new file mode 100644 index 00000000000..72819a9f06f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-LICENSE.txt @@ -0,0 +1,241 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project contains annotations in the package org.apache.http.annotation +which are derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. +See http://www.jcip.net and the Creative Commons Attribution License +(http://creativecommons.org/licenses/by/2.5) +Full text: http://creativecommons.org/licenses/by/2.5/legalcode + +License + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. + +1. Definitions + + "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. 
+ "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. + "Licensor" means the individual or entity that offers the Work under the terms of this License. + "Original Author" means the individual or entity who created the Work. + "Work" means the copyrightable work of authorship offered under the terms of this License. + "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. + +2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. + +3. License Grant. Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: + + to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; + to create and reproduce Derivative Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. + + For the avoidance of doubt, where the work is a musical composition: + Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. + Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). + Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). 
+ +The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. + +4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: + + You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. + If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. + +5. 
Representations, Warranties and Disclaimer + +UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. + +6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. Termination + + This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. + Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. + +8. Miscellaneous + + Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. + Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. + If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. + This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. 
diff --git a/plugins/repository-gcs/licenses/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/httpcore-NOTICE.txt
new file mode 100644
index 00000000000..c0be50a505e
--- /dev/null
+++ b/plugins/repository-gcs/licenses/httpcore-NOTICE.txt
@@ -0,0 +1,8 @@
+Apache HttpComponents Core
+Copyright 2005-2014 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+This project contains annotations derived from JCIP-ANNOTATIONS
+Copyright (c) 2005 Brian Goetz and Tim Peierls. See http://www.jcip.net
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java
new file mode 100644
index 00000000000..d8117180ce3
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import org.elasticsearch.common.blobstore.BlobMetaData;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStoreException;
+import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
+import org.elasticsearch.common.bytes.BytesReference;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+
+public class GoogleCloudStorageBlobContainer extends AbstractBlobContainer {
+
+    private final GoogleCloudStorageBlobStore blobStore;
+    private final String path;
+
+    GoogleCloudStorageBlobContainer(BlobPath path, GoogleCloudStorageBlobStore blobStore) {
+        super(path);
+        this.blobStore = blobStore;
+
+        String keyPath = path.buildAsString("/");
+        // TODO Move this keyPath logic to the buildAsString() method
+        if (!keyPath.isEmpty()) {
+            keyPath = keyPath + "/";
+        }
+        this.path = keyPath;
+    }
+
+    @Override
+    public boolean blobExists(String blobName) {
+        try {
+            return blobStore.blobExists(buildKey(blobName));
+        } catch (Exception e) {
+            throw new BlobStoreException("Failed to check if blob [" + blobName + "] exists", e);
+        }
+    }
+
+    @Override
+    public Map<String, BlobMetaData> listBlobs() throws IOException {
+        return blobStore.listBlobs(path);
+    }
+
+    @Override
+    public Map<String, BlobMetaData> listBlobsByPrefix(String prefix) throws IOException {
+        return blobStore.listBlobsByPrefix(path, prefix);
+    }
+
+    @Override
+    public InputStream readBlob(String blobName) throws IOException {
+        return blobStore.readBlob(buildKey(blobName));
+    }
+
+    @Override
+    public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
+        blobStore.writeBlob(buildKey(blobName), inputStream, blobSize);
+    }
+
+    @Override
+    public void writeBlob(String blobName, BytesReference bytes) throws IOException {
+        writeBlob(blobName, bytes.streamInput(), bytes.length());
+    }
+
+    @Override
+    public void deleteBlob(String blobName) throws IOException {
+        blobStore.deleteBlob(buildKey(blobName));
+    }
+
+    @Override
+    public void deleteBlobsByPrefix(String prefix) throws IOException {
+        blobStore.deleteBlobsByPrefix(buildKey(prefix));
+    }
+
+    @Override
+    public void deleteBlobs(Collection<String> blobNames) throws IOException {
+        blobStore.deleteBlobs(buildKeys(blobNames));
+    }
+
+    @Override
+    public void move(String sourceBlobName, String targetBlobName) throws IOException {
+        blobStore.moveBlob(buildKey(sourceBlobName), buildKey(targetBlobName));
+    }
+
+    protected String buildKey(String blobName) {
+        assert blobName != null;
+        return path + blobName;
+    }
+
+    protected Set<String> buildKeys(Collection<String> blobNames) {
+        Set<String> keys = new HashSet<>();
+        if (blobNames != null) {
+            keys.addAll(blobNames.stream().map(this::buildKey).collect(Collectors.toList()));
+        }
+        return keys;
+    }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java
new file mode 100644
index 00000000000..7bf79494440
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java
@@ -0,0 +1,432 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import com.google.api.client.googleapis.batch.BatchRequest;
+import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
+import com.google.api.client.googleapis.json.GoogleJsonError;
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
+import com.google.api.client.http.HttpHeaders;
+import com.google.api.client.http.InputStreamContent;
+import com.google.api.services.storage.Storage;
+import com.google.api.services.storage.model.Bucket;
+import com.google.api.services.storage.model.Objects;
+import com.google.api.services.storage.model.StorageObject;
+import org.elasticsearch.SpecialPermission;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.blobstore.BlobContainer;
+import org.elasticsearch.common.blobstore.BlobMetaData;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.blobstore.BlobStoreException;
+import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.CountDown;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Spliterator;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
+
+public class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore {
+
+    /**
+     * Google Cloud Storage batch requests are limited to 1000 operations
+     **/
+    private static final int MAX_BATCHING_REQUESTS = 999;
+
+    private final Storage client;
+    private final String bucket;
+
+    public GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storageClient) {
+        super(settings);
+        this.bucket = bucket;
+        this.client = storageClient;
+
+        if (doesBucketExist(bucket) == false) {
+            throw new BlobStoreException("Bucket [" + bucket + "] does not exist");
+        }
+    }
+
+    @Override
+    public BlobContainer blobContainer(BlobPath path) {
+        return new GoogleCloudStorageBlobContainer(path, this);
+    }
+
+    @Override
+    public void delete(BlobPath path) throws IOException {
+        String keyPath = path.buildAsString("/");
+        // TODO Move this keyPath logic to the buildAsString() method
+        if (!keyPath.isEmpty()) {
+            keyPath = keyPath + "/";
+        }
+        deleteBlobsByPrefix(keyPath);
+    }
+
+    @Override
+    public void close() {
+    }
+
+    /**
+     * Returns true if the given bucket exists
+     *
+     * @param bucketName name of the bucket
+     * @return true if the bucket exists, false otherwise
+     */
+    boolean doesBucketExist(String bucketName) {
+        try {
+            return doPrivileged(() -> {
+                try {
+                    Bucket bucket = client.buckets().get(bucketName).execute();
+                    if (bucket != null) {
+                        return Strings.hasText(bucket.getId());
+                    }
+                } catch (GoogleJsonResponseException e) {
+                    GoogleJsonError error = e.getDetails();
+                    if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
+                        return false;
+                    }
+                    throw e;
+                }
+                return false;
+            });
+        } catch (IOException e) {
+            throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e);
+        }
+    }
+
+    /**
+     * Lists all blobs in the bucket
+     *
+     * @param path base path of the blobs to list
+     * @return a map of blob names and their metadata
+     */
+    Map<String, BlobMetaData> listBlobs(String path) throws IOException {
+        return doPrivileged(() -> listBlobsByPath(bucket, path, path));
+    }
+
+    /**
+     * Lists all blobs in the bucket which have a prefix
+     *
+     * @param path   base path of the blobs to list
+     * @param prefix prefix of the blobs to list
+     * @return a map of blob names and their metadata
+     */
+    Map<String, BlobMetaData> listBlobsByPrefix(String path, String prefix) throws IOException {
+        return doPrivileged(() -> listBlobsByPath(bucket, buildKey(path, prefix), path));
+    }
+
+    /**
+     * Lists all blobs in a given bucket
+     *
+     * @param bucketName   name of the bucket
+     * @param path         base path of the blobs to list
+     * @param pathToRemove if set, this path prefix is removed from the blob names
+     * @return a map of blob names and their metadata
+     */
+    private Map<String, BlobMetaData> listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException {
+        return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS)
+            .map(new BlobMetaDataConverter(pathToRemove))
+            .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity()));
+    }
+
+    /**
+     * Returns true if the blob exists in the bucket
+     *
+     * @param blobName name of the blob
+     * @return true if the blob exists, false otherwise
+     */
+    boolean blobExists(String blobName) throws IOException {
+        return doPrivileged(() -> {
+            try {
+                StorageObject blob = client.objects().get(bucket, blobName).execute();
+                if (blob != null) {
+                    return Strings.hasText(blob.getId());
+                }
+            } catch (GoogleJsonResponseException e) {
+                GoogleJsonError error = e.getDetails();
+                if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
+                    return false;
+                }
+                throw e;
+            }
+            return false;
+        });
+    }
+
+    /**
+     * Returns an {@link java.io.InputStream} for a given blob
+     *
+     * @param blobName name of the blob
+     * @return an InputStream
+     */
+    InputStream readBlob(String blobName) throws IOException {
+        return doPrivileged(() -> {
+            try {
+                Storage.Objects.Get object = client.objects().get(bucket, blobName);
+                return object.executeMediaAsInputStream();
+            } catch (GoogleJsonResponseException e) {
+                GoogleJsonError error = e.getDetails();
+                if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
+                    throw new FileNotFoundException(e.getMessage());
+                }
+                throw e;
+            }
+        });
+    }
+
+    /**
+     * Writes a blob in the bucket.
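+     *
+     * @param blobName    name of the blob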
+     * @param inputStream content of the blob to be written
+     * @param blobSize    expected size of the blob to be written
+     */
+    void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
+        doPrivileged(() -> {
+            InputStreamContent stream = new InputStreamContent(null, inputStream);
+            stream.setLength(blobSize);
+
+            Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream);
+            insert.setName(blobName);
+            insert.execute();
+            return null;
+        });
+    }
+
+    /**
+     * Deletes a blob in the bucket
+     *
+     * @param blobName name of the blob
+     */
+    void deleteBlob(String blobName) throws IOException {
+        doPrivileged(() -> client.objects().delete(bucket, blobName).execute());
+    }
+
+    /**
+     * Deletes multiple blobs in the bucket that have a given prefix
+     *
+     * @param prefix prefix of the blobs to delete
+     */
+    void deleteBlobsByPrefix(String prefix) throws IOException {
+        doPrivileged(() -> {
+            deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet());
+            return null;
+        });
+    }
+
+    /**
+     * Deletes multiple blobs in the given bucket (uses a batch request to perform this)
+     *
+     * @param blobNames names of the blobs to delete
+     */
+    void deleteBlobs(Collection<String> blobNames) throws IOException {
+        if (blobNames == null || blobNames.isEmpty()) {
+            return;
+        }
+
+        if (blobNames.size() == 1) {
+            deleteBlob(blobNames.iterator().next());
+            return;
+        }
+
+        doPrivileged(() -> {
+            final List<Storage.Objects.Delete> deletions = new ArrayList<>();
+            final Iterator<String> blobs = blobNames.iterator();
+
+            while (blobs.hasNext()) {
+                // Create a delete request for each blob to delete
+                deletions.add(client.objects().delete(bucket, blobs.next()));
+
+                if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) {
+                    try {
+                        // Deletions are executed using a batch request
+                        BatchRequest batch = client.batch();
+
+                        // Used to track successful deletions
+                        CountDown countDown = new CountDown(deletions.size());
+
+                        for (Storage.Objects.Delete delete : deletions) {
+                            // Queue the delete request in batch
+                            delete.queue(batch, new JsonBatchCallback<Void>() {
+                                @Override
+                                public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException {
+                                    logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(),
+                                        e.getMessage());
+                                }
+
+                                @Override
+                                public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException {
+                                    countDown.countDown();
+                                }
+                            });
+                        }
+
+                        batch.execute();
+
+                        if (countDown.isCountedDown() == false) {
+                            throw new IOException("Failed to delete all [" + deletions.size() + "] blobs");
+                        }
+                    } finally {
+                        deletions.clear();
+                    }
+                }
+            }
+            return null;
+        });
+    }
+
+    /**
+     * Moves a blob within the same bucket
+     *
+     * @param sourceBlob name of the blob to move
+     * @param targetBlob new name of the blob in the same bucket
+     */
+    void moveBlob(String sourceBlob, String targetBlob) throws IOException {
+        doPrivileged(() -> {
+            // There's no atomic "move" in GCS so we need to copy and delete
+            client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute();
+            client.objects().delete(bucket, sourceBlob).execute();
+            return null;
+        });
+    }
+
+    /**
+     * Executes a {@link PrivilegedExceptionAction} with privileges enabled.
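+     * Every storage operation in this class goes through this method: Elasticsearch runs with
+     * the SecurityManager enabled, and the Google Cloud Storage client needs the permissions
+     * granted in plugin-security.policy, so the caller is first checked for
+     * {@link SpecialPermission} and the action is then run with the plugin's own privileges.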
+     */
+    <T> T doPrivileged(PrivilegedExceptionAction<T> operation) throws IOException {
+        SecurityManager sm = System.getSecurityManager();
+        if (sm != null) {
+            sm.checkPermission(new SpecialPermission());
+        }
+        try {
+            return AccessController.doPrivileged((PrivilegedExceptionAction<T>) operation::run);
+        } catch (PrivilegedActionException e) {
+            throw (IOException) e.getException();
+        }
+    }
+
+    private String buildKey(String keyPath, String s) {
+        assert s != null;
+        return keyPath + s;
+    }
+
+    /**
+     * Converts a {@link StorageObject} to a {@link PlainBlobMetaData}
+     */
+    class BlobMetaDataConverter implements Function<StorageObject, PlainBlobMetaData> {
+
+        private final String pathToRemove;
+
+        BlobMetaDataConverter(String pathToRemove) {
+            this.pathToRemove = pathToRemove;
+        }
+
+        @Override
+        public PlainBlobMetaData apply(StorageObject storageObject) {
+            String blobName = storageObject.getName();
+            if (Strings.hasLength(pathToRemove)) {
+                blobName = blobName.substring(pathToRemove.length());
+            }
+            return new PlainBlobMetaData(blobName, storageObject.getSize().longValue());
+        }
+    }
+
+    /**
+     * Spliterator that can be used to list the storage objects stored in a bucket.
+     */
+    static class StorageObjectsSpliterator implements Spliterator<StorageObject> {
+
+        private final Storage.Objects.List list;
+
+        StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException {
+            list = client.objects().list(bucketName);
+            list.setMaxResults(pageSize);
+            if (prefix != null) {
+                list.setPrefix(prefix);
+            }
+        }
+
+        @Override
+        public boolean tryAdvance(Consumer<? super StorageObject> action) {
+            try {
+                // Retrieves the next page of items
+                Objects objects = list.execute();
+
+                if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) {
+                    return false;
+                }
+
+                // Consumes all the items
+                objects.getItems().forEach(action::accept);
+
+                // Sets the page token of the next page,
+                // null indicates that all items have been consumed
+                String next = objects.getNextPageToken();
+                if (next != null) {
+                    list.setPageToken(next);
+                    return true;
+                }
+
+                return false;
+            } catch (Exception e) {
+                throw new BlobStoreException("Exception while listing objects", e);
+            }
+        }
+
+        @Override
+        public Spliterator<StorageObject> trySplit() {
+            return null;
+        }
+
+        @Override
+        public long estimateSize() {
+            return Long.MAX_VALUE;
+        }
+
+        @Override
+        public int characteristics() {
+            return 0;
+        }
+    }
+
+    /**
+     * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket.
+     */
+    static Stream<StorageObject> blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException {
+        return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false);
+    }
+
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java
new file mode 100644
index 00000000000..8a4bf88ed74
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugin.repository.gcs;
+
+import org.elasticsearch.common.inject.AbstractModule;
+import org.elasticsearch.repositories.gcs.GoogleCloudStorageService;
+
+public class GoogleCloudStorageModule extends AbstractModule {
+
+    @Override
+    protected void configure() {
+        bind(GoogleCloudStorageService.class).to(GoogleCloudStorageService.InternalGoogleCloudStorageService.class).asEagerSingleton();
+    }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java
new file mode 100644
index 00000000000..477a083fad5
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugin.repository.gcs;
+
+import com.google.api.client.auth.oauth2.TokenRequest;
+import com.google.api.client.auth.oauth2.TokenResponse;
+import com.google.api.client.googleapis.json.GoogleJsonError;
+import com.google.api.client.http.GenericUrl;
+import com.google.api.client.http.HttpHeaders;
+import com.google.api.client.json.GenericJson;
+import com.google.api.client.json.webtoken.JsonWebSignature;
+import com.google.api.client.json.webtoken.JsonWebToken;
+import com.google.api.client.util.ClassInfo;
+import com.google.api.client.util.Data;
+import com.google.api.services.storage.Storage;
+import com.google.api.services.storage.model.Bucket;
+import com.google.api.services.storage.model.Objects;
+import com.google.api.services.storage.model.StorageObject;
+import org.elasticsearch.SpecialPermission;
+import org.elasticsearch.common.inject.Module;
+import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.repositories.RepositoriesModule;
+import org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository;
+
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Collection;
+import java.util.Collections;
+
+public class GoogleCloudStoragePlugin extends Plugin {
+
+    public static final String NAME = "repository-gcs";
+
+    static {
+        /*
+         * The Google HTTP client changes access levels because it's silly and we
+         * can't allow that on any old stack, so we pull it here, up front,
+         * so we can cleanly check the permissions for it. Without this, changing
+         * the permission can fail if any part of core is on the stack because
+         * our plugin permissions don't allow core to "reach through" plugins to
+         * change the permission. Because that'd be silly.
+         */
+        SecurityManager sm = System.getSecurityManager();
+        if (sm != null) {
+            sm.checkPermission(new SpecialPermission());
+        }
+        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
+            // ClassInfo caches all the fields of a given class
+            // that are annotated with @Key; at the same time it changes
+            // the field access level using setAccessible(). Calling
+            // it here puts the ClassInfo objects in the cache (they are never evicted)
+            // before the SecurityManager is installed.
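+            // (The boolean argument of ClassInfo.of() is ignoreCase: HTTP header names are
+            // matched case-insensitively, while the JSON model classes below are case-sensitive.)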
+            ClassInfo.of(HttpHeaders.class, true);
+
+            ClassInfo.of(JsonWebSignature.Header.class, false);
+            ClassInfo.of(JsonWebToken.Payload.class, false);
+
+            ClassInfo.of(TokenRequest.class, false);
+            ClassInfo.of(TokenResponse.class, false);
+
+            ClassInfo.of(GenericJson.class, false);
+            ClassInfo.of(GenericUrl.class, false);
+
+            Data.nullOf(GoogleJsonError.ErrorInfo.class);
+            ClassInfo.of(GoogleJsonError.class, false);
+
+            Data.nullOf(Bucket.Cors.class);
+            ClassInfo.of(Bucket.class, false);
+            ClassInfo.of(Bucket.Cors.class, false);
+            ClassInfo.of(Bucket.Lifecycle.class, false);
+            ClassInfo.of(Bucket.Logging.class, false);
+            ClassInfo.of(Bucket.Owner.class, false);
+            ClassInfo.of(Bucket.Versioning.class, false);
+            ClassInfo.of(Bucket.Website.class, false);
+
+            ClassInfo.of(StorageObject.class, false);
+            ClassInfo.of(StorageObject.Owner.class, false);
+
+            ClassInfo.of(Objects.class, false);
+
+            ClassInfo.of(Storage.Buckets.Get.class, false);
+            ClassInfo.of(Storage.Buckets.Insert.class, false);
+
+            ClassInfo.of(Storage.Objects.Get.class, false);
+            ClassInfo.of(Storage.Objects.Insert.class, false);
+            ClassInfo.of(Storage.Objects.Delete.class, false);
+            ClassInfo.of(Storage.Objects.Copy.class, false);
+            ClassInfo.of(Storage.Objects.List.class, false);
+
+            return null;
+        });
+    }
+
+    @Override
+    public String name() {
+        return NAME;
+    }
+
+    @Override
+    public String description() {
+        return "Google Cloud Storage Repository Plugin";
+    }
+
+    @Override
+    public Collection<Module> nodeModules() {
+        return Collections.singletonList(new GoogleCloudStorageModule());
+    }
+
+    public void onModule(RepositoriesModule repositoriesModule) {
+        repositoriesModule.registerRepository(GoogleCloudStorageRepository.TYPE,
+            GoogleCloudStorageRepository.class, BlobStoreIndexShardRepository.class);
+    }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java
new file mode 100644
index 00000000000..337fbcf8d72
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.repositories.gcs;
+
+import com.google.api.services.storage.Storage;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.blobstore.gcs.GoogleCloudStorageBlobStore;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.snapshots.IndexShardRepository;
+import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin;
+import org.elasticsearch.repositories.RepositoryException;
+import org.elasticsearch.repositories.RepositoryName;
+import org.elasticsearch.repositories.RepositorySettings;
+import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
+
+import java.util.function.Function;
+
+import static org.elasticsearch.common.settings.Setting.Property;
+import static org.elasticsearch.common.settings.Setting.boolSetting;
+import static org.elasticsearch.common.settings.Setting.byteSizeSetting;
+import static org.elasticsearch.common.settings.Setting.simpleString;
+import static org.elasticsearch.common.settings.Setting.timeSetting;
+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+
+public class GoogleCloudStorageRepository extends BlobStoreRepository {
+
+    public static final String TYPE = "gcs";
+
+    public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
+
+    public static final Setting<String> BUCKET =
+        simpleString("bucket", Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> BASE_PATH =
+        simpleString("base_path", Property.NodeScope, Property.Dynamic);
+    public static final Setting<Boolean> COMPRESS =
+        boolSetting("compress", false, Property.NodeScope, Property.Dynamic);
+    public static final Setting<ByteSizeValue> CHUNK_SIZE =
+        byteSizeSetting("chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> APPLICATION_NAME =
+        new Setting<>("application_name", GoogleCloudStoragePlugin.NAME, Function.identity(), Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> SERVICE_ACCOUNT =
+        simpleString("service_account", Property.NodeScope, Property.Dynamic, Property.Filtered);
+    public static final Setting<TimeValue> HTTP_READ_TIMEOUT =
+        timeSetting("http.read_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic);
+    public static final Setting<TimeValue> HTTP_CONNECT_TIMEOUT =
+        timeSetting("http.connect_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic);
+
+    private final ByteSizeValue chunkSize;
+    private final boolean compress;
+    private final BlobPath basePath;
+    private final GoogleCloudStorageBlobStore blobStore;
+
+    @Inject
+    public GoogleCloudStorageRepository(RepositoryName repositoryName, RepositorySettings repositorySettings,
+                                        IndexShardRepository indexShardRepository,
+                                        GoogleCloudStorageService storageService) throws Exception {
+        super(repositoryName.getName(), repositorySettings, indexShardRepository);
+
+        String bucket = get(BUCKET, repositoryName, repositorySettings);
+        String application = get(APPLICATION_NAME, repositoryName, repositorySettings);
+        String serviceAccount = get(SERVICE_ACCOUNT, repositoryName, repositorySettings);
+
+        String basePath = BASE_PATH.get(repositorySettings.settings());
+        if (Strings.hasLength(basePath)) {
+            BlobPath path = new BlobPath();
+            for (String elem : basePath.split("/")) {
+                path = path.add(elem);
+            }
+            this.basePath = path;
+        } else {
+            this.basePath = BlobPath.cleanPath();
+        }
+
+        TimeValue connectTimeout = null;
+        TimeValue readTimeout = null;
+
+        TimeValue timeout = HTTP_CONNECT_TIMEOUT.get(repositorySettings.settings());
+        if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) {
+            connectTimeout = timeout;
+        }
+
+        timeout = HTTP_READ_TIMEOUT.get(repositorySettings.settings());
+        if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) {
+            readTimeout = timeout;
+        }
+
+        this.compress = get(COMPRESS, repositoryName, repositorySettings);
+        this.chunkSize = get(CHUNK_SIZE, repositoryName, repositorySettings);
+
+        logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}], application [{}]",
+            bucket, basePath, chunkSize, compress, application);
+
+        Storage client = storageService.createClient(serviceAccount, application, connectTimeout, readTimeout);
+        this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client);
+    }
+
+
+    @Override
+    protected BlobStore blobStore() {
+        return blobStore;
+    }
+
+    @Override
+    protected BlobPath basePath() {
+        return basePath;
+    }
+
+    @Override
+    protected boolean isCompress() {
+        return compress;
+    }
+
+    @Override
+    protected ByteSizeValue chunkSize() {
+        return chunkSize;
+    }
+
+    /**
+     * Get a given setting from the repository settings, throwing a {@link RepositoryException} if the setting does not exist or is empty.
+     */
+    static <T> T get(Setting<T> setting, RepositoryName name, RepositorySettings repositorySettings) {
+        T value = setting.get(repositorySettings.settings());
+        if (value == null) {
+            throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is not defined for repository");
+        }
+        if ((value instanceof String) && (Strings.hasText((String) value)) == false) {
+            throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is empty for repository");
+        }
+        return value;
+    }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java
new file mode 100644
index 00000000000..098ce5f1504
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.repositories.gcs;
+
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
+import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.http.HttpBackOffIOExceptionHandler;
+import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
+import com.google.api.client.http.HttpIOExceptionHandler;
+import com.google.api.client.http.HttpRequest;
+import com.google.api.client.http.HttpRequestInitializer;
+import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
+import com.google.api.client.http.javanet.NetHttpTransport;
+import com.google.api.client.json.jackson2.JacksonFactory;
+import com.google.api.client.util.ExponentialBackOff;
+import com.google.api.services.storage.Storage;
+import com.google.api.services.storage.StorageScopes;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.Environment;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+
+public interface GoogleCloudStorageService {
+
+    /**
+     * Creates a client that can be used to manage Google Cloud Storage objects.
+     *
+     * @param serviceAccount path to service account file
+     * @param application    name of the application
+     * @param connectTimeout connection timeout for HTTP requests
+     * @param readTimeout    read timeout for HTTP requests
+     * @return a Client instance that can be used to manage objects
+     */
+    Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) throws Exception;
+
+    /**
+     * Default implementation
+     */
+    class InternalGoogleCloudStorageService extends AbstractComponent implements GoogleCloudStorageService {
+
+        private static final String DEFAULT = "_default_";
+
+        private final Environment environment;
+
+        @Inject
+        public InternalGoogleCloudStorageService(Settings settings, Environment environment) {
+            super(settings);
+            this.environment = environment;
+        }
+
+        @Override
+        public Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout)
+            throws Exception {
+            try {
+                GoogleCredential credentials = (DEFAULT.equalsIgnoreCase(serviceAccount)) ?
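+                    // "_default_" picks up the Application Default Credentials of the runtime
+                    // environment (for instance on Compute Engine); any other value is resolved
+                    // as a service account JSON file under the node's config directory: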
+                    loadDefault() : loadCredentials(serviceAccount);
+                NetHttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
+
+                Storage.Builder storage = new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(),
+                    new DefaultHttpRequestInitializer(credentials, connectTimeout, readTimeout));
+                storage.setApplicationName(application);
+
+                logger.debug("initializing client with service account [{}/{}]",
+                    credentials.getServiceAccountId(), credentials.getServiceAccountUser());
+                return storage.build();
+            } catch (IOException e) {
+                throw new ElasticsearchException("Error when loading Google Cloud Storage credentials file", e);
+            }
+        }
+
+        /**
+         * Loads the credentials from the given service account file; the resulting
+         * {@link GoogleCredential} is used to authenticate HTTP requests
+         */
+        private GoogleCredential loadCredentials(String serviceAccount) throws IOException {
+            if (serviceAccount == null) {
+                throw new ElasticsearchException("Cannot load Google Cloud Storage service account file from a null path");
+            }
+
+            Path account = environment.configFile().resolve(serviceAccount);
+            if (Files.exists(account) == false) {
+                throw new ElasticsearchException("Unable to find service account file [" + serviceAccount
+                    + "] defined for repository");
+            }
+
+            try (InputStream is = Files.newInputStream(account)) {
+                GoogleCredential credential = GoogleCredential.fromStream(is);
+                if (credential.createScopedRequired()) {
+                    credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL));
+                }
+                return credential;
+            }
+        }
+
+        /**
+         * Loads the default credentials of the environment, e.g. when running on Compute Engine
+         */
+        private GoogleCredential loadDefault() throws IOException {
+            return GoogleCredential.getApplicationDefault();
+        }
+
+        /**
+         * HTTP request initializer that sets timeouts and a backoff handler while deferring authentication to GoogleCredential.
+         * See https://cloud.google.com/storage/transfer/create-client#retry
+         */
+        class DefaultHttpRequestInitializer implements HttpRequestInitializer {
+
+            private final TimeValue connectTimeout;
+            private final TimeValue readTimeout;
+            private final GoogleCredential credential;
+            private final HttpUnsuccessfulResponseHandler handler;
+            private final HttpIOExceptionHandler ioHandler;
+
+            DefaultHttpRequestInitializer(GoogleCredential credential, TimeValue connectTimeout, TimeValue readTimeout) {
+                this.credential = credential;
+                this.connectTimeout = connectTimeout;
+                this.readTimeout = readTimeout;
+                this.handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff());
+                this.ioHandler = new HttpBackOffIOExceptionHandler(newBackOff());
+            }
+
+            @Override
+            public void initialize(HttpRequest request) throws IOException {
+                if (connectTimeout != null) {
+                    request.setConnectTimeout((int) connectTimeout.millis());
+                }
+                if (readTimeout != null) {
+                    request.setReadTimeout((int) readTimeout.millis());
+                }
+
+                request.setIOExceptionHandler(ioHandler);
+                request.setInterceptor(credential);
+
+                request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> {
+                        // Let the credential handle the response. If it failed, we rely on our backoff handler
+                        return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry);
+                    }
+                );
+            }
+
+            private ExponentialBackOff newBackOff() {
+                return new ExponentialBackOff.Builder()
+                    .setInitialIntervalMillis(100)
+                    .setMaxIntervalMillis(6000)
+                    .setMaxElapsedTimeMillis(900000)
+                    .setMultiplier(1.5)
+                    .setRandomizationFactor(0.5)
+                    .build();
+            }
+        }
+    }
+}
diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy
new file mode 100644
index 00000000000..bc7acd60602
--- /dev/null
+++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy
@@ -0,0 +1,26 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+grant {
+  permission java.lang.RuntimePermission "accessDeclaredMembers";
+  permission java.lang.RuntimePermission "setFactory";
+  permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+  permission java.net.URLPermission "http://www.googleapis.com/*", "*";
+  permission java.net.URLPermission "https://www.googleapis.com/*", "*";
+};
diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java
new file mode 100644
index 00000000000..4fe8c718345
--- /dev/null
+++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+
+import java.io.IOException;
+import java.util.Locale;
+
+public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+
+    @Override
+    protected BlobStore newBlobStore() throws IOException {
+        String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName()));
+    }
+}
diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java
new file mode 100644
index 00000000000..b5489466b51
--- /dev/null
+++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.repositories.ESBlobStoreTestCase;
+
+import java.io.IOException;
+import java.util.Locale;
+
+public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase {
+
+    @Override
+    protected BlobStore newBlobStore() throws IOException {
+        String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName()));
+    }
+}
diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java
new file mode 100644
index 00000000000..196fcf12f87
--- /dev/null
+++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java
@@ -0,0 +1,432 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import com.google.api.client.http.HttpTransport;
+import com.google.api.client.http.LowLevelHttpRequest;
+import com.google.api.client.http.LowLevelHttpResponse;
+import com.google.api.client.json.Json;
+import com.google.api.client.json.jackson2.JacksonFactory;
+import com.google.api.client.testing.http.MockLowLevelHttpRequest;
+import com.google.api.client.testing.http.MockLowLevelHttpResponse;
+import com.google.api.services.storage.Storage;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.path.PathTrie;
+import org.elasticsearch.common.util.Callback;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.support.RestUtils;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+
+/**
+ * Mock for {@link HttpTransport} to test the Google Cloud Storage service.
+ *
+ * This basically handles each type of request used by the {@link GoogleCloudStorageBlobStore} and provides appropriate responses as
+ * the Google Cloud Storage service would. It is largely based on the official documentation available at
+ * https://cloud.google.com/storage/docs/json_api/v1/.
+ */
+public class MockHttpTransport extends com.google.api.client.testing.http.MockHttpTransport {
+
+    private final AtomicInteger objectsCount = new AtomicInteger(0);
+    private final Map<String, String> objectsNames = ConcurrentCollections.newConcurrentMap();
+    private final Map<String, byte[]> objectsContent = ConcurrentCollections.newConcurrentMap();
+
+    private final PathTrie<Handler> handlers = new PathTrie<>(RestUtils.REST_DECODER);
+
+    public MockHttpTransport(String bucket) {
+
+        // GET Bucket
+        //
+        // https://cloud.google.com/storage/docs/json_api/v1/buckets/get
+        handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}", (url, params, req) -> {
+            String name = params.get("bucket");
+            if (Strings.hasText(name) == false) {
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing");
+            }
+
+            if (name.equals(bucket)) {
+                return newMockResponse().setContent(buildBucketResource(bucket));
+            } else {
+                return newMockError(RestStatus.NOT_FOUND, "bucket not found");
+            }
+        });
+
+        // GET Object
+        //
+        // https://cloud.google.com/storage/docs/json_api/v1/objects/get
+        handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> {
+            String name = params.get("object");
+            if (Strings.hasText(name) == false) {
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing");
+            }
+
+            for (Map.Entry<String, String> object : objectsNames.entrySet()) {
+                if (object.getValue().equals(name)) {
+                    byte[] content = objectsContent.get(object.getKey());
+                    if (content != null) {
+                        return newMockResponse().setContent(buildObjectResource(bucket, name, object.getKey(), content.length));
+                    }
+                }
+            }
+            return newMockError(RestStatus.NOT_FOUND, "object not found");
+        });
+
+        // Download Object
+        //
+        // https://cloud.google.com/storage/docs/request-endpoints
+        handlers.insert("GET https://www.googleapis.com/download/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> {
+            String name = params.get("object");
+            if (Strings.hasText(name) == false) {
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing");
+            }
+
+            for (Map.Entry<String, String> object : objectsNames.entrySet()) {
+                if (object.getValue().equals(name)) {
+                    byte[] content = objectsContent.get(object.getKey());
+                    if (content == null) {
+                        return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object content is missing");
+                    }
+                    return newMockResponse().setContent(new ByteArrayInputStream(content));
+                }
+            }
+            return newMockError(RestStatus.NOT_FOUND, "object not found");
+        });
+
+        // Insert Object (initialization)
+        //
+        // https://cloud.google.com/storage/docs/json_api/v1/objects/insert
+        handlers.insert("POST https://www.googleapis.com/upload/storage/v1/b/{bucket}/o", (url, params, req) -> {
+            if ("resumable".equals(params.get("uploadType")) == false) {
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable");
+            }
+
+            String name = params.get("name");
+            if (Strings.hasText(name) == false) {
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing");
+            }
+
+            String objectId = String.valueOf(objectsCount.getAndIncrement());
+            objectsNames.put(objectId, name);
+
+            return newMockResponse()
+                .setStatusCode(RestStatus.CREATED.getStatus())
.addHeader("Location", "https://www.googleapis.com/upload/storage/v1/b/" + bucket + + "/o?uploadType=resumable&upload_id=" + objectId); + }); + + // Insert Object (upload) + // + // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + handlers.insert("PUT https://www.googleapis.com/upload/storage/v1/b/{bucket}/o", (url, params, req) -> { + String objectId = params.get("upload_id"); + if (Strings.hasText(objectId) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); + } + + String name = objectsNames.get(objectId); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.NOT_FOUND, "object name not found"); + } + + ByteArrayOutputStream os = new ByteArrayOutputStream((int) req.getContentLength()); + try { + req.getStreamingContent().writeTo(os); + os.close(); + } catch (IOException e) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); + } + + byte[] content = os.toByteArray(); + objectsContent.put(objectId, content); + return newMockResponse().setContent(buildObjectResource(bucket, name, objectId, content.length)); + }); + + // List Objects + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/list + handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}/o", (url, params, req) -> { + String prefix = params.get("prefix"); + + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + builder.field("kind", "storage#objects"); + builder.startArray("items"); + for (Map.Entry o : objectsNames.entrySet()) { + if (prefix != null && o.getValue().startsWith(prefix) == false) { + continue; + } + buildObjectResource(builder, bucket, o.getValue(), o.getKey(), objectsContent.get(o.getKey()).length); + } + builder.endArray(); + builder.endObject(); + return newMockResponse().setContent(builder.string()); + } catch (IOException e) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); + } + }); + + // Delete Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/delete + handlers.insert("DELETE https://www.googleapis.com/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> { + String name = params.get("object"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + + String objectId = null; + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(name)) { + objectId = object.getKey(); + break; + } + } + + if (objectId != null) { + objectsNames.remove(objectId); + objectsContent.remove(objectId); + return newMockResponse().setStatusCode(RestStatus.NO_CONTENT.getStatus()); + } + return newMockError(RestStatus.NOT_FOUND, "object not found"); + }); + + // Copy Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/copy + handlers.insert("POST https://www.googleapis.com/storage/v1/b/{srcBucket}/o/{srcObject}/copyTo/b/{destBucket}/o/{destObject}", + (url, params, req) -> { + String source = params.get("srcObject"); + if (Strings.hasText(source) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); + } + + String dest = params.get("destObject"); + if (Strings.hasText(dest) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); + } + + String srcObjectId = null; + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(source)) { + srcObjectId = object.getKey(); + break; + } + } + 
+                if (srcObjectId == null) {
+                    return newMockError(RestStatus.NOT_FOUND, "source object not found");
+                }
+
+                byte[] content = objectsContent.get(srcObjectId);
+                if (content == null) {
+                    return newMockError(RestStatus.NOT_FOUND, "source content can not be found");
+                }
+
+                String destObjectId = String.valueOf(objectsCount.getAndIncrement());
+                objectsNames.put(destObjectId, dest);
+                objectsContent.put(destObjectId, content);
+
+                return newMockResponse().setContent(buildObjectResource(bucket, dest, destObjectId, content.length));
+            });
+
+        // Batch
+        //
+        // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch
+        handlers.insert("POST https://www.googleapis.com/batch", (url, params, req) -> {
+            List<MockLowLevelHttpResponse> responses = new ArrayList<>();
+
+            // A batch request body looks like this:
+            //
+            // --__END_OF_PART__
+            // Content-Length: 71
+            // Content-Type: application/http
+            // content-id: 1
+            // content-transfer-encoding: binary
+            //
+            // DELETE https://www.googleapis.com/storage/v1/b/ohifkgu/o/foo%2Ftest
+            //
+            //
+            // --__END_OF_PART__
+            // Content-Length: 71
+            // Content-Type: application/http
+            // content-id: 2
+            // content-transfer-encoding: binary
+            //
+            // DELETE https://www.googleapis.com/storage/v1/b/ohifkgu/o/bar%2Ftest
+            //
+            //
+            // --__END_OF_PART__--
+
+            // Here we simply process the request body line by line and delegate to other handlers
+            // if possible.
+            try (ByteArrayOutputStream os = new ByteArrayOutputStream((int) req.getContentLength())) {
+                req.getStreamingContent().writeTo(os);
+
+                Streams.readAllLines(new ByteArrayInputStream(os.toByteArray()), new Callback<String>() {
+                    @Override
+                    public void handle(String line) {
+                        Handler handler = handlers.retrieve(line, params);
+                        if (handler != null) {
+                            try {
+                                responses.add(handler.execute(line, params, req));
+                            } catch (IOException e) {
+                                responses.add(newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()));
+                            }
+                        }
+                    }
+                });
+            }
+
+            // Now we can build the response
+            String boundary = "__END_OF_PART__";
+            String sep = "--";
+            String line = "\r\n";
+
+            StringBuilder builder = new StringBuilder();
+            for (MockLowLevelHttpResponse resp : responses) {
+                builder.append(sep).append(boundary).append(line);
+                builder.append(line);
+                builder.append("HTTP/1.1 ").append(resp.getStatusCode()).append(' ').append(resp.getReasonPhrase()).append(line);
+                builder.append("Content-Length: ").append(resp.getContentLength()).append(line);
+                builder.append(line);
+            }
+            builder.append(line);
+            builder.append(sep).append(boundary).append(sep);
+
+            return newMockResponse().setContentType("multipart/mixed; boundary=" + boundary).setContent(builder.toString());
+        });
+    }
+
+    @Override
+    public LowLevelHttpRequest buildRequest(String method, String url) throws IOException {
+        return new MockLowLevelHttpRequest() {
+            @Override
+            public LowLevelHttpResponse execute() throws IOException {
+                String rawPath = url;
+                Map<String, String> params = new HashMap<>();
+
+                int pathEndPos = url.indexOf('?');
+                if (pathEndPos != -1) {
+                    rawPath = url.substring(0, pathEndPos);
+                    RestUtils.decodeQueryString(url, pathEndPos + 1, params);
+                }
+
+                Handler handler = handlers.retrieve(method + " " + rawPath, params);
+                if (handler != null) {
+                    return handler.execute(rawPath, params, this);
+                }
+                return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "Unable to handle request [method=" + method + ", url=" + url + "]");
+            }
+        };
+    }
+
+    private static MockLowLevelHttpResponse newMockResponse() {
+        return new MockLowLevelHttpResponse()
+            .setContentType(Json.MEDIA_TYPE)
.setStatusCode(RestStatus.OK.getStatus()) + .setReasonPhrase(RestStatus.OK.name()); + } + + private static MockLowLevelHttpResponse newMockError(RestStatus status, String message) { + MockLowLevelHttpResponse response = newMockResponse().setStatusCode(status.getStatus()).setReasonPhrase(status.name()); + try { + response.setContent(buildErrorResource(status, message)); + } catch (IOException e) { + response.setContent("Failed to build error resource [" + message + "] because of: " + e.getMessage()); + } + return response; + } + + /** + * Storage Error JSON representation + */ + private static String buildErrorResource(RestStatus status, String message) throws IOException { + return jsonBuilder() + .startObject() + .startObject("error") + .field("code", status.getStatus()) + .field("message", message) + .startArray("errors") + .startObject() + .field("domain", "global") + .field("reason", status.toString()) + .field("message", message) + .endObject() + .endArray() + .endObject() + .endObject() + .string(); + } + + /** + * Storage Bucket JSON representation as defined in + * https://cloud.google.com/storage/docs/json_api/v1/bucket#resource + */ + private static String buildBucketResource(String name) throws IOException { + return jsonBuilder().startObject() + .field("kind", "storage#bucket") + .field("id", name) + .endObject() + .string(); + } + + /** + * Storage Object JSON representation as defined in + * https://cloud.google.com/storage/docs/json_api/v1/objects#resource + */ + private static XContentBuilder buildObjectResource(XContentBuilder builder, String bucket, String name, String id, int size) + throws IOException { + return builder.startObject() + .field("kind", "storage#object") + .field("id", String.join("/", bucket, name, id)) + .field("name", name) + .field("size", String.valueOf(size)) + .endObject(); + } + + private static String buildObjectResource(String bucket, String name, String id, int size) throws IOException { + return buildObjectResource(jsonBuilder(), bucket, name, id, size).string(); + } + + interface Handler { + MockLowLevelHttpResponse execute(String url, Map params, MockLowLevelHttpRequest request) throws IOException; + } + + /** + * Instantiates a mocked Storage client for tests. + */ + public static Storage newStorage(String bucket, String applicationName) { + return new Storage.Builder(new MockHttpTransport(bucket), new JacksonFactory(), null) + .setApplicationName(applicationName) + .build(); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java new file mode 100644 index 00000000000..c5b57ba6cd6 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.gcs; + +import com.google.api.services.storage.Storage; +import org.elasticsearch.common.blobstore.gcs.MockHttpTransport; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.plugin.repository.gcs.GoogleCloudStorageModule; +import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.ESBlobStoreRepositoryIntegTestCase; +import org.junit.BeforeClass; + +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase { + + private static final String BUCKET = "gcs-repository-test"; + + // Static storage client shared among all nodes in order to act like a remote repository service: + // all nodes must see the same content + private static final AtomicReference<Storage> storage = new AtomicReference<>(); + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(MockGoogleCloudStoragePlugin.class); + } + + @Override + protected void createTestRepository(String name) { + assertAcked(client().admin().cluster().preparePutRepository(name) + .setType(GoogleCloudStorageRepository.TYPE) + .setSettings(Settings.builder() + .put("bucket", BUCKET) + .put("base_path", GoogleCloudStorageBlobStoreRepositoryTests.class.getSimpleName()) + .put("service_account", "_default_") + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); + } + + @BeforeClass + public static void setUpStorage() { + storage.set(MockHttpTransport.newStorage(BUCKET, GoogleCloudStorageBlobStoreRepositoryTests.class.getName())); + } + + public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin { + + public MockGoogleCloudStoragePlugin() { + } + + @Override + public Collection<Module> nodeModules() { + return Collections.singletonList(new MockGoogleCloudStorageModule()); + } + } + + public static class MockGoogleCloudStorageModule extends GoogleCloudStorageModule { + @Override + protected void configure() { + bind(GoogleCloudStorageService.class).to(MockGoogleCloudStorageService.class).asEagerSingleton(); + } + } + + public static class MockGoogleCloudStorageService implements GoogleCloudStorageService { + + @Override + public Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) throws + Exception { + return storage.get(); + } + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java new file mode 100644 index 00000000000..18862d05aa0 --- /dev/null +++
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.gcs; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class GoogleCloudStorageRepositoryRestIT extends ESRestTestCase { + + public GoogleCloudStorageRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return createParameters(0, 1); + } +} + diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml new file mode 100644 index 00000000000..a37fb779549 --- /dev/null +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for Repository GCS component +# +"Repository GCS loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: repository-gcs } diff --git a/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 b/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ 
+b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java new file mode 100644 index 00000000000..8bd2451da57 --- /dev/null +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { + + @SuppressForbidden(reason = "manipulates system properties for testing") + public void testPathHome() throws Exception { + final String pathHome = System.getProperty("es.path.home"); + final String value = randomAsciiOfLength(16); + System.setProperty("es.path.home", value); + + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", value)); + }); + + System.clearProperty("es.path.home"); + final String commandLineValue = randomAsciiOfLength(16); + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", commandLineValue)); + }, + "-Epath.home=" + commandLineValue); + + if (pathHome != null) System.setProperty("es.path.home", pathHome); + else System.clearProperty("es.path.home"); + } + +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index af36d96f442..22b2ef39a88 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.UserError; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.settings.Settings; @@ -54,8 +55,10 @@ import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; +import 
java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; @@ -129,7 +132,7 @@ public class InstallPluginCommandTests extends ESTestCase { } /** Creates a test environment with bin, config and plugins directories. */ - static Environment createEnv(FileSystem fs, Function temp) throws IOException { + static Tuple createEnv(FileSystem fs, Function temp) throws IOException { Path home = temp.apply("install-plugin-command-tests"); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); @@ -140,7 +143,7 @@ public class InstallPluginCommandTests extends ESTestCase { Settings settings = Settings.builder() .put("path.home", home) .build(); - return new Environment(settings); + return Tuple.tuple(home, new Environment(settings)); } static Path createPluginDir(Function temp) throws IOException { @@ -185,20 +188,22 @@ public class InstallPluginCommandTests extends ESTestCase { return writeZip(structure, "elasticsearch"); } - static MockTerminal installPlugin(String pluginUrl, Environment env) throws Exception { - return installPlugin(pluginUrl, env, false); + static MockTerminal installPlugin(String pluginUrl, Path home) throws Exception { + return installPlugin(pluginUrl, home, false); } - static MockTerminal installPlugin(String pluginUrl, Environment env, boolean jarHellCheck) throws Exception { + static MockTerminal installPlugin(String pluginUrl, Path home, boolean jarHellCheck) throws Exception { + Map settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new InstallPluginCommand(env) { + new InstallPluginCommand() { @Override void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { if (jarHellCheck) { super.jarHellCheck(candidate, pluginsDir); } } - }.execute(terminal, pluginUrl, true); + }.execute(terminal, pluginUrl, true, settings); return terminal; } @@ -275,192 +280,176 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testSomethingWorks() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testSpaceInUrl() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); Path pluginZipWithSpaces = createTempFile("foo bar", ".zip"); try (InputStream in = new URL(pluginZip).openStream()) { Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING); } - installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testMalformedUrlNotMaven() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); // has two colons, so it appears similar to maven coordinates - MalformedURLException e = expectThrows(MalformedURLException.class, () -> { - installPlugin("://host:1234", env); - }); + MalformedURLException e = expectThrows(MalformedURLException.class, () -> 
installPlugin("://host:1234", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("no protocol")); } public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(fs, temp); - Files.delete(env.pluginsFile()); + Tuple env = createEnv(fs, temp); + Files.delete(env.v2().pluginsFile()); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { pluginsAttrs.setPermissions(new HashSet<>()); String pluginZip = createPlugin("fake", pluginDir); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.pluginsFile().toString())); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString())); } - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBuiltinModule() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testJarHell() throws Exception { // jar hell test needs a real filesystem assumeTrue("real filesystem", isReal); - Environment environment = createEnv(fs, temp); + Tuple environment = createEnv(fs, temp); Path pluginDirectory = createPluginDir(temp); writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin"); String pluginZip = createPlugin("fake", pluginDirectory); // adds plugin.jar with FakePlugin - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - installPlugin(pluginZip, environment, true); - }); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, environment.v1(), true)); assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - assertInstallCleaned(environment); + assertInstallCleaned(environment.v2()); } public void testIsolatedPlugins() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); // these both share the same FakePlugin class Path pluginDir1 = createPluginDir(temp); String pluginZip1 = createPlugin("fake1", pluginDir1); - installPlugin(pluginZip1, env); + installPlugin(pluginZip1, env.v1()); Path pluginDir2 = createPluginDir(temp); String pluginZip2 = createPlugin("fake2", pluginDir2); - installPlugin(pluginZip2, env); - assertPlugin("fake1", pluginDir1, env); - assertPlugin("fake2", pluginDir2, env); + installPlugin(pluginZip2, env.v1()); + assertPlugin("fake1", pluginDir1, env.v2()); + 
assertPlugin("fake2", pluginDir2, env.v2()); } public void testExistingPlugin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + installPlugin(pluginZip, env.v1()); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testBinNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("elasticsearch", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.binFile().resolve("elasticsearch").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binFile().resolve("elasticsearch").toString())); + assertInstallCleaned(env.v2()); } public void testBinPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path 
binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.binFile())) { + try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { Set perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); binAttrs.setPermissions(perms); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } } public void testConfig() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("custom.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testExistingConfig() throws Exception { - Environment env = createEnv(fs, temp); - Path envConfigDir = env.configFile().resolve("fake"); + Tuple env = createEnv(fs, temp); + Path envConfigDir = env.v2().configFile().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yaml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path pluginDir = createPluginDir(temp); @@ -469,8 +458,8 @@ public class InstallPluginCommandTests extends ESTestCase { Files.write(configDir.resolve("custom.yaml"), "new config".getBytes(StandardCharsets.UTF_8)); Files.createFile(configDir.resolve("other.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); List configLines = Files.readAllLines(envConfigDir.resolve("custom.yaml"), StandardCharsets.UTF_8); assertEquals(1, configLines.size()); assertEquals("existing config", configLines.get(0)); @@ -478,80 +467,68 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testConfigNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInConfigDir = pluginDir.resolve("config").resolve("foo"); Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); 
assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("myconfig.yml")); String pluginZip = createPlugin("elasticsearch.yml", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.configFile().resolve("elasticsearch.yml").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().configFile().resolve("elasticsearch.yml").toString())); + assertInstallCleaned(env.v2()); } public void testMissingDescriptor() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, "elasticsearch"); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> { - installPlugin(pluginZip, env); - }); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testMissingDirectory() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testZipRelativeOutsideEntryName() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); } String pluginZip = zip.toUri().toURL().toString(); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index f26857e19af..1422280165c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,35 +25,47 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import 
java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.stream.Collectors; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.common.inject.spi.HasDependencies; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.Version; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class ListPluginsCommandTests extends ESTestCase { - Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal listPlugins(Environment env) throws Exception { - return listPlugins(env, new String[0]); + static MockTerminal listPlugins(Path home) throws Exception { + return listPlugins(home, new String[0]); } - static MockTerminal listPlugins(Environment env, String[] args) throws Exception { + static MockTerminal listPlugins(Path home, String[] args) throws Exception { + String[] argsAndHome = new String[args.length + 1]; + System.arraycopy(args, 0, argsAndHome, 0, args.length); + argsAndHome[args.length] = "-Epath.home=" + home; MockTerminal terminal = new MockTerminal(); - int status = new ListPluginsCommand(env).main(args, terminal); + int status = new ListPluginsCommand().main(argsAndHome, terminal); assertEquals(ExitCodes.OK, status); return terminal; } @@ -74,49 +86,42 @@ public class ListPluginsCommandTests extends ESTestCase { public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(); Files.delete(env.pluginsFile()); - IOException e = expectThrows(IOException.class, () -> { - listPlugins(env); - }); + IOException e = expectThrows(IOException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Plugins directory missing: " + env.pluginsFile()); } public void testNoPlugins() throws Exception { - MockTerminal terminal = listPlugins(createEnv()); + MockTerminal terminal = listPlugins(home); assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty()); } public void testOnePlugin() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake")); } public void testTwoPlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake2", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2")); } public void testPluginWithVerbose() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin", "- Plugin 
information:", "Name: fake_plugin", "Description: fake desc", "Version: 1.0", " * Classname: org.fake")); } public void testPluginWithVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", "Description: fake desc 1", "Version: 1.0", " * Classname: org.fake", "fake_plugin2", "- Plugin information:", "Name: fake_plugin2", @@ -124,26 +129,23 @@ public class ListPluginsCommandTests extends ESTestCase { } public void testPluginWithoutVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); - MockTerminal terminal = listPlugins(env, new String[0]); + MockTerminal terminal = listPlugins(home, new String[0]); String output = terminal.getOutput(); assertEquals(output, buildMultiline("fake_plugin1", "fake_plugin2")); } public void testPluginWithoutDescriptorFile() throws Exception{ - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake1")); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(env)); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(home)); assertEquals(e.getFile(), env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString()); } public void testPluginWithWrongDescriptorFile() throws Exception{ - Environment env = createEnv(); PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake1"), "description", "fake desc"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(env)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Property [name] is missing in [" + env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]"); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index d9d5661b834..6528bbc0911 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.UserError; @@ -30,25 +32,32 @@ import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class RemovePluginCommandTests extends ESTestCase { - /** Creates a test environment with bin, config and plugins directories. 
*/ - static Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal removePlugin(String name, Environment env) throws Exception { + static MockTerminal removePlugin(String name, Path home) throws Exception { + Map settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new RemovePluginCommand(env).execute(terminal, name); + new RemovePluginCommand().execute(terminal, name, settings); return terminal; } @@ -63,33 +72,28 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testMissing() throws Exception { - Environment env = createEnv(); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("dne", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("dne", home)); assertTrue(e.getMessage(), e.getMessage().contains("Plugin dne not found")); assertRemoveCleaned(env); } public void testBasic() throws Exception { - Environment env = createEnv(); Files.createDirectory(env.pluginsFile().resolve("fake")); Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); Files.createDirectory(env.pluginsFile().resolve("other")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.pluginsFile().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake")); Path binDir = env.binFile().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); @@ -97,14 +101,12 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testBinNotDir() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("elasticsearch")); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("elasticsearch", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("elasticsearch", home)); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertTrue(Files.exists(env.pluginsFile().resolve("elasticsearch"))); // did not remove assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertRemoveCleaned(env); } + } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 63c09890acc..f9cdf5b4f66 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -84,29 +84,10 @@ public class TribeUnitTests 
extends ESTestCase { tribe2 = null; } - public void testThatTribeClientsIgnoreGlobalSysProps() throws Exception { - System.setProperty("es.cluster.name", "tribe_node_cluster"); - System.setProperty("es.tribe.t1.cluster.name", "tribe1"); - System.setProperty("es.tribe.t2.cluster.name", "tribe2"); - System.setProperty("es.tribe.t1.node_id.seed", Long.toString(random().nextLong())); - System.setProperty("es.tribe.t2.node_id.seed", Long.toString(random().nextLong())); - - try { - assertTribeNodeSuccessfullyCreated(Settings.EMPTY); - } finally { - System.clearProperty("es.cluster.name"); - System.clearProperty("es.tribe.t1.cluster.name"); - System.clearProperty("es.tribe.t2.cluster.name"); - System.clearProperty("es.tribe.t1.node_id.seed"); - System.clearProperty("es.tribe.t2.node_id.seed"); - } - } - public void testThatTribeClientsIgnoreGlobalConfig() throws Exception { Path pathConf = getDataPath("elasticsearch.yml").getParent(); Settings settings = Settings .builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) .build(); assertTribeNodeSuccessfullyCreated(settings); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 9976f072d42..6297ce244f9 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -75,7 +75,6 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { Settings clientSettings = Settings.builder() .put("node.name", "qa_smoke_client_" + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
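A recurring change in the test hunks above and below: settings previously passed as `es.`-prefixed system properties (or `-Ees.*` flags) are now passed as bare `-E` node settings. A minimal sketch of the two invocation styles, with hypothetical example paths:

--------------------------------
# new convention: bare setting keys passed via -E
bin/elasticsearch -Epath.conf=/etc/elasticsearch -Epath.home=/usr/share/elasticsearch

# old convention removed by this change: es.-prefixed keys
bin/elasticsearch -Ees.path.conf=/etc/elasticsearch
--------------------------------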
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java index f58351b2e7d..8a24ac2408f 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java @@ -24,7 +24,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.mustache.MustacheScriptEngineService; @@ -48,7 +47,7 @@ public abstract class AbstractMustacheTestCase extends ESTestCase { new ScriptEngineRegistry(Collections.singletonList( new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, - ScriptMode.ON))); + true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(settings, new Environment(settings), Collections.singleton(mustache), null, diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml index 00c6e814eed..54a79ac1e32 100644 --- a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml @@ -102,7 +102,7 @@ - match: {batches: 1} --- -"Setting bogus ctx is an error": +"Setting bogus context is an error": - do: index: index: twitter @@ -113,7 +113,7 @@ indices.refresh: {} - do: - catch: /Invalid fields added to ctx \[junk\]/ + catch: /Invalid fields added to context \[junk\]/ update_by_query: index: twitter body: diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index bd5cd499015..362b5d60341 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -103,7 +103,7 @@ fi echo "CONF_FILE=$CONF_FILE" >> /etc/sysconfig/elasticsearch; fi - run_elasticsearch_service 1 -Ees.default.config="$CONF_FILE" + run_elasticsearch_service 1 -Edefault.config="$CONF_FILE" # remove settings again otherwise cleaning up before next testrun will fail if is_dpkg ; then @@ -289,6 +289,10 @@ fi install_and_check_plugin repository azure azure-storage-*.jar } +@test "[$GROUP] install repository-gcs plugin" { + install_and_check_plugin repository gcs google-api-services-storage-*.jar +} + @test "[$GROUP] install repository-s3 plugin" { install_and_check_plugin repository s3 aws-java-sdk-core-*.jar } @@ -387,6 +391,10 @@ fi remove_plugin repository-azure } +@test "[$GROUP] remove repository-gcs plugin" { + remove_plugin repository-gcs +} + @test "[$GROUP] remove 
repository-hdfs plugin" { remove_plugin repository-hdfs } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 5f50dfc2850..c4dc8c96f58 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -340,7 +340,7 @@ run_elasticsearch_service() { local CONF_DIR="" local ES_PATH_CONF="" else - local ES_PATH_CONF="-Ees.path.conf=$CONF_DIR" + local ES_PATH_CONF="-Epath.conf=$CONF_DIR" fi # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0 local background="" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json index 2ae42c089d3..8bb85ca087a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json @@ -16,6 +16,10 @@ "type" : "boolean", "description" : "Return an explanation of why the commands can or cannot be executed" }, + "retry_failed": { + "type" : "boolean", + "description" : "Retries allocation of shards that are blocked due to too many subsequent allocation failures" + }, "metric": { "type": "list", "options": ["_all", "blocks", "metadata", "nodes", "routing_table", "master_node", "version"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json new file mode 100644 index 00000000000..834133138b2 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -0,0 +1,207 @@ +{ + "delete_by_query": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-delete-by-query.html", + "methods": ["POST"], + "url": { + "path": "/{index}/_delete_by_query", + "paths": ["/{index}/_delete_by_query", "/{index}/{type}/_delete_by_query"], + "comment": "most things below this are just copied from search.json", + "parts": { + "index": { + "required" : true, + "type" : "list", + "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" + }, + "type": { + "type" : "list", + "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" + } + }, + "params": { + "analyzer": { + "type" : "string", + "description" : "The analyzer to use for the query string" + }, + "analyze_wildcard": { + "type" : "boolean", + "description" : "Specify whether wildcard and prefix queries should be analyzed (default: false)" + }, + "default_operator": { + "type" : "enum", + "options" : ["AND","OR"], + "default" : "OR", + "description" : "The default operator for query string query (AND or OR)" + }, + "df": { + "type" : "string", + "description" : "The field to use as default where no field prefix is given in the query string" + }, + "explain": { + "type" : "boolean", + "description" : "Specify whether to return detailed information about score computation as part of a hit" + }, + "fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as part of a hit" + }, + "fielddata_fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as the field data 
representation of a field for each hit" + }, + "from": { + "type" : "number", + "description" : "Starting offset (default: 0)" + }, + "ignore_unavailable": { + "type" : "boolean", + "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type" : "boolean", + "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "conflicts": { + "note": "This is not copied from search", + "type" : "enum", + "options": ["abort", "proceed"], + "default": "abort", + "description" : "What to do when the delete-by-query hits version conflicts?" + }, + "expand_wildcards": { + "type" : "enum", + "options" : ["open","closed","none","all"], + "default" : "open", + "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." + }, + "lenient": { + "type" : "boolean", + "description" : "Specify whether format-based query failures (such as providing text to a numeric field) should be ignored" + }, + "lowercase_expanded_terms": { + "type" : "boolean", + "description" : "Specify whether query terms should be lowercased" + }, + "preference": { + "type" : "string", + "description" : "Specify the node or shard the operation should be performed on (default: random)" + }, + "q": { + "type" : "string", + "description" : "Query in the Lucene query string syntax" + }, + "routing": { + "type" : "list", + "description" : "A comma-separated list of specific routing values" + }, + "scroll": { + "type" : "duration", + "description" : "Specify how long a consistent view of the index should be maintained for scrolled search" + }, + "search_type": { + "type" : "enum", + "options" : ["query_then_fetch", "dfs_query_then_fetch"], + "description" : "Search operation type" + }, + "search_timeout": { + "type" : "time", + "description" : "Explicit timeout for each search request. Defaults to no timeout." + }, + "size": { + "type" : "number", + "description" : "Number of hits to return (default: 10)" + }, + "sort": { + "type" : "list", + "description" : "A comma-separated list of <field>:<direction> pairs" + }, + "_source": { + "type" : "list", + "description" : "True or false to return the _source field or not, or a list of fields to return" + }, + "_source_exclude": { + "type" : "list", + "description" : "A list of fields to exclude from the returned _source field" + }, + "_source_include": { + "type" : "list", + "description" : "A list of fields to extract and return from the _source field" + }, + "terminate_after": { + "type" : "number", + "description" : "The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early." + }, + "stats": { + "type" : "list", + "description" : "Specific 'tag' of the request for logging and statistical purposes" + }, + "suggest_field": { + "type" : "string", + "description" : "Specify which field to use for suggestions" + }, + "suggest_mode": { + "type" : "enum", + "options" : ["missing", "popular", "always"], + "default" : "missing", + "description" : "Specify suggest mode" + }, + "suggest_size": { + "type" : "number", + "description" : "How many suggestions to return in response" + }, + "suggest_text": { + "type" : "text", + "description" : "The source text for which the suggestions should be returned" + }, + "track_scores": { + "type" : "boolean", + "description": "Whether to calculate and return scores even if they are not used for sorting" + }, + "version": { + "type" : "boolean", + "description" : "Specify whether to return document version as part of a hit" + }, + "request_cache": { + "type" : "boolean", + "description" : "Specify if request cache should be used for this request or not, defaults to index level setting" + }, + "refresh": { + "type" : "boolean", + "description" : "Should the affected indices be refreshed?" + }, + "timeout": { + "type" : "time", + "default": "1m", + "description" : "Time each individual bulk request should wait for shards that are unavailable." + }, + "consistency": { + "type" : "enum", + "options" : ["one", "quorum", "all"], + "description" : "Explicit write consistency setting for the operation" + }, + "scroll_size": { + "type": "integer", + "default": 100, + "description": "Size on the scroll request powering the delete_by_query" + }, + "wait_for_completion": { + "type" : "boolean", + "default": false, + "description" : "Should the request block until the delete-by-query is complete." + }, + "requests_per_second": { + "type": "float", + "default": 0, + "description": "The throttle for this request in sub-requests per second. 0 means set no throttle."
+ } + } + }, + "body": { + "description": "The search definition using the Query DSL", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json index 0dc0088dd5c..80210a2048e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json @@ -1,6 +1,6 @@ { "reindex": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", "methods": ["POST"], "url": { "path": "/_reindex", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json index 4903c7598c3..10d6321212d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json @@ -1,10 +1,10 @@ { "reindex.rethrottle": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", "methods": ["POST"], "url": { "path": "/_reindex/{task_id}/_rethrottle", - "paths": ["/_reindex/{task_id}/_rethrottle", "/_update_by_query/{task_id}/_rethrottle"], + "paths": ["/_reindex/{task_id}/_rethrottle", "/_update_by_query/{task_id}/_rethrottle", "/_delete_by_query/{task_id}/_rethrottle"], "parts": { "task_id": { "type": "string", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json index 506828beaf7..69d21f4ec1d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json @@ -1,6 +1,6 @@ { "tasks.cancel": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-cancel.html", + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", "methods": ["POST"], "url": { "path": "/_tasks", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index a2e2b5e916c..313dbe00c46 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -1,6 +1,6 @@ { "update_by_query": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html", "methods": ["POST"], "url": { "path": "/{index}/_update_by_query", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index dfafd833509..97ffae1f802 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -48,7 +48,6 @@ merges.total_docs .+ \n merges.total_size .+ \n merges.total_time .+ \n - percolate.queries .+ \n refresh.total .+ \n refresh.time .+ \n search.fetch_current .+ \n diff --git 
a/settings.gradle b/settings.gradle index 3a8b0f66210..88217a9dde5 100644 --- a/settings.gradle +++ b/settings.gradle @@ -37,6 +37,7 @@ List projects = [ 'plugins:mapper-murmur3', 'plugins:mapper-size', 'plugins:repository-azure', + 'plugins:repository-gcs', 'plugins:repository-hdfs', 'plugins:repository-s3', 'plugins:jvm-example', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java new file mode 100644 index 00000000000..aa327ae2546 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.hamcrest.CoreMatchers.equalTo; + +abstract class ESElasticsearchCliTestCase extends ESTestCase { + + interface InitConsumer { + void accept(final boolean foreground, final String pidFile, final Map<String, String> esSettings); + } + + void runTest( + final int expectedStatus, + final boolean expectedInit, + final Consumer<String> outputConsumer, + final InitConsumer initConsumer, + String...
args) throws Exception { + final MockTerminal terminal = new MockTerminal(); + try { + final AtomicBoolean init = new AtomicBoolean(); + final int status = Elasticsearch.main(args, new Elasticsearch() { + @Override + void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) { + init.set(true); + initConsumer.accept(!daemonize, pidFile, esSettings); + } + }, terminal); + assertThat(status, equalTo(expectedStatus)); + assertThat(init.get(), equalTo(expectedInit)); + outputConsumer.accept(terminal.getOutput()); + } catch (Throwable t) { + // if an unexpected exception is thrown, we log + // terminal output to aid debugging + logger.info(terminal.getOutput()); + // rethrow so the test fails + throw t; + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java similarity index 94% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 291d00a8dde..8462cf007f0 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; @@ -25,6 +25,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.InputStream; @@ -32,9 +33,9 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.test.ESBlobStoreTestCase.writeRandomBlob; -import static org.elasticsearch.test.ESBlobStoreTestCase.randomBytes; -import static org.elasticsearch.test.ESBlobStoreTestCase.readBlobFully; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.writeRandomBlob; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.readBlobFully; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java similarity index 91% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java index dc49683de63..2ffd30fa470 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.HashSet; @@ -59,7 +60,8 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase String snapshotName = randomAsciiName(); logger.info("--> create snapshot {}:{}", repoName, snapshotName); - assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName).setWaitForCompletion(true).setIndices(indexNames)); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true).setIndices(indexNames)); List<String> deleteIndices = randomSubsetOf(randomIntBetween(0, indexCount), indexNames); if (deleteIndices.size() > 0) { @@ -99,6 +101,9 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase for (int i = 0; i < indexCount; i++) { assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]); } + + logger.info("--> delete snapshot {}:{}", repoName, snapshotName); + assertAcked(client().admin().cluster().prepareDeleteSnapshot(repoName, snapshotName).get()); } public void testMultipleSnapshotAndRollback() throws Exception { @@ -130,7 +135,8 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase // Check number of documents in this iteration docCounts[i] = (int) client().prepareSearch(indexName).setSize(0).get().getHits().totalHits(); logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); - assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName)); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i) + .setWaitForCompletion(true).setIndices(indexName)); } int restoreOperations = randomIntBetween(1, 3); @@ -142,10 +148,17 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase assertAcked(client().admin().indices().prepareClose(indexName)); logger.info("--> restore index from the snapshot"); - assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore).setWaitForCompletion(true)); + assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore) + .setWaitForCompletion(true)); + ensureGreen(); assertHitCount(client().prepareSearch(indexName).setSize(0).get(), docCounts[iterationToRestore]); } + + for (int i = 0; i < iterationCount; i++) { + logger.info("--> delete snapshot {}:{}", repoName, snapshotName + "-" + i); + assertAcked(client().admin().cluster().prepareDeleteSnapshot(repoName, snapshotName + "-" + i).get()); + } } protected void addRandomDocuments(String name, int numDocs) throws ExecutionException, InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java
b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java similarity index 97% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java index 80432d628ef..be7431795b2 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java @@ -16,12 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; import org.junit.Test; import java.io.IOException; diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 309b6622156..4e06bbe8b45 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; @@ -70,7 +69,7 @@ public class MockScriptEngine implements ScriptEngineService { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, - MockScriptEngine.NAME, ScriptMode.ON)); + MockScriptEngine.NAME, true)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 98b17c2323e..8f62617ee7b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; -import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; @@ -124,7 +123,6 @@ import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.hamcrest.Matchers; -import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -1826,23 +1824,6 @@ public abstract class ESIntegTestCase extends ESTestCase { return perTestRatio; } - /** - * Returns a random JODA Time Zone based on Java Time Zones - */ - public static DateTimeZone randomDateTimeZone() { - DateTimeZone timeZone; - - // It sounds like some Java Time Zones are unknown by JODA. 
For example: Asia/Riyadh88 - // We need to fallback in that case to a known time zone - try { - timeZone = DateTimeZone.forTimeZone(RandomizedTest.randomTimeZone()); - } catch (IllegalArgumentException e) { - timeZone = DateTimeZone.forOffsetHours(randomIntBetween(-12, 12)); - } - - return timeZone; - } - /** * Returns path to a random directory that can be used to create a temporary file system repo */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 1e75f3d8261..7875f8fd20b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -185,7 +185,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("http.enabled", false) .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3dbbf25e202..67d93c6887a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -61,6 +61,7 @@ import org.elasticsearch.search.MockSearchService; import org.elasticsearch.test.junit.listeners.LoggingListener; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; import org.elasticsearch.threadpool.ThreadPool; +import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -398,6 +399,15 @@ public abstract class ESTestCase extends LuceneTestCase { return randomTimeValue(1, 1000); } + /** + * Generates a random DateTimeZone from the ones available in the Joda library + */ + public static DateTimeZone randomDateTimeZone() { + List<String> ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); + Collections.sort(ids); + return DateTimeZone.forID(randomFrom(ids)); + } + /** * helper to randomly perform on consumer with value */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 4625aa77e25..efdd6bad90f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -51,7 +51,6 @@ import java.util.concurrent.TimeUnit; final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 5372c319dae..71fe622d8c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -73,7 +73,6 @@
public final class ExternalTestCluster extends TestCluster { Settings clientSettings = Settings.builder() .put(additionalSettings) .put("node.name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 3199a27b9a5..995ca5480f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -291,11 +291,10 @@ public final class InternalTestCluster extends TestCluster { builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put(TransportSettings.PORT.getKey(), TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); - builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true); builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode); builder.put("http.pipelining", enableHttpPipelining); - if (Strings.hasLength(System.getProperty("es.logger.level"))) { - builder.put("logger.level", System.getProperty("es.logger.level")); + if (Strings.hasLength(System.getProperty("tests.logger.level"))) { + builder.put("logger.level", System.getProperty("tests.logger.level")); } if (Strings.hasLength(System.getProperty("es.logger.prefix"))) { builder.put("logger.prefix", System.getProperty("es.logger.prefix")); @@ -319,14 +318,14 @@ public final class InternalTestCluster extends TestCluster { public static String configuredNodeMode() { Builder builder = Settings.builder(); - if (Strings.isEmpty(System.getProperty("es.node.mode")) && Strings.isEmpty(System.getProperty("es.node.local"))) { + if (Strings.isEmpty(System.getProperty("node.mode")) && Strings.isEmpty(System.getProperty("node.local"))) { return "local"; // default if nothing is specified } - if (Strings.hasLength(System.getProperty("es.node.mode"))) { - builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("es.node.mode")); + if (Strings.hasLength(System.getProperty("node.mode"))) { + builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("node.mode")); } - if (Strings.hasLength(System.getProperty("es.node.local"))) { - builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("es.node.local")); + if (Strings.hasLength(System.getProperty("node.local"))) { + builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("node.local")); } if (DiscoveryNode.isLocalNode(builder.build())) { return "local"; @@ -882,7 +881,6 @@ public final class InternalTestCluster extends TestCluster { .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ? 
Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(settings); if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 5cac904df35..d8ea1f60bbd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -73,7 +72,6 @@ public class TestSearchContext extends SearchContext { final IndexService indexService; final IndexFieldDataService indexFieldDataService; final BitsetFilterCache fixedBitSetFilterCache; - final PercolatorQueryCache percolatorQueryCache; final ThreadPool threadPool; final Map<Class<?>, Collector> queryCollectors = new HashMap<>(); final IndexShard indexShard; @@ -101,7 +99,6 @@ public class TestSearchContext extends SearchContext { this.indexService = indexService; this.indexFieldDataService = indexService.fieldData(); this.fixedBitSetFilterCache = indexService.cache().bitsetFilterCache(); - this.percolatorQueryCache = indexService.cache().getPercolatorQueryCache(); this.threadPool = threadPool; this.indexShard = indexService.getShardOrNull(0); this.scriptService = scriptService; @@ -116,7 +113,6 @@ public class TestSearchContext extends SearchContext { this.indexFieldDataService = null; this.threadPool = null; this.fixedBitSetFilterCache = null; - this.percolatorQueryCache = null; this.indexShard = null; scriptService = null; this.queryShardContext = queryShardContext; @@ -323,11 +319,6 @@ public class TestSearchContext extends SearchContext { return indexFieldDataService; } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return percolatorQueryCache; - } - @Override public long timeoutInMillis() { return 0; diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 6142edb9394..75bc916ab94 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -137,10 +137,10 @@ public class ReproduceInfoPrinter extends RunListener { } public ReproduceErrorMessageBuilder appendESProperties() { - appendProperties("es.logger.level"); + appendProperties("tests.logger.level"); if (inVerifyPhase()) { // these properties only make sense for integration tests - appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, + appendProperties("node.mode", "node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", diff --git
a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 5fb6e199b17..cb35653b103 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -19,16 +19,15 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; - import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; -import org.apache.http.conn.ssl.SSLContexts; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContexts; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -134,7 +133,8 @@ public class RestClient implements Closeable { * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ - public RestResponse callApi(String apiName, Map<String, String> params, String body, Map<String, String> headers) throws IOException, RestException { + public RestResponse callApi(String apiName, Map<String, String> params, String body, Map<String, String> headers) + throws IOException, RestException { List<Integer> ignores = new ArrayList<>(); Map<String, String> requestParams = null; @@ -220,7 +220,8 @@ public class RestClient implements Closeable { if (restApi.getParams().contains(entry.getKey()) || ALWAYS_ACCEPTED_QUERY_STRING_PARAMS.contains(entry.getKey())) { httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } else { - throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api"); + throw new IllegalArgumentException("param [" + entry.getKey() + + "] not supported in [" + restApi.getName() + "] api"); } } } @@ -293,10 +294,8 @@ public class RestClient implements Closeable { try (InputStream is = Files.newInputStream(path)) { keyStore.load(is, keystorePass.toCharArray()); } - SSLContext sslcontext = SSLContexts.custom() - .loadTrustMaterial(keyStore, null) - .build(); - sslsf = new SSLConnectionSocketFactory(sslcontext, StrictHostnameVerifier.INSTANCE); + SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); + sslsf = new SSLConnectionSocketFactory(sslcontext); } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { throw new RuntimeException(e); } @@ -308,7 +307,8 @@ public class RestClient implements Closeable { .register("http", PlainConnectionSocketFactory.getSocketFactory()) .register("https", sslsf) .build(); - return HttpClients.createMinimal(new PoolingHttpClientConnectionManager(socketFactoryRegistry, null, null, null, 15, TimeUnit.SECONDS)); + return HttpClients.createMinimal( + new PoolingHttpClientConnectionManager(socketFactoryRegistry, null, null, null, 15, TimeUnit.SECONDS)); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java deleted file mode 100644 index
33a92ceb417..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.rest.client; - -import org.apache.http.conn.ssl.X509HostnameVerifier; -import org.apache.http.conn.util.InetAddressUtils; - -import javax.net.ssl.SSLException; -import javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocket; -import java.io.IOException; -import java.security.cert.X509Certificate; - -/** - * A custom {@link X509HostnameVerifier} implementation that wraps calls to the {@link org.apache.http.conn.ssl.StrictHostnameVerifier} and - * properly handles IPv6 addresses that come from a URL in the form http://[::1]:9200/ by removing the surrounding brackets. - * - * This is a variation of the fix for HTTPCLIENT-1698, which is not - * released yet as of Apache HttpClient 4.5.1 - */ -final class StrictHostnameVerifier implements X509HostnameVerifier { - - static final StrictHostnameVerifier INSTANCE = new StrictHostnameVerifier(); - - // We need to wrap the default verifier for HttpClient since we use an older version and the following issue is not - // fixed in a released version yet https://issues.apache.org/jira/browse/HTTPCLIENT-1698 - // TL;DR we need to strip '[' and ']' from IPv6 addresses if they come from a URL - private final X509HostnameVerifier verifier = new org.apache.http.conn.ssl.StrictHostnameVerifier(); - - private StrictHostnameVerifier() {} - - @Override - public boolean verify(String host, SSLSession sslSession) { - return verifier.verify(stripBracketsIfNecessary(host), sslSession); - } - - @Override - public void verify(String host, SSLSocket ssl) throws IOException { - verifier.verify(stripBracketsIfNecessary(host), ssl); - } - - @Override - public void verify(String host, X509Certificate cert) throws SSLException { - verifier.verify(stripBracketsIfNecessary(host), cert); - } - - @Override - public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException { - verifier.verify(stripBracketsIfNecessary(host), cns, subjectAlts); - } - - private String stripBracketsIfNecessary(String host) { - if (host.startsWith("[") && host.endsWith("]")) { - String newHost = host.substring(1, host.length() - 1); - assert InetAddressUtils.isIPv6Address(newHost); - return newHost; - } - return host; - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java deleted file mode 100644 index 7bbda67fbdb..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java +++ /dev/null @@ 
-1,120 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.rest.client; - -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocket; -import javax.security.auth.x500.X500Principal; -import java.security.cert.Certificate; -import java.security.cert.X509Certificate; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Tests for the {@link StrictHostnameVerifier} to validate that it can verify IPv6 addresses with and without bracket notation, in - * addition to other address types. - */ -public class StrictHostnameVerifierTests extends ESTestCase { - - private static final int IP_SAN_TYPE = 7; - private static final int DNS_SAN_TYPE = 2; - - private static final String[] CNS = new String[] { "my node" }; - private static final String[] IP_SANS = new String[] { "127.0.0.1", "192.168.1.1", "::1" }; - private static final String[] DNS_SANS = new String[] { "localhost", "computer", "localhost6" }; - - private SSLSocket sslSocket; - private SSLSession sslSession; - private X509Certificate certificate; - - @Before - public void setupMocks() throws Exception { - sslSocket = mock(SSLSocket.class); - sslSession = mock(SSLSession.class); - certificate = mock(X509Certificate.class); - Collection<List<?>> subjectAlternativeNames = new ArrayList<>(); - for (String san : IP_SANS) { - subjectAlternativeNames.add(Arrays.asList(IP_SAN_TYPE, san)); - } - for (String san : DNS_SANS) { - subjectAlternativeNames.add(Arrays.asList(DNS_SAN_TYPE, san)); - } - - when(sslSocket.getSession()).thenReturn(sslSession); - when(sslSession.getPeerCertificates()).thenReturn(new Certificate[] { certificate }); - when(certificate.getSubjectX500Principal()).thenReturn(new X500Principal("CN=" + CNS[0])); - when(certificate.getSubjectAlternativeNames()).thenReturn(subjectAlternativeNames); - } - - public void testThatIPv6WithBracketsWorks() throws Exception { - final String ipv6Host = "[::1]"; - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); - } - - public void testThatIPV6WithoutBracketWorks() throws Exception { - final String ipv6Host = "::1"; - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host,
sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); - } - - public void testThatIPV4Works() throws Exception { - final String ipv4Host = randomFrom("127.0.0.1", "192.168.1.1"); - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSession)); - } - - public void testThatHostnameWorks() throws Exception { - final String host = randomFrom(DNS_SANS); - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(host, CNS, DNS_SANS); - StrictHostnameVerifier.INSTANCE.verify(host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(host, sslSession)); - } -} diff --git a/test/framework/src/main/resources/log4j.properties b/test/framework/src/main/resources/log4j.properties index 22f54ef68e5..11a864df0f3 100644 --- a/test/framework/src/main/resources/log4j.properties +++ b/test/framework/src/main/resources/log4j.properties @@ -1,5 +1,5 @@ -es.logger.level=INFO -log4j.rootLogger=${es.logger.level}, out +tests.logger.level=INFO +log4j.rootLogger=${tests.logger.level}, out log4j.logger.org.apache.http=INFO, out log4j.additivity.org.apache.http=false diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index ee567aae779..d6cd3eea5ac 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.test.TestSearchContext; public class MockSearchServiceTests extends ESTestCase { public void testAssertNoInFlightContext() { SearchContext s = new TestSearchContext(new QueryShardContext(new IndexSettings(IndexMetaData.PROTO, Settings.EMPTY), null, null, - null, null, null, null, null, null, null, null)) { + null, null, null, null, null, null, null)) { @Override public SearchShardTarget shardTarget() { return new SearchShardTarget("node", new Index("idx", "ignored"), 0);
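For context, here is a minimal sketch of how a subclass might drive the new ESElasticsearchCliTestCase harness added above. The subclass name, the --version flag, and the expected "Version" output fragment are illustrative assumptions, not part of this patch; only runTest(...) and its parameter order come from the base class, and ExitCodes.OK is assumed from the existing org.elasticsearch.cli package.

// Hypothetical subclass; it lives in org.elasticsearch.bootstrap because the base class is package-private.
package org.elasticsearch.bootstrap;

import org.elasticsearch.cli.ExitCodes;

public class ElasticsearchCliTests extends ESElasticsearchCliTestCase {

    public void testVersionDoesNotStartNode() throws Exception {
        runTest(
                ExitCodes.OK,  // expected exit status for a successful invocation
                false,         // init(...) should never run when only the version is printed
                output -> assertTrue(output, output.contains("Version")), // assumed output fragment
                (foreground, pidFile, esSettings) -> {},                  // nothing to verify on init
                "--version");
    }
}

Routing both the exit status and the captured MockTerminal output through a single runTest call keeps CLI assertions in one place, so subclasses only describe their expectations.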