Merge branch 'master' into feature/rank-eval

Christoph Büscher 2016-10-27 11:11:37 +02:00
commit 51a2e3bf1e
208 changed files with 1844 additions and 831 deletions

View File (checkstyle_suppressions.xml)

@@ -63,8 +63,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]validate[/\\]template[/\\]RenderSearchTemplateRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]validate[/\\]template[/\\]TransportRenderSearchTemplateAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]Alias.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]IndicesAliasesRequest.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]IndicesAliasesRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]TransportIndicesAliasesAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]exists[/\\]TransportAliasesExistAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]alias[/\\]get[/\\]BaseAliasesRequestBuilder.java" checks="LineLength" />
@@ -116,8 +114,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]delete[/\\]TransportDeleteIndexTemplateAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]get[/\\]GetIndexTemplatesRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]get[/\\]TransportGetIndexTemplatesAction.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]put[/\\]PutIndexTemplateRequest.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]put[/\\]PutIndexTemplateRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]put[/\\]TransportPutIndexTemplateAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]IndexUpgradeStatus.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]upgrade[/\\]get[/\\]TransportUpgradeStatusAction.java" checks="LineLength" />
@@ -228,7 +224,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]support[/\\]AbstractClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]support[/\\]TransportProxyClient.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateObserver.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateUpdateTask.java" checks="LineLength" />
@@ -243,7 +238,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]AutoExpandReplicas.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]IndexMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]IndexNameExpressionResolver.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]IndexTemplateMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MappingMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MetaDataCreateIndexService.java" checks="LineLength" />
@@ -256,7 +250,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]IndexRoutingTable.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]IndexShardRoutingTable.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]OperationRouting.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingNode.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingNodes.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]RoutingTable.java" checks="LineLength" />
@@ -307,13 +300,10 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]PrioritizedEsThreadPoolExecutor.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadBarrier.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadContext.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]XContentBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]XContentFactory.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]XContentHelper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]smile[/\\]SmileXContent.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]xcontent[/\\]support[/\\]XContentMapValues.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]Discovery.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]DiscoveryModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]DiscoveryService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]DiscoverySettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]local[/\\]LocalDiscovery.java" checks="LineLength" />
@@ -379,7 +369,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]CompletionFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]LegacyDateFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]LegacyDoubleFieldMapper.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]LegacyFloatFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]LegacyNumberFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]StringFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]LegacyTokenCountFieldMapper.java" checks="LineLength" />
@@ -410,7 +399,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryParsers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MatchQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQuery.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]GeoDistanceRangeQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]IndexedGeoBoundingBoxQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]CommitPoint.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexEventListener.java" checks="LineLength" />
@@ -454,7 +442,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]ttl[/\\]IndicesTTLService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]GcNames.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]HotThreads.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmStats.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]internal[/\\]InternalSettingsPreparer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
@@ -473,7 +460,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestCountAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestNodesAction.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistry.java" checks="LineLength" />
@@ -544,8 +530,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]DfsSearchResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchPhase.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSearchResult.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]InternalSearchHit.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]ShardSearchTransportRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
@@ -586,7 +570,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]get[/\\]GetIndexIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]shards[/\\]IndicesShardStoreRequestIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]shards[/\\]IndicesShardStoreResponseTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]indices[/\\]template[/\\]put[/\\]MetaDataIndexTemplateServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkProcessorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequestTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]RetryTests.java" checks="LineLength" />
@@ -780,7 +763,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ExternalFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoEncodingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoPointFieldMapperTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoPointFieldTypeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeoShapeFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]GeohashMappingGeoPointTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]IdFieldMapperTests.java" checks="LineLength" />
@@ -808,10 +790,8 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoolQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoostingQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]CommonTermsQueryBuilderTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]FieldMaskingSpanQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]GeoDistanceQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]HasChildQueryBuilderTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]HasParentQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MoreLikeThisQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MultiMatchQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]RandomQueryBuilder.java" checks="LineLength" />
@@ -842,7 +822,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexlifecycle[/\\]IndexLifecycleActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndexingMemoryControllerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesLifecycleListenerIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesLifecycleListenerSingleNodeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesOptionsIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]AnalyzeActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]HunspellServiceIT.java" checks="LineLength" />
@@ -870,7 +849,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]stats[/\\]IndexStatsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStoreIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStoreTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]template[/\\]SimpleIndexTemplateIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmGcMonitorServiceSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginInfoTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="LineLength" />
@@ -969,7 +947,6 @@
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]IndexedExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" />
- <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovySecurityTests.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolateRequest.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolateRequestBuilder.java" checks="LineLength" />

View File (ElasticsearchException.java)

@@ -693,7 +693,9 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
ShardStateAction.NoLongerPrimaryShardException::new, 142),
SCRIPT_EXCEPTION(org.elasticsearch.script.ScriptException.class, org.elasticsearch.script.ScriptException::new, 143),
NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144),
- STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145);
+ STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145),
+ TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class,
+     org.elasticsearch.tasks.TaskCancelledException::new, 146);
final Class<? extends ElasticsearchException> exceptionClass;
final FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException> constructor;
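
The hunk above registers the new TaskCancelledException next to the existing entries, giving it the next free wire id (146) and a StreamInput constructor reference so the exception can be rebuilt on the receiving node. The following minimal Java sketch illustrates only that id-to-constructor registration idea; ExceptionRegistry and its methods are invented for the example and are not Elasticsearch classes.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Minimal sketch (not Elasticsearch code): each exception type is registered once
// under a stable numeric id together with a constructor reference, so the reading
// side of a connection can rebuild the exception from the id it receives.
final class ExceptionRegistry {
    private static final Map<Integer, Function<String, RuntimeException>> BY_ID = new HashMap<>();

    static void register(int id, Function<String, RuntimeException> constructor) {
        if (BY_ID.putIfAbsent(id, constructor) != null) {
            throw new IllegalStateException("wire id already taken: " + id);
        }
    }

    static RuntimeException read(int id, String message) {
        return BY_ID.get(id).apply(message);
    }

    public static void main(String[] args) {
        // Analogous to STATUS_EXCEPTION(..., 145) and the newly added TASK_CANCELLED_EXCEPTION(..., 146).
        register(145, IllegalStateException::new);
        register(146, java.util.concurrent.CancellationException::new);
        System.out.println(read(146, "task cancelled").getClass().getSimpleName());
    }
}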

View File (TransportRolloverAction.java)

@@ -149,7 +149,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
} else {
// conditions not met
listener.onResponse(
- new RolloverResponse(sourceIndexName, sourceIndexName, conditionResults, false, false, false, false)
+ new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, false, false, false, false)
);
}
}
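
The one-line fix above makes the conditions-not-met response carry the computed rollover target (rolloverIndexName) instead of repeating the source index name, presumably so callers can still see which index a rollover would have produced. A tiny, self-contained sketch of that distinction; the RolloverResult type and its fields are invented for illustration and are not the Elasticsearch RolloverResponse.

// Invented RolloverResult stand-in; only the source/target naming mirrors the fix above.
final class RolloverResponseSketch {
    record RolloverResult(String oldIndex, String newIndex, boolean conditionsMet, boolean rolledOver) {}

    static RolloverResult evaluate(String sourceIndexName, String rolloverIndexName, boolean conditionsMet) {
        if (conditionsMet) {
            return new RolloverResult(sourceIndexName, rolloverIndexName, true, true);
        }
        // After the fix, the target name is reported even when nothing is rolled over,
        // instead of echoing sourceIndexName twice.
        return new RolloverResult(sourceIndexName, rolloverIndexName, false, false);
    }

    public static void main(String[] args) {
        System.out.println(evaluate("logs-000001", "logs-000002", false));
    }
}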

View File (AbstractSearchAsyncAction.java)

@@ -59,6 +59,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
protected final SearchRequest request;
/** Used by subclasses to resolve node ids to DiscoveryNodes. **/
protected final Function<String, DiscoveryNode> nodeIdToDiscoveryNode;
+ protected final SearchTask task;
protected final int expectedSuccessfulOps;
private final int expectedTotalOps;
protected final AtomicInteger successfulOps = new AtomicInteger();
@@ -74,12 +75,13 @@
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
Map<String, AliasFilter> aliasFilter, Executor executor, SearchRequest request,
ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
- long clusterStateVersion) {
+ long clusterStateVersion, SearchTask task) {
super(startTime);
this.logger = logger;
this.searchTransportService = searchTransportService;
this.executor = executor;
this.request = request;
+ this.task = task;
this.listener = listener;
this.nodeIdToDiscoveryNode = nodeIdToDiscoveryNode;
this.clusterStateVersion = clusterStateVersion;
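
This constructor change, and the matching edits in the Search*AsyncAction classes below, thread a SearchTask from the caller into every outgoing shard-level call (sendExecuteDfs, sendExecuteQuery, sendExecuteFetch), so shard requests stay associated with the task that spawned them. A minimal, self-contained sketch of that threading pattern follows; the Transport and SearchTask types here are simplified stand-ins, not the Elasticsearch classes touched by this commit.

// Simplified stand-ins (not Elasticsearch types) showing how the task handle is
// accepted once in the constructor and then passed along with every shard request.
final class TaskThreadingSketch {
    record SearchTask(long id, String action) {}

    interface Transport {
        void sendExecuteQuery(String node, String shardRequest, SearchTask task, Runnable onResponse);
    }

    static final class AsyncAction {
        private final Transport transport;
        private final SearchTask task; // mirrors the new "protected final SearchTask task;" field

        AsyncAction(Transport transport, SearchTask task) {
            this.transport = transport;
            this.task = task;          // mirrors the new "this.task = task;" assignment
        }

        void executeQueryPhase(String node, String shardRequest) {
            // The task now travels with each outgoing call, as in sendExecuteQuery(node, request, task, listener).
            transport.sendExecuteQuery(node, shardRequest, task,
                () -> System.out.println("shard response for task " + task.id()));
        }
    }

    public static void main(String[] args) {
        Transport transport = (node, request, task, onResponse) -> onResponse.run();
        new AsyncAction(transport, new SearchTask(42, "indices:data/read/search"))
            .executeQueryPhase("node_1", "query shard 0");
    }
}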

View File (SearchDfsQueryAndFetchAsyncAction.java)

@@ -49,9 +49,9 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
- GroupShardsIterator shardsIts, long startTime, long clusterStateVersion) {
+ GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, SearchTask task) {
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
- request, listener, shardsIts, startTime, clusterStateVersion);
+ request, listener, shardsIts, startTime, clusterStateVersion, task);
this.searchPhaseController = searchPhaseController;
queryFetchResults = new AtomicArray<>(firstResults.length());
}
@@ -64,7 +64,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
ActionListener<DfsSearchResult> listener) {
- searchTransportService.sendExecuteDfs(node, request, listener);
+ searchTransportService.sendExecuteDfs(node, request, task, listener);
}
@Override
@@ -82,7 +82,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
void executeSecondPhase(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter,
final DiscoveryNode node, final QuerySearchRequest querySearchRequest) {
- searchTransportService.sendExecuteFetch(node, querySearchRequest, new ActionListener<QueryFetchSearchResult>() {
+ searchTransportService.sendExecuteFetch(node, querySearchRequest, task, new ActionListener<QueryFetchSearchResult>() {
@Override
public void onResponse(QueryFetchSearchResult result) {
result.shardTarget(dfsResult.shardTarget());

View File (SearchDfsQueryThenFetchAsyncAction.java)

@@ -57,9 +57,10 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
- GroupShardsIterator shardsIts, long startTime, long clusterStateVersion) {
+ GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
+ SearchTask task) {
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
- request, listener, shardsIts, startTime, clusterStateVersion);
+ request, listener, shardsIts, startTime, clusterStateVersion, task);
this.searchPhaseController = searchPhaseController;
queryResults = new AtomicArray<>(firstResults.length());
fetchResults = new AtomicArray<>(firstResults.length());
@@ -74,7 +75,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
ActionListener<DfsSearchResult> listener) {
- searchTransportService.sendExecuteDfs(node, request, listener);
+ searchTransportService.sendExecuteDfs(node, request, task, listener);
}
@Override
@@ -91,7 +92,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
void executeQuery(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter,
final QuerySearchRequest querySearchRequest, final DiscoveryNode node) {
- searchTransportService.sendExecuteQuery(node, querySearchRequest, new ActionListener<QuerySearchResult>() {
+ searchTransportService.sendExecuteQuery(node, querySearchRequest, task, new ActionListener<QuerySearchResult>() {
@Override
public void onResponse(QuerySearchResult result) {
result.shardTarget(dfsResult.shardTarget());
@@ -162,7 +163,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, final AtomicInteger counter,
final ShardFetchSearchRequest fetchSearchRequest, DiscoveryNode node) {
- searchTransportService.sendExecuteFetch(node, fetchSearchRequest, new ActionListener<FetchSearchResult>() {
+ searchTransportService.sendExecuteFetch(node, fetchSearchRequest, task, new ActionListener<FetchSearchResult>() {
@Override
public void onResponse(FetchSearchResult result) {
result.shardTarget(shardTarget);

View File (SearchQueryAndFetchAsyncAction.java)

@@ -43,9 +43,10 @@ class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetc
Map<String, AliasFilter> aliasFilter,
SearchPhaseController searchPhaseController, Executor executor,
SearchRequest request, ActionListener<SearchResponse> listener,
- GroupShardsIterator shardsIts, long startTime, long clusterStateVersion) {
+ GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
+ SearchTask task) {
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
- request, listener, shardsIts, startTime, clusterStateVersion);
+ request, listener, shardsIts, startTime, clusterStateVersion, task);
this.searchPhaseController = searchPhaseController;
}
@@ -58,7 +59,7 @@ class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetc
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
ActionListener<QueryFetchSearchResult> listener) {
- searchTransportService.sendExecuteFetch(node, request, listener);
+ searchTransportService.sendExecuteFetch(node, request, task, listener);
}
@Override

View File (SearchQueryThenFetchAsyncAction.java)

@@ -54,9 +54,10 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
AliasFilter> aliasFilter,
SearchPhaseController searchPhaseController, Executor executor,
SearchRequest request, ActionListener<SearchResponse> listener,
- GroupShardsIterator shardsIts, long startTime, long clusterStateVersion) {
+ GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
+ SearchTask task) {
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor, request, listener,
- shardsIts, startTime, clusterStateVersion);
+ shardsIts, startTime, clusterStateVersion, task);
this.searchPhaseController = searchPhaseController;
fetchResults = new AtomicArray<>(firstResults.length());
docIdsToLoad = new AtomicArray<>(firstResults.length());
@@ -70,7 +71,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
ActionListener<QuerySearchResultProvider> listener) {
- searchTransportService.sendExecuteQuery(node, request, listener);
+ searchTransportService.sendExecuteQuery(node, request, task, listener);
}
@Override
@@ -97,7 +98,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, final AtomicInteger counter,
final ShardFetchSearchRequest fetchSearchRequest, DiscoveryNode node) {
- searchTransportService.sendExecuteFetch(node, fetchSearchRequest, new ActionListener<FetchSearchResult>() {
+ searchTransportService.sendExecuteFetch(node, fetchSearchRequest, task, new ActionListener<FetchSearchResult>() {
@Override
public void onResponse(FetchSearchResult result) {
result.shardTarget(shardTarget);

View File (SearchRequest.java)

@@ -31,6 +31,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+ import org.elasticsearch.tasks.Task;
+ import org.elasticsearch.tasks.TaskId;
import java.io.IOException;
import java.util.Arrays;
@@ -275,6 +277,11 @@ public final class SearchRequest extends ActionRequest<SearchRequest> implements
return source != null && source.isSuggestOnly();
}
+ @Override
+ public Task createTask(long id, String type, String action, TaskId parentTaskId) {
+     return new SearchTask(id, type, action, getDescription(), parentTaskId);
+ }
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
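
The SearchRequest hunk overrides createTask so a running search is represented by a dedicated SearchTask carrying the request's description, rather than a generic Task. A small, self-contained sketch of that override pattern, using invented stand-in types whose shapes merely echo the signature in the diff:

// Stand-in types only; the shape of createTask mirrors the override added above.
final class CreateTaskSketch {
    static class TaskId {
        final String value;
        TaskId(String value) { this.value = value; }
    }

    static class Task {
        final long id; final String type; final String action; final String description; final TaskId parentTaskId;
        Task(long id, String type, String action, String description, TaskId parentTaskId) {
            this.id = id; this.type = type; this.action = action; this.description = description; this.parentTaskId = parentTaskId;
        }
    }

    static class SearchTask extends Task {
        SearchTask(long id, String type, String action, String description, TaskId parentTaskId) {
            super(id, type, action, description, parentTaskId);
        }
    }

    static class Request {
        String getDescription() { return "a generic request"; }
        Task createTask(long id, String type, String action, TaskId parentTaskId) {
            return new Task(id, type, action, getDescription(), parentTaskId);
        }
    }

    static class SearchRequest extends Request {
        @Override
        String getDescription() { return "search over [logs-*]"; } // illustrative description only

        @Override
        Task createTask(long id, String type, String action, TaskId parentTaskId) {
            // Same shape as the hunk above: delegate to a search-specific task type.
            return new SearchTask(id, type, action, getDescription(), parentTaskId);
        }
    }

    public static void main(String[] args) {
        Task task = new SearchRequest().createTask(7L, "transport", "indices:data/read/search", new TaskId("node:1"));
        System.out.println(task.getClass().getSimpleName() + " -> " + task.description);
    }
}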

View File (SearchScrollQueryAndFetchAsyncAction.java)

@@ -44,6 +44,7 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
private final SearchPhaseController searchPhaseController;
private final SearchTransportService searchTransportService;
private final SearchScrollRequest request;
+ private final SearchTask task;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
@@ -52,13 +53,14 @@
private final AtomicInteger successfulOps;
private final AtomicInteger counter;
- SearchScrollQueryAndFetchAsyncAction(Logger logger, ClusterService clusterService,
- SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
- SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
+ SearchScrollQueryAndFetchAsyncAction(Logger logger, ClusterService clusterService, SearchTransportService searchTransportService,
+ SearchPhaseController searchPhaseController, SearchScrollRequest request, SearchTask task,
+ ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.logger = logger;
this.searchPhaseController = searchPhaseController;
this.searchTransportService = searchTransportService;
this.request = request;
+ this.task = task;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
@@ -128,7 +130,7 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) {
InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request);
- searchTransportService.sendExecuteFetch(node, internalRequest, new ActionListener<ScrollQueryFetchSearchResult>() {
+ searchTransportService.sendExecuteFetch(node, internalRequest, task, new ActionListener<ScrollQueryFetchSearchResult>() {
@Override
public void onResponse(ScrollQueryFetchSearchResult result) {
queryFetchResults.set(shardIndex, result.result());

View File (SearchScrollQueryThenFetchAsyncAction.java)

@@ -44,6 +44,7 @@ import static org.elasticsearch.action.search.TransportSearchHelper.internalScro
class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
private final Logger logger;
+ private final SearchTask task;
private final SearchTransportService searchTransportService;
private final SearchPhaseController searchPhaseController;
private final SearchScrollRequest request;
@@ -56,13 +57,14 @@
private volatile ScoreDoc[] sortedShardDocs;
private final AtomicInteger successfulOps;
- SearchScrollQueryThenFetchAsyncAction(Logger logger, ClusterService clusterService,
- SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
- SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
+ SearchScrollQueryThenFetchAsyncAction(Logger logger, ClusterService clusterService, SearchTransportService searchTransportService,
+ SearchPhaseController searchPhaseController, SearchScrollRequest request, SearchTask task,
+ ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.logger = logger;
this.searchTransportService = searchTransportService;
this.searchPhaseController = searchPhaseController;
this.request = request;
+ this.task = task;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
@@ -124,7 +126,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
private void executeQueryPhase(final int shardIndex, final AtomicInteger counter, DiscoveryNode node, final long searchId) {
InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request);
- searchTransportService.sendExecuteQuery(node, internalRequest, new ActionListener<ScrollQuerySearchResult>() {
+ searchTransportService.sendExecuteQuery(node, internalRequest, task, new ActionListener<ScrollQuerySearchResult>() {
@Override
public void onResponse(ScrollQuerySearchResult result) {
queryResults.set(shardIndex, result.queryResult());
@@ -184,7 +186,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index];
ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.id(), docIds, lastEmittedDoc);
DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId());
- searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, new ActionListener<FetchSearchResult>() {
+ searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task, new ActionListener<FetchSearchResult>() {
@Override
public void onResponse(FetchSearchResult result) {
result.shardTarget(querySearchResult.shardTarget());


@@ -25,6 +25,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.search.Scroll;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskId;
 import java.io.IOException;
 import java.util.Objects;
@@ -107,6 +109,11 @@ public class SearchScrollRequest extends ActionRequest<SearchScrollRequest> {
         out.writeOptionalWriteable(scroll);
     }
+    @Override
+    public Task createTask(long id, String type, String action, TaskId parentTaskId) {
+        return new SearchTask(id, type, action, getDescription(), parentTaskId);
+    }
     @Override
     public boolean equals(Object o) {
         if (this == o) {


@@ -17,17 +17,18 @@
  * under the License.
  */
-package org.elasticsearch.discovery;
+package org.elasticsearch.action.search;
+import org.elasticsearch.tasks.CancellableTask;
+import org.elasticsearch.tasks.TaskId;
 /**
- * A listener that should be called by the {@link org.elasticsearch.discovery.Discovery} component
- * when the first valid initial cluster state has been submitted and processed by the cluster service.
- * <p>
- * Note, this listener should be registered with the discovery service before it has started.
- *
- *
+ * Task storing information about a currently running search request.
  */
-public interface InitialStateDiscoveryListener {
+public class SearchTask extends CancellableTask {
+    public SearchTask(long id, String type, String action, String description, TaskId parentTaskId) {
+        super(id, type, action, description, parentTaskId);
+    }
-    void initialStateProcessed();
 }
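The new SearchTask extends CancellableTask, so every search and scroll request now registers a cancellable task. A minimal sketch of how the createTask hook added above behaves (not part of this commit; the id, type, and action values are arbitrary placeholders):

    // assumes: import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId;
    SearchScrollRequest scrollRequest = new SearchScrollRequest("someScrollId");
    Task task = scrollRequest.createTask(42L, "transport", SearchScrollAction.NAME, TaskId.EMPTY_TASK_ID);
    assert task instanceof SearchTask;   // visible in the task management API and cancellable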


@@ -23,6 +23,8 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionListenerResponseHandler;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchTask;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.component.AbstractComponent;
@@ -42,7 +44,10 @@ import org.elasticsearch.search.query.QuerySearchRequest;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.query.QuerySearchResultProvider;
 import org.elasticsearch.search.query.ScrollQuerySearchResult;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TaskAwareTransportRequestHandler;
+import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportResponse;
 import org.elasticsearch.transport.TransportService;
@@ -100,59 +105,62 @@ public class SearchTransportService extends AbstractComponent {
             new ActionListenerResponseHandler<>(listener, () -> TransportResponse.Empty.INSTANCE));
     }

-    public void sendExecuteDfs(DiscoveryNode node, final ShardSearchTransportRequest request,
+    public void sendExecuteDfs(DiscoveryNode node, final ShardSearchTransportRequest request, SearchTask task,
                                final ActionListener<DfsSearchResult> listener) {
-        transportService.sendRequest(node, DFS_ACTION_NAME, request, new ActionListenerResponseHandler<>(listener, DfsSearchResult::new));
+        transportService.sendChildRequest(node, DFS_ACTION_NAME, request, task,
+            new ActionListenerResponseHandler<>(listener, DfsSearchResult::new));
     }

-    public void sendExecuteQuery(DiscoveryNode node, final ShardSearchTransportRequest request,
+    public void sendExecuteQuery(DiscoveryNode node, final ShardSearchTransportRequest request, SearchTask task,
                                  final ActionListener<QuerySearchResultProvider> listener) {
-        transportService.sendRequest(node, QUERY_ACTION_NAME, request,
+        transportService.sendChildRequest(node, QUERY_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, QuerySearchResult::new));
     }

-    public void sendExecuteQuery(DiscoveryNode node, final QuerySearchRequest request, final ActionListener<QuerySearchResult> listener) {
-        transportService.sendRequest(node, QUERY_ID_ACTION_NAME, request,
+    public void sendExecuteQuery(DiscoveryNode node, final QuerySearchRequest request, SearchTask task,
+                                 final ActionListener<QuerySearchResult> listener) {
+        transportService.sendChildRequest(node, QUERY_ID_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, QuerySearchResult::new));
     }

-    public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request,
+    public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task,
                                  final ActionListener<ScrollQuerySearchResult> listener) {
-        transportService.sendRequest(node, QUERY_SCROLL_ACTION_NAME, request,
+        transportService.sendChildRequest(node, QUERY_SCROLL_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, ScrollQuerySearchResult::new));
     }

-    public void sendExecuteFetch(DiscoveryNode node, final ShardSearchTransportRequest request,
+    public void sendExecuteFetch(DiscoveryNode node, final ShardSearchTransportRequest request, SearchTask task,
                                  final ActionListener<QueryFetchSearchResult> listener) {
-        transportService.sendRequest(node, QUERY_FETCH_ACTION_NAME, request,
+        transportService.sendChildRequest(node, QUERY_FETCH_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, QueryFetchSearchResult::new));
     }

-    public void sendExecuteFetch(DiscoveryNode node, final QuerySearchRequest request,
+    public void sendExecuteFetch(DiscoveryNode node, final QuerySearchRequest request, SearchTask task,
                                  final ActionListener<QueryFetchSearchResult> listener) {
-        transportService.sendRequest(node, QUERY_QUERY_FETCH_ACTION_NAME, request,
+        transportService.sendChildRequest(node, QUERY_QUERY_FETCH_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, QueryFetchSearchResult::new));
     }

-    public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request,
+    public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task,
                                  final ActionListener<ScrollQueryFetchSearchResult> listener) {
-        transportService.sendRequest(node, QUERY_FETCH_SCROLL_ACTION_NAME, request,
+        transportService.sendChildRequest(node, QUERY_FETCH_SCROLL_ACTION_NAME, request, task,
             new ActionListenerResponseHandler<>(listener, ScrollQueryFetchSearchResult::new));
     }

-    public void sendExecuteFetch(DiscoveryNode node, final ShardFetchSearchRequest request,
+    public void sendExecuteFetch(DiscoveryNode node, final ShardFetchSearchRequest request, SearchTask task,
                                  final ActionListener<FetchSearchResult> listener) {
-        sendExecuteFetch(node, FETCH_ID_ACTION_NAME, request, listener);
+        sendExecuteFetch(node, FETCH_ID_ACTION_NAME, request, task, listener);
    }

-    public void sendExecuteFetchScroll(DiscoveryNode node, final ShardFetchRequest request,
+    public void sendExecuteFetchScroll(DiscoveryNode node, final ShardFetchRequest request, SearchTask task,
                                        final ActionListener<FetchSearchResult> listener) {
-        sendExecuteFetch(node, FETCH_ID_SCROLL_ACTION_NAME, request, listener);
+        sendExecuteFetch(node, FETCH_ID_SCROLL_ACTION_NAME, request, task, listener);
     }

-    private void sendExecuteFetch(DiscoveryNode node, String action, final ShardFetchRequest request,
+    private void sendExecuteFetch(DiscoveryNode node, String action, final ShardFetchRequest request, SearchTask task,
                                   final ActionListener<FetchSearchResult> listener) {
-        transportService.sendRequest(node, action, request, new ActionListenerResponseHandler<>(listener, FetchSearchResult::new));
+        transportService.sendChildRequest(node, action, request, task,
+            new ActionListenerResponseHandler<>(listener, FetchSearchResult::new));
     }

     static class ScrollFreeContextRequest extends TransportRequest {
@@ -252,64 +260,103 @@ public class SearchTransportService extends AbstractComponent {
     public static void registerRequestHandler(TransportService transportService, SearchService searchService) {
         transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ScrollFreeContextRequest::new, ThreadPool.Names.SAME,
-            ((request, channel) -> {
+            new TaskAwareTransportRequestHandler<ScrollFreeContextRequest>() {
+                @Override
+                public void messageReceived(ScrollFreeContextRequest request, TransportChannel channel, Task task) throws Exception {
                    boolean freed = searchService.freeContext(request.id());
                    channel.sendResponse(new SearchFreeContextResponse(freed));
-            }));
+                }
+            });
         transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, SearchFreeContextRequest::new, ThreadPool.Names.SAME,
-            (request, channel) -> {
+            new TaskAwareTransportRequestHandler<SearchFreeContextRequest>() {
+                @Override
+                public void messageReceived(SearchFreeContextRequest request, TransportChannel channel, Task task) throws Exception {
                    boolean freed = searchService.freeContext(request.id());
                    channel.sendResponse(new SearchFreeContextResponse(freed));
+                }
             });
         transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE,
-            ThreadPool.Names.SAME, (request, channel) -> {
+            ThreadPool.Names.SAME,
+            new TaskAwareTransportRequestHandler<TransportRequest.Empty>() {
+                @Override
+                public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) throws Exception {
                    searchService.freeAllScrollContexts();
                    channel.sendResponse(TransportResponse.Empty.INSTANCE);
+                }
             });
         transportService.registerRequestHandler(DFS_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                DfsSearchResult result = searchService.executeDfsPhase(request);
+            new TaskAwareTransportRequestHandler<ShardSearchTransportRequest>() {
+                @Override
+                public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception {
+                    DfsSearchResult result = searchService.executeDfsPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                QuerySearchResultProvider result = searchService.executeQueryPhase(request);
+            new TaskAwareTransportRequestHandler<ShardSearchTransportRequest>() {
+                @Override
+                public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception {
+                    QuerySearchResultProvider result = searchService.executeQueryPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                QuerySearchResult result = searchService.executeQueryPhase(request);
+            new TaskAwareTransportRequestHandler<QuerySearchRequest>() {
+                @Override
+                public void messageReceived(QuerySearchRequest request, TransportChannel channel, Task task) throws Exception {
+                    QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                ScrollQuerySearchResult result = searchService.executeQueryPhase(request);
+            new TaskAwareTransportRequestHandler<InternalScrollSearchRequest>() {
+                @Override
+                public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception {
+                    ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_FETCH_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                QueryFetchSearchResult result = searchService.executeFetchPhase(request);
+            new TaskAwareTransportRequestHandler<ShardSearchTransportRequest>() {
+                @Override
+                public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception {
+                    QueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_QUERY_FETCH_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                QueryFetchSearchResult result = searchService.executeFetchPhase(request);
+            new TaskAwareTransportRequestHandler<QuerySearchRequest>() {
+                @Override
+                public void messageReceived(QuerySearchRequest request, TransportChannel channel, Task task) throws Exception {
+                    QueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request);
+            new TaskAwareTransportRequestHandler<InternalScrollSearchRequest>() {
+                @Override
+                public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception {
+                    ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ShardFetchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                FetchSearchResult result = searchService.executeFetchPhase(request);
+            new TaskAwareTransportRequestHandler<ShardFetchRequest>() {
+                @Override
+                public void messageReceived(ShardFetchRequest request, TransportChannel channel, Task task) throws Exception {
+                    FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
         transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ShardFetchSearchRequest::new, ThreadPool.Names.SEARCH,
-            (request, channel) -> {
-                FetchSearchResult result = searchService.executeFetchPhase(request);
+            new TaskAwareTransportRequestHandler<ShardFetchSearchRequest>() {
+                @Override
+                public void messageReceived(ShardFetchSearchRequest request, TransportChannel channel, Task task) throws Exception {
+                    FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task);
                    channel.sendResponse(result);
+                }
             });
     }
 }
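Worth spelling out the pattern the two blocks above establish: sendChildRequest registers each outgoing shard-level request as a child of the calling SearchTask, and TaskAwareTransportRequestHandler hands the receiving side the Task that the transport layer registered for the incoming request. A hedged sketch of the same shape with made-up names (DEMO_ACTION_NAME, DemoShardRequest, and executeDemoPhase are hypothetical and not part of this commit):

    transportService.registerRequestHandler(DEMO_ACTION_NAME, DemoShardRequest::new, ThreadPool.Names.SEARCH,
        new TaskAwareTransportRequestHandler<DemoShardRequest>() {
            @Override
            public void messageReceived(DemoShardRequest request, TransportChannel channel, Task task) throws Exception {
                // 'task' is the task registered for this shard-level request; casting it to SearchTask
                // lets the service check for cancellation while it works
                channel.sendResponse(executeDemoPhase(request, (SearchTask) task));
            }
        });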


@@ -38,6 +38,7 @@ import org.elasticsearch.indices.IndexClosedException;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.search.internal.AliasFilter;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -88,7 +89,7 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
     }

     @Override
-    protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
+    protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
         // pure paranoia if time goes backwards we are at least positive
         final long startTimeInMillis = Math.max(0, System.currentTimeMillis());
         ClusterState clusterState = clusterService.state();
@@ -129,12 +130,17 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
             logger.debug("failed to optimize search type, continue as normal", e);
         }
-        searchAsyncAction(searchRequest, shardIterators, startTimeInMillis, clusterState, Collections.unmodifiableMap(aliasFilter)
-            , listener).start();
+        searchAsyncAction((SearchTask)task, searchRequest, shardIterators, startTimeInMillis, clusterState,
+            Collections.unmodifiableMap(aliasFilter), listener).start();
     }

-    private AbstractSearchAsyncAction searchAsyncAction(SearchRequest searchRequest, GroupShardsIterator shardIterators, long startTime,
-                                                        ClusterState state, Map<String, AliasFilter> aliasFilter,
+    @Override
+    protected final void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
+        throw new UnsupportedOperationException("the task parameter is required");
+    }

+    private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators,
+                                                        long startTime, ClusterState state, Map<String, AliasFilter> aliasFilter,
                                                         ActionListener<SearchResponse> listener) {
         final Function<String, DiscoveryNode> nodesLookup = state.nodes()::get;
         final long clusterStateVersion = state.version();
@@ -144,22 +150,22 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
             case DFS_QUERY_THEN_FETCH:
                 searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
                     aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
-                    clusterStateVersion);
+                    clusterStateVersion, task);
                 break;
             case QUERY_THEN_FETCH:
                 searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
                     aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
-                    clusterStateVersion);
+                    clusterStateVersion, task);
                 break;
             case DFS_QUERY_AND_FETCH:
                 searchAsyncAction = new SearchDfsQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
                     aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
-                    clusterStateVersion);
+                    clusterStateVersion, task);
                 break;
             case QUERY_AND_FETCH:
                 searchAsyncAction = new SearchQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
                     aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
-                    clusterStateVersion);
+                    clusterStateVersion, task);
                 break;
             default:
                 throw new IllegalStateException("Unknown search type: [" + searchRequest.searchType() + "]");
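The action now receives its Task explicitly, and the old task-less doExecute is deliberately turned into a trap so callers cannot bypass task registration. A small sketch of that idiom in isolation, with hypothetical names (TransportDemoAction, DemoRequest, and DemoResponse are illustrations, not part of this commit):

    public class TransportDemoAction extends HandledTransportAction<DemoRequest, DemoResponse> {
        @Override
        protected final void doExecute(DemoRequest request, ActionListener<DemoResponse> listener) {
            // callers must go through the task-aware variant
            throw new UnsupportedOperationException("the task parameter is required");
        }

        @Override
        protected void doExecute(Task task, DemoRequest request, ActionListener<DemoResponse> listener) {
            // the task created by DemoRequest#createTask arrives here and is threaded through the async phases
        }
    }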


@@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -52,19 +53,24 @@ public class TransportSearchScrollAction extends HandledTransportAction<SearchSc
         this.searchPhaseController = new SearchPhaseController(settings, bigArrays, scriptService, clusterService);
     }

     @Override
-    protected void doExecute(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
+    protected final void doExecute(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
+        throw new UnsupportedOperationException("the task parameter is required");
+    }

+    @Override
+    protected void doExecute(Task task, SearchScrollRequest request, ActionListener<SearchResponse> listener) {
         try {
             ParsedScrollId scrollId = parseScrollId(request.scrollId());
             AbstractAsyncAction action;
             switch (scrollId.getType()) {
                 case QUERY_THEN_FETCH_TYPE:
                     action = new SearchScrollQueryThenFetchAsyncAction(logger, clusterService, searchTransportService,
-                        searchPhaseController, request, scrollId, listener);
+                        searchPhaseController, request, (SearchTask)task, scrollId, listener);
                     break;
                 case QUERY_AND_FETCH_TYPE:
                     action = new SearchScrollQueryAndFetchAsyncAction(logger, clusterService, searchTransportService,
-                        searchPhaseController, request, scrollId, listener);
+                        searchPhaseController, request, (SearchTask)task, scrollId, listener);
                     break;
                 default:
                     throw new IllegalArgumentException("Scroll id type [" + scrollId.getType() + "] unrecognized");


@@ -42,8 +42,7 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import java.io.IOException;
@@ -232,7 +231,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      * @deprecated Use {@link #script()} instead
      */
     @Deprecated
-    public ScriptService.ScriptType scriptType() {
+    public ScriptType scriptType() {
         return this.script == null ? null : this.script.getType();
     }
@@ -252,7 +251,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      * @deprecated Use {@link #script(Script)} instead
      */
     @Deprecated
-    public UpdateRequest script(String script, ScriptService.ScriptType scriptType) {
+    public UpdateRequest script(String script, ScriptType scriptType) {
         updateOrCreateScript(script, scriptType, null, null);
         return this;
     }
@@ -347,7 +346,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      * @deprecated Use {@link #script(Script)} instead
      */
     @Deprecated
-    public UpdateRequest script(String script, ScriptService.ScriptType scriptType, @Nullable Map<String, Object> scriptParams) {
+    public UpdateRequest script(String script, ScriptType scriptType, @Nullable Map<String, Object> scriptParams) {
         this.script = new Script(script, scriptType, null, scriptParams);
         return this;
     }
@@ -369,7 +368,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
      * @deprecated Use {@link #script(Script)} instead
      */
     @Deprecated
-    public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType,
+    public UpdateRequest script(String script, @Nullable String scriptLang, ScriptType scriptType,
                                 @Nullable Map<String, Object> scriptParams) {
         this.script = new Script(script, scriptType, scriptLang, scriptParams);
         return this;
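The deprecated String overloads above just wrap a Script object; the supported path builds one directly with the relocated enum. A short sketch (the index, type, id, and Painless snippet are placeholder values):

    UpdateRequest request = new UpdateRequest("my-index", "my-type", "1");
    request.script(new Script("ctx._source.counter += 1", ScriptType.INLINE, "painless", Collections.emptyMap()));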


@@ -210,12 +210,9 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
     public static class ShardSnapshotStatus {
-        private State state;
-        private String nodeId;
-        private String reason;
-        private ShardSnapshotStatus() {
-        }
+        private final State state;
+        private final String nodeId;
+        private final String reason;

         public ShardSnapshotStatus(String nodeId) {
             this(nodeId, State.INIT);
@@ -231,6 +228,12 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             this.reason = reason;
         }

+        public ShardSnapshotStatus(StreamInput in) throws IOException {
+            nodeId = in.readOptionalString();
+            state = State.fromValue(in.readByte());
+            reason = in.readOptionalString();
+        }

         public State state() {
             return state;
         }
@@ -243,18 +246,6 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             return reason;
         }

-        public static ShardSnapshotStatus readShardSnapshotStatus(StreamInput in) throws IOException {
-            ShardSnapshotStatus shardSnapshotStatus = new ShardSnapshotStatus();
-            shardSnapshotStatus.readFrom(in);
-            return shardSnapshotStatus;
-        }
-        public void readFrom(StreamInput in) throws IOException {
-            nodeId = in.readOptionalString();
-            state = State.fromValue(in.readByte());
-            reason = in.readOptionalString();
-        }

         public void writeTo(StreamOutput out) throws IOException {
             out.writeOptionalString(nodeId);
             out.writeByte(state.value);
@@ -282,6 +273,11 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             result = 31 * result + (reason != null ? reason.hashCode() : 0);
             return result;
         }

+        @Override
+        public String toString() {
+            return "ShardSnapshotStatus[state=" + state + ", nodeId=" + nodeId + ", reason=" + reason + "]";
+        }
     }

     public enum State {
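The readFrom/static-factory pair is replaced by a StreamInput constructor, which is what allows the fields to become final. A hedged sketch of the resulting round trip (BytesStreamOutput and the node id are only used here for illustration; they are not part of this change):

    ShardSnapshotStatus original = new ShardSnapshotStatus("node-1");
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);
    ShardSnapshotStatus copy = new ShardSnapshotStatus(out.bytes().streamInput());
    assert copy.state() == original.state();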


@@ -26,6 +26,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -399,9 +400,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
     public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append("{");
-        for (DiscoveryNode node : this) {
-            sb.append(node).append(',');
-        }
+        sb.append(Strings.collectionToDelimitedString(this, ","));
         sb.append("}");
         return sb.toString();
     }
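The motivation for the toString change above: the manual loop appended a ',' after every node, leaving a trailing comma inside the braces, while Strings.collectionToDelimitedString only puts the delimiter between elements. A tiny sketch (the node names are placeholders):

    // the old loop produced "{node1,node2,}"; the helper produces "{node1,node2}"
    String joined = Strings.collectionToDelimitedString(Arrays.asList("node1", "node2"), ",");   // "node1,node2"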


@@ -345,6 +345,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
             UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING,
             SearchService.DEFAULT_KEEPALIVE_SETTING,
             SearchService.KEEPALIVE_INTERVAL_SETTING,
+            SearchService.LOW_LEVEL_CANCELLATION_SETTING,
             Node.WRITE_PORTS_FIELD_SETTING,
             Node.NODE_NAME_SETTING,
             Node.NODE_DATA_SETTING,


@@ -683,15 +683,10 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
                     return currentState;
                 }

-                DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(currentState.nodes())
-                        // make sure the old master node, which has failed, is not part of the nodes we publish
-                        .remove(masterNode)
-                        .masterNodeId(null).build();

                 // flush any pending cluster states from old master, so it will not be set as master again
                 publishClusterState.pendingStatesQueue().failAllStatesAndClear(new ElasticsearchException("master left [{}]", reason));

-                return rejoin(ClusterState.builder(currentState).nodes(discoveryNodes).build(), "master left (reason = " + reason + ")");
+                return rejoin(currentState, "master left (reason = " + reason + ")");
             }

             @Override


@@ -411,7 +411,11 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
                 }
             } finally {
                 try {
+                    if (store != null) {
                         store.close();
+                    } else {
+                        logger.trace("[{}] store not initialized prior to closing shard, nothing to close", shardId);
+                    }
                 } catch (Exception e) {
                     logger.warn(
                         (Supplier<?>) () -> new ParameterizedMessage(


@@ -27,7 +27,7 @@ public class IndexShardSnapshotStatus {
     /**
      * Snapshot stage
      */
-    public static enum Stage {
+    public enum Stage {
         /**
          * Snapshot hasn't started yet
          */
@@ -66,7 +66,7 @@ public class IndexShardSnapshotStatus {
     private long indexVersion;
-    private boolean aborted;
+    private volatile boolean aborted;
     private String failure;


@@ -69,6 +69,7 @@ import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.repositories.RepositoriesService;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.snapshots.RestoreService;
+import org.elasticsearch.snapshots.SnapshotShardsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import java.io.IOException;
@@ -113,10 +114,11 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
                                       NodeMappingRefreshAction nodeMappingRefreshAction,
                                       RepositoriesService repositoriesService, RestoreService restoreService,
                                       SearchService searchService, SyncedFlushService syncedFlushService,
-                                      PeerRecoverySourceService peerRecoverySourceService) {
+                                      PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService) {
         this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService,
                 clusterService, threadPool, recoveryTargetService, shardStateAction,
-                nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, peerRecoverySourceService);
+                nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, peerRecoverySourceService,
+                snapshotShardsService);
     }

     // for tests
@@ -128,9 +130,10 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
                                       NodeMappingRefreshAction nodeMappingRefreshAction,
                                       RepositoriesService repositoriesService, RestoreService restoreService,
                                       SearchService searchService, SyncedFlushService syncedFlushService,
-                                      PeerRecoverySourceService peerRecoverySourceService) {
+                                      PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService) {
         super(settings);
-        this.buildInIndexListener = Arrays.asList(peerRecoverySourceService, recoveryTargetService, searchService, syncedFlushService);
+        this.buildInIndexListener = Arrays.asList(peerRecoverySourceService, recoveryTargetService, searchService, syncedFlushService,
+                snapshotShardsService);
         this.indicesService = indicesService;
         this.clusterService = clusterService;
         this.threadPool = threadPool;


@@ -25,6 +25,7 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
 import java.util.Collections;
 import java.util.Map;
@@ -42,7 +43,7 @@ public class InternalTemplateService implements TemplateService {
         int mustacheStart = template.indexOf("{{");
         int mustacheEnd = template.indexOf("}}");
         if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) {
-            Script script = new Script(template, ScriptService.ScriptType.INLINE, "mustache", Collections.emptyMap());
+            Script script = new Script(template, ScriptType.INLINE, "mustache", Collections.emptyMap());
             CompiledScript compiledScript = scriptService.compile(
                 script,
                 ScriptContext.Standard.INGEST,


@@ -399,11 +399,11 @@ public class OsProbe {
      * group to which the Elasticsearch process belongs for the
      * {@code cpu} subsystem. These lines represent the CPU time
      * statistics and have the form
-     * <p>
+     * <blockquote><pre>
      * nr_periods \d+
      * nr_throttled \d+
      * throttled_time \d+
-     * <p>
+     * </pre></blockquote>
      * where {@code nr_periods} is the number of period intervals
      * as specified by {@code cpu.cfs_period_us} that have elapsed,
      * {@code nr_throttled} is the number of times tasks in the given


@@ -60,9 +60,6 @@ public class RestMainAction extends BaseRestHandler {
     static BytesRestResponse convertMainResponse(MainResponse response, RestRequest request, XContentBuilder builder) throws IOException {
         RestStatus status = response.isAvailable() ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE;
-        if (request.method() == RestRequest.Method.HEAD) {
-            return new BytesRestResponse(status, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY);
-        }

         // Default to pretty printing, but allow ?pretty=false to disable
         if (request.hasParam("pretty") == false) {


@@ -22,13 +22,11 @@ import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
@@ -67,42 +65,14 @@ public class RestAnalyzeAction extends BaseRestHandler {
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        String[] texts = request.paramAsStringArrayOrEmptyIfAll("text");
         AnalyzeRequest analyzeRequest = new AnalyzeRequest(request.param("index"));
-        analyzeRequest.text(texts);
-        analyzeRequest.analyzer(request.param("analyzer"));
-        analyzeRequest.field(request.param("field"));
-        final String tokenizer = request.param("tokenizer");
-        if (tokenizer != null) {
-            analyzeRequest.tokenizer(tokenizer);
-        }
-        for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) {
-            analyzeRequest.addTokenFilter(filter);
-        }
-        for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) {
-            analyzeRequest.addTokenFilter(charFilter);
-        }
-        analyzeRequest.explain(request.paramAsBoolean("explain", false));
-        analyzeRequest.attributes(request.paramAsStringArray("attributes", analyzeRequest.attributes()));
-        if (RestActions.hasBodyContent(request)) {
-            XContentType type = RestActions.guessBodyContentType(request);
-            if (type == null) {
-                if (texts == null || texts.length == 0) {
-                    texts = new String[]{ RestActions.getRestContent(request).utf8ToString() };
-                    analyzeRequest.text(texts);
-                }
-            } else {
-                // NOTE: if rest request with xcontent body has request parameters, the parameters does not override xcontent values
         buildFromContent(RestActions.getRestContent(request), analyzeRequest, parseFieldMatcher);
-            }
-        }
         return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel));
     }

-    public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) {
+    static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) {
         try (XContentParser parser = XContentHelper.createParser(content)) {
             if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                 throw new IllegalArgumentException("Malformed content, must start with an object");


@@ -24,7 +24,7 @@ package org.elasticsearch.script;
  */
 public class CompiledScript {
-    private final ScriptService.ScriptType type;
+    private final ScriptType type;
     private final String name;
     private final String lang;
     private final Object compiled;
@@ -36,7 +36,7 @@ public class CompiledScript {
      * @param lang The language of the script to be executed.
      * @param compiled The compiled script Object that is executable.
      */
-    public CompiledScript(ScriptService.ScriptType type, String name, String lang, Object compiled) {
+    public CompiledScript(ScriptType type, String name, String lang, Object compiled) {
         this.type = type;
         this.name = name;
         this.lang = lang;
@@ -47,7 +47,7 @@ public class CompiledScript {
      * Method to get the type of language.
      * @return The type of language the script was compiled in.
      */
-    public ScriptService.ScriptType type() {
+    public ScriptType type() {
         return type;
     }


@@ -34,7 +34,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.script.ScriptService.ScriptType;
 import java.io.IOException;
 import java.util.Map;


@@ -19,8 +19,6 @@
 package org.elasticsearch.script;

-import org.elasticsearch.common.settings.Settings;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -81,8 +79,8 @@ public final class ScriptContextRegistry {
     }

     private static Set<String> reservedScriptContexts() {
-        Set<String> reserved = new HashSet<>(ScriptService.ScriptType.values().length + ScriptContext.Standard.values().length);
-        for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
+        Set<String> reserved = new HashSet<>(ScriptType.values().length + ScriptContext.Standard.values().length);
+        for (ScriptType scriptType : ScriptType.values()) {
             reserved.add(scriptType.toString());
         }
         for (ScriptContext.Standard scriptContext : ScriptContext.Standard.values()) {


@@ -21,7 +21,6 @@ package org.elasticsearch.script;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.script.ScriptService.ScriptType;
 import java.util.Collections;
 import java.util.HashMap;


@@ -35,7 +35,6 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateListener;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -46,8 +45,6 @@ import org.elasticsearch.common.cache.RemovalNotification;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.io.Streams;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -70,7 +67,6 @@ import java.nio.file.Path;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.ConcurrentMap;
@@ -632,68 +628,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
     }

-    /**
-     * The type of a script, more specifically where it gets loaded from:
-     * - provided dynamically at request time
-     * - loaded from an index
-     * - loaded from file
-     */
-    public enum ScriptType {
-        INLINE(0, "inline", "inline", false),
-        STORED(1, "id", "stored", false),
-        FILE(2, "file", "file", true);
-        private final int val;
-        private final ParseField parseField;
-        private final String scriptType;
-        private final boolean defaultScriptEnabled;
-        public static ScriptType readFrom(StreamInput in) throws IOException {
-            int scriptTypeVal = in.readVInt();
-            for (ScriptType type : values()) {
-                if (type.val == scriptTypeVal) {
-                    return type;
-                }
-            }
-            throw new IllegalArgumentException("Unexpected value read for ScriptType got [" + scriptTypeVal + "] expected one of ["
-                + INLINE.val + "," + FILE.val + "," + STORED.val + "]");
-        }
-        public static void writeTo(ScriptType scriptType, StreamOutput out) throws IOException{
-            if (scriptType != null) {
-                out.writeVInt(scriptType.val);
-            } else {
-                out.writeVInt(INLINE.val); //Default to inline
-            }
-        }
-        ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) {
-            this.val = val;
-            this.parseField = new ParseField(name);
-            this.scriptType = scriptType;
-            this.defaultScriptEnabled = defaultScriptEnabled;
-        }
-        public ParseField getParseField() {
-            return parseField;
-        }
-        public boolean getDefaultScriptEnabled() {
-            return defaultScriptEnabled;
-        }
-        public String getScriptType() {
-            return scriptType;
-        }
-        @Override
-        public String toString() {
-            return name().toLowerCase(Locale.ROOT);
-        }
-    }

     private static final class CacheKey {
         final String lang;
         final String name;


@@ -43,11 +43,11 @@ public class ScriptSettings {
     @Deprecated
     public static final String LEGACY_SCRIPT_SETTING = "script.legacy.default_lang";

-    private static final Map<ScriptService.ScriptType, Setting<Boolean>> SCRIPT_TYPE_SETTING_MAP;
+    private static final Map<ScriptType, Setting<Boolean>> SCRIPT_TYPE_SETTING_MAP;

     static {
-        Map<ScriptService.ScriptType, Setting<Boolean>> scriptTypeSettingMap = new HashMap<>();
-        for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
+        Map<ScriptType, Setting<Boolean>> scriptTypeSettingMap = new HashMap<>();
+        for (ScriptType scriptType : ScriptType.values()) {
             scriptTypeSettingMap.put(scriptType, Setting.boolSetting(
                 ScriptModes.sourceKey(scriptType),
                 scriptType.getDefaultScriptEnabled(),
@@ -84,7 +84,7 @@ public class ScriptSettings {
         return scriptContextSettingMap;
     }

-    private static List<Setting<Boolean>> languageSettings(Map<ScriptService.ScriptType, Setting<Boolean>> scriptTypeSettingMap,
+    private static List<Setting<Boolean>> languageSettings(Map<ScriptType, Setting<Boolean>> scriptTypeSettingMap,
                                                            Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap,
                                                            ScriptEngineRegistry scriptEngineRegistry,
                                                            ScriptContextRegistry scriptContextRegistry) {
@@ -96,13 +96,13 @@ public class ScriptSettings {
                 continue;
             }
             final String language = scriptEngineRegistry.getLanguage(scriptEngineService);
-            for (final ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
+            for (final ScriptType scriptType : ScriptType.values()) {
                 // Top level, like "script.engine.groovy.inline"
                 final boolean defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptEnableds().get(language);
                 boolean defaultLangAndType = defaultNonFileScriptMode;
                 // Files are treated differently because they are never default-deny
-                if (ScriptService.ScriptType.FILE == scriptType) {
-                    defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptEnabled();
+                if (ScriptType.FILE == scriptType) {
+                    defaultLangAndType = ScriptType.FILE.getDefaultScriptEnabled();
                 }
                 final boolean defaultIfNothingSet = defaultLangAndType;
View File
@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.Locale;
/**
* The type of a script, more specifically where it gets loaded from:
* - provided dynamically at request time
* - loaded from an index
* - loaded from file
*/
public enum ScriptType {
INLINE(0, "inline", "inline", false),
STORED(1, "id", "stored", false),
FILE(2, "file", "file", true);
private final int val;
private final ParseField parseField;
private final String scriptType;
private final boolean defaultScriptEnabled;
public static ScriptType readFrom(StreamInput in) throws IOException {
int scriptTypeVal = in.readVInt();
for (ScriptType type : values()) {
if (type.val == scriptTypeVal) {
return type;
}
}
throw new IllegalArgumentException("Unexpected value read for ScriptType got [" + scriptTypeVal + "] expected one of ["
+ INLINE.val + "," + FILE.val + "," + STORED.val + "]");
}
public static void writeTo(ScriptType scriptType, StreamOutput out) throws IOException{
if (scriptType != null) {
out.writeVInt(scriptType.val);
} else {
out.writeVInt(INLINE.val); //Default to inline
}
}
ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) {
this.val = val;
this.parseField = new ParseField(name);
this.scriptType = scriptType;
this.defaultScriptEnabled = defaultScriptEnabled;
}
public ParseField getParseField() {
return parseField;
}
public boolean getDefaultScriptEnabled() {
return defaultScriptEnabled;
}
public String getScriptType() {
return scriptType;
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
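For orientation, a minimal round-trip sketch of the stream helpers above. It assumes the usual BytesStreamOutput / StreamInput pairing (including BytesReference#streamInput()); everything outside ScriptType itself is illustrative only and not part of this change.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.script.ScriptType;

import java.io.IOException;

// Illustrative only: serializes a ScriptType and reads it back through the
// writeTo/readFrom helpers defined above.
public class ScriptTypeRoundTrip {
    public static void main(String[] args) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            ScriptType.writeTo(ScriptType.STORED, out);        // writes the numeric id (1)
            try (StreamInput in = out.bytes().streamInput()) { // assumed BytesReference#streamInput()
                ScriptType type = ScriptType.readFrom(in);     // resolves the id back to STORED
                assert type == ScriptType.STORED;
            }
        }
    }
}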
View File
@ -27,6 +27,7 @@ import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
@ -113,8 +114,11 @@ final class DefaultSearchContext extends SearchContext {
private Float minimumScore; private Float minimumScore;
private boolean trackScores = false; // when sorting, track scores as well... private boolean trackScores = false; // when sorting, track scores as well...
private FieldDoc searchAfter; private FieldDoc searchAfter;
private boolean lowLevelCancellation;
// filter for sliced scroll // filter for sliced scroll
private SliceBuilder sliceBuilder; private SliceBuilder sliceBuilder;
private SearchTask task;
/** /**
* The original query as sent by the user without the types and aliases * The original query as sent by the user without the types and aliases
@ -571,6 +575,15 @@ final class DefaultSearchContext extends SearchContext {
return this; return this;
} }
@Override
public boolean lowLevelCancellation() {
return lowLevelCancellation;
}
public void lowLevelCancellation(boolean lowLevelCancellation) {
this.lowLevelCancellation = lowLevelCancellation;
}
@Override @Override
public FieldDoc searchAfter() { public FieldDoc searchAfter() {
return searchAfter; return searchAfter;
@ -792,4 +805,19 @@ final class DefaultSearchContext extends SearchContext {
public void setProfilers(Profilers profilers) { public void setProfilers(Profilers profilers) {
this.profilers = profilers; this.profilers = profilers;
} }
@Override
public void setTask(SearchTask task) {
this.task = task;
}
@Override
public SearchTask getTask() {
return task;
}
@Override
public boolean isCancelled() {
return task.isCancelled();
}
} }
View File
@ -26,6 +26,7 @@ import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
@ -84,6 +85,7 @@ import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable; import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.threadpool.ThreadPool.Names;
@ -107,6 +109,13 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope); Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope);
public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING = public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING =
Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope); Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope);
/**
* Enables low-level, frequent search cancellation checks. Enabling low-level checks makes long-running searches react
* to a cancellation request faster. However, because it produces more cancellation checks, it may slow search performance
* down.
*/
public static final Setting<Boolean> LOW_LEVEL_CANCELLATION_SETTING =
Setting.boolSetting("search.low_level_cancellation", false, Property.Dynamic, Property.NodeScope);
public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING = public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING =
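Since search.low_level_cancellation is registered as a dynamic, node-scope setting, it can be toggled on a running cluster. A minimal sketch, assuming a standard Client and the cluster update-settings API (client construction omitted; names other than the setting key are illustrative):

import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;

// Sketch: enable frequent (per-document) cancellation checks cluster-wide.
// The trade-off documented above applies: faster reaction to cancellation,
// at the cost of more cancellation checks during collection.
final class LowLevelCancellationToggle {
    static void enable(Client client) {
        client.admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Settings.builder()
                .put("search.low_level_cancellation", true))
            .get();
    }
}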
@ -133,6 +142,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
private volatile TimeValue defaultSearchTimeout; private volatile TimeValue defaultSearchTimeout;
private volatile boolean lowLevelCancellation;
private final Cancellable keepAliveReaper; private final Cancellable keepAliveReaper;
private final AtomicLong idGenerator = new AtomicLong(); private final AtomicLong idGenerator = new AtomicLong();
@ -160,12 +171,19 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings); defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout);
lowLevelCancellation = LOW_LEVEL_CANCELLATION_SETTING.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(LOW_LEVEL_CANCELLATION_SETTING, this::setLowLevelCancellation);
} }
private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) { private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) {
this.defaultSearchTimeout = defaultSearchTimeout; this.defaultSearchTimeout = defaultSearchTimeout;
} }
private void setLowLevelCancellation(Boolean lowLevelCancellation) {
this.lowLevelCancellation = lowLevelCancellation;
}
@Override @Override
public void afterIndexClosed(Index index, Settings indexSettings) { public void afterIndexClosed(Index index, Settings indexSettings) {
// once an index is closed we can just clean up all the pending search context information // once an index is closed we can just clean up all the pending search context information
@ -212,10 +230,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
keepAliveReaper.cancel(); keepAliveReaper.cancel();
} }
public DfsSearchResult executeDfsPhase(ShardSearchRequest request) throws IOException { public DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchTask task) throws IOException {
final SearchContext context = createAndPutContext(request); final SearchContext context = createAndPutContext(request);
context.incRef(); context.incRef();
try { try {
context.setTask(task);
contextProcessing(context); contextProcessing(context);
dfsPhase.execute(context); dfsPhase.execute(context);
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -242,11 +261,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) throws IOException { public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request, SearchTask task) throws IOException {
final SearchContext context = createAndPutContext(request); final SearchContext context = createAndPutContext(request);
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef(); context.incRef();
try { try {
context.setTask(task);
operationListener.onPreQueryPhase(context); operationListener.onPreQueryPhase(context);
long time = System.nanoTime(); long time = System.nanoTime();
contextProcessing(context); contextProcessing(context);
@ -276,11 +296,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request) { public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef(); context.incRef();
try { try {
context.setTask(task);
operationListener.onPreQueryPhase(context); operationListener.onPreQueryPhase(context);
long time = System.nanoTime(); long time = System.nanoTime();
contextProcessing(context); contextProcessing(context);
@ -299,8 +320,9 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public QuerySearchResult executeQueryPhase(QuerySearchRequest request) { public QuerySearchResult executeQueryPhase(QuerySearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
context.setTask(task);
IndexShard indexShard = context.indexShard(); IndexShard indexShard = context.indexShard();
SearchOperationListener operationListener = indexShard.getSearchOperationListener(); SearchOperationListener operationListener = indexShard.getSearchOperationListener();
context.incRef(); context.incRef();
@ -339,11 +361,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) throws IOException { public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request, SearchTask task) throws IOException {
final SearchContext context = createAndPutContext(request); final SearchContext context = createAndPutContext(request);
context.incRef(); context.incRef();
try { try {
contextProcessing(context); contextProcessing(context);
context.setTask(task);
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
operationListener.onPreQueryPhase(context); operationListener.onPreQueryPhase(context);
long time = System.nanoTime(); long time = System.nanoTime();
@ -379,10 +402,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) { public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
context.incRef(); context.incRef();
try { try {
context.setTask(task);
contextProcessing(context); contextProcessing(context);
context.searcher().setAggregatedDfs(request.dfs()); context.searcher().setAggregatedDfs(request.dfs());
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
@ -420,10 +444,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request) { public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
context.incRef(); context.incRef();
try { try {
context.setTask(task);
contextProcessing(context); contextProcessing(context);
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
processScroll(request, context); processScroll(request, context);
@ -462,11 +487,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
} }
} }
public FetchSearchResult executeFetchPhase(ShardFetchRequest request) { public FetchSearchResult executeFetchPhase(ShardFetchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef(); context.incRef();
try { try {
context.setTask(task);
contextProcessing(context); contextProcessing(context);
if (request.lastEmittedDoc() != null) { if (request.lastEmittedDoc() != null) {
context.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
@ -546,6 +572,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
keepAlive = request.scroll().keepAlive().millis(); keepAlive = request.scroll().keepAlive().millis();
} }
context.keepAlive(keepAlive); context.keepAlive(keepAlive);
context.lowLevelCancellation(lowLevelCancellation);
} catch (Exception e) { } catch (Exception e) {
context.close(); context.close();
throw ExceptionsHelper.convertToRuntime(e); throw ExceptionsHelper.convertToRuntime(e);
@ -627,6 +654,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
private void cleanContext(SearchContext context) { private void cleanContext(SearchContext context) {
try { try {
context.clearReleasables(Lifetime.PHASE); context.clearReleasables(Lifetime.PHASE);
context.setTask(null);
} finally { } finally {
context.decRef(); context.decRef();
} }
View File
@ -28,9 +28,11 @@ import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TermStatistics;
import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.search.SearchContextException;
import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.tasks.TaskCancelledException;
import java.util.AbstractSet; import java.util.AbstractSet;
import java.util.Collection; import java.util.Collection;
@ -59,6 +61,9 @@ public class DfsPhase implements SearchPhase {
TermStatistics[] termStatistics = new TermStatistics[terms.length]; TermStatistics[] termStatistics = new TermStatistics[terms.length];
IndexReaderContext indexReaderContext = context.searcher().getTopReaderContext(); IndexReaderContext indexReaderContext = context.searcher().getTopReaderContext();
for (int i = 0; i < terms.length; i++) { for (int i = 0; i < terms.length; i++) {
if(context.isCancelled()) {
throw new TaskCancelledException("cancelled");
}
// LUCENE 4 UPGRADE: cache TermContext? // LUCENE 4 UPGRADE: cache TermContext?
TermContext termContext = TermContext.build(indexReaderContext, terms[i]); TermContext termContext = TermContext.build(indexReaderContext, terms[i]);
termStatistics[i] = context.searcher().termStatistics(terms[i], termContext); termStatistics[i] = context.searcher().termStatistics(terms[i], termContext);
@ -70,6 +75,9 @@ public class DfsPhase implements SearchPhase {
if (!fieldStatistics.containsKey(term.field())) { if (!fieldStatistics.containsKey(term.field())) {
final CollectionStatistics collectionStatistics = context.searcher().collectionStatistics(term.field()); final CollectionStatistics collectionStatistics = context.searcher().collectionStatistics(term.field());
fieldStatistics.put(term.field(), collectionStatistics); fieldStatistics.put(term.field(), collectionStatistics);
if(context.isCancelled()) {
throw new TaskCancelledException("cancelled");
}
} }
} }
View File
@ -31,6 +31,10 @@ public class DfsPhaseExecutionException extends SearchContextException {
super(context, "Dfs Failed [" + msg + "]", t); super(context, "Dfs Failed [" + msg + "]", t);
} }
public DfsPhaseExecutionException(SearchContext context, String msg) {
super(context, "Dfs Failed [" + msg + "]");
}
public DfsPhaseExecutionException(StreamInput in) throws IOException { public DfsPhaseExecutionException(StreamInput in) throws IOException {
super(in); super(in);
} }
View File
@ -51,6 +51,7 @@ import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.tasks.TaskCancelledException;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -136,6 +137,9 @@ public class FetchPhase implements SearchPhase {
InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()]; InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
for (int index = 0; index < context.docIdsToLoadSize(); index++) { for (int index = 0; index < context.docIdsToLoadSize(); index++) {
if(context.isCancelled()) {
throw new TaskCancelledException("cancelled");
}
int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index]; int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index];
int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
View File
@ -31,6 +31,10 @@ public class FetchPhaseExecutionException extends SearchContextException {
super(context, "Fetch Failed [" + msg + "]", t); super(context, "Fetch Failed [" + msg + "]", t);
} }
public FetchPhaseExecutionException(SearchContext context, String msg) {
super(context, "Fetch Failed [" + msg + "]");
}
public FetchPhaseExecutionException(StreamInput in) throws IOException { public FetchPhaseExecutionException(StreamInput in) throws IOException {
super(in); super(in);
View File

@ -22,9 +22,12 @@ package org.elasticsearch.search.fetch;
import com.carrotsearch.hppc.IntArrayList; import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequest;
import java.io.IOException; import java.io.IOException;
@ -106,4 +109,9 @@ public class ShardFetchRequest extends TransportRequest {
Lucene.writeScoreDoc(out, lastEmittedDoc); Lucene.writeScoreDoc(out, lastEmittedDoc);
} }
} }
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new SearchTask(id, type, action, getDescription(), parentTaskId);
}
} }
View File
@ -23,6 +23,7 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
@ -293,6 +294,11 @@ public abstract class FilteredSearchContext extends SearchContext {
in.terminateAfter(terminateAfter); in.terminateAfter(terminateAfter);
} }
@Override
public boolean lowLevelCancellation() {
return in.lowLevelCancellation();
}
@Override @Override
public SearchContext minimumScore(float minimumScore) { public SearchContext minimumScore(float minimumScore) {
return in.minimumScore(minimumScore); return in.minimumScore(minimumScore);
@ -516,4 +522,19 @@ public abstract class FilteredSearchContext extends SearchContext {
public QueryShardContext getQueryShardContext() { public QueryShardContext getQueryShardContext() {
return in.getQueryShardContext(); return in.getQueryShardContext();
} }
@Override
public void setTask(SearchTask task) {
in.setTask(task);
}
@Override
public SearchTask getTask() {
return in.getTask();
}
@Override
public boolean isCancelled() {
return in.isCancelled();
}
View File

@ -20,9 +20,12 @@
package org.elasticsearch.search.internal; package org.elasticsearch.search.internal;
import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.Scroll; import org.elasticsearch.search.Scroll;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequest;
import java.io.IOException; import java.io.IOException;
@ -67,4 +70,9 @@ public class InternalScrollSearchRequest extends TransportRequest {
out.writeLong(id); out.writeLong(id);
out.writeOptionalWriteable(scroll); out.writeOptionalWriteable(scroll);
} }
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new SearchTask(id, type, action, getDescription(), parentTaskId);
}
View File

@ -23,6 +23,7 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
@ -96,6 +97,12 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
return parseFieldMatcher; return parseFieldMatcher;
} }
public abstract void setTask(SearchTask task);
public abstract SearchTask getTask();
public abstract boolean isCancelled();
@Override @Override
public final void close() { public final void close() {
if (closed.compareAndSet(false, true)) { // prevent double closing if (closed.compareAndSet(false, true)) { // prevent double closing
@ -220,6 +227,14 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract void terminateAfter(int terminateAfter); public abstract void terminateAfter(int terminateAfter);
/**
* Indicates whether the current search should perform frequent low-level cancellation checks.
*
* Enabling low-level checks makes long-running searches react to a cancellation request faster. However,
* because it produces more cancellation checks, it may slow search performance down.
*/
public abstract boolean lowLevelCancellation();
public abstract SearchContext minimumScore(float minimumScore); public abstract SearchContext minimumScore(float minimumScore);
public abstract Float minimumScore(); public abstract Float minimumScore();
View File
@ -22,6 +22,7 @@ package org.elasticsearch.search.internal;
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
@ -33,6 +34,8 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.Scroll; import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequest;
import java.io.IOException; import java.io.IOException;
@ -158,4 +161,9 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
public void rewrite(QueryShardContext context) throws IOException { public void rewrite(QueryShardContext context) throws IOException {
shardSearchLocalRequest.rewrite(context); shardSearchLocalRequest.rewrite(context);
} }
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new SearchTask(id, type, action, getDescription(), parentTaskId);
}
View File

@ -45,6 +45,7 @@ public class CollectorResult implements ToXContent, Writeable {
public static final String REASON_SEARCH_MIN_SCORE = "search_min_score"; public static final String REASON_SEARCH_MIN_SCORE = "search_min_score";
public static final String REASON_SEARCH_MULTI = "search_multi"; public static final String REASON_SEARCH_MULTI = "search_multi";
public static final String REASON_SEARCH_TIMEOUT = "search_timeout"; public static final String REASON_SEARCH_TIMEOUT = "search_timeout";
public static final String REASON_SEARCH_CANCELLED = "search_cancelled";
public static final String REASON_AGGREGATION = "aggregation"; public static final String REASON_AGGREGATION = "aggregation";
public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global"; public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global";
View File
@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FilterCollector;
import org.apache.lucene.search.FilterLeafCollector;
import org.apache.lucene.search.LeafCollector;
import org.elasticsearch.common.inject.Provider;
import org.elasticsearch.tasks.TaskCancelledException;
import java.io.IOException;
/**
* Collector that checks if the task it is executed under is cancelled.
*/
public class CancellableCollector extends FilterCollector {
private final Provider<Boolean> cancelled;
private final boolean leafLevel;
/**
* Constructor
* @param cancelled supplier of the cancellation flag; the supplier is called once per segment if lowLevelCancellation is set
*                  to false, and once per collected document if it is set to true. This class therefore assumes
*                  that the supplier is fast, with performance on the order of a volatile read.
* @param lowLevelCancellation true if the collector should check for cancellation on each collected document, false if the check
*                  should be performed only once per segment
* @param in wrapped collector
*/
public CancellableCollector(Provider<Boolean> cancelled, boolean lowLevelCancellation, Collector in) {
super(in);
this.cancelled = cancelled;
this.leafLevel = lowLevelCancellation;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
if (cancelled.get()) {
throw new TaskCancelledException("cancelled");
}
if (leafLevel) {
return new CancellableLeafCollector(super.getLeafCollector(context));
} else {
return super.getLeafCollector(context);
}
}
private class CancellableLeafCollector extends FilterLeafCollector {
private CancellableLeafCollector(LeafCollector in) {
super(in);
}
@Override
public void collect(int doc) throws IOException {
if (cancelled.get()) {
throw new TaskCancelledException("cancelled");
}
super.collect(doc);
}
}
}
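A small usage sketch, independent of the search phases below: the AtomicBoolean flag and the plain Lucene top-docs collector stand in for the task cancellation flag and the real collector chain that QueryPhase wires up; only CancellableCollector itself comes from this change.

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.elasticsearch.search.query.CancellableCollector;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

// Illustrative only: wraps a vanilla collector so the search aborts with
// TaskCancelledException once the shared flag flips to true.
final class CancellableSearchSketch {
    static void search(IndexSearcher searcher, AtomicBoolean cancelled) throws IOException {
        TopScoreDocCollector topDocs = TopScoreDocCollector.create(10);
        // lowLevelCancellation = true: the flag is consulted for every collected document
        Collector collector = new CancellableCollector(cancelled::get, true, topDocs);
        searcher.search(new MatchAllDocsQuery(), collector);
    }
}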
View File
@ -362,6 +362,15 @@ public class QueryPhase implements SearchPhase {
} }
} }
if (collector != null) {
final Collector child = collector;
collector = new CancellableCollector(searchContext.getTask()::isCancelled, searchContext.lowLevelCancellation(), collector);
if (doProfile) {
collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_CANCELLED,
Collections.singletonList((InternalProfileCollector) child));
}
}
try { try {
if (collector != null) { if (collector != null) {
if (doProfile) { if (doProfile) {
View File
@ -22,10 +22,13 @@ package org.elasticsearch.search.query;
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequest;
import java.io.IOException; import java.io.IOException;
@ -82,4 +85,9 @@ public class QuerySearchRequest extends TransportRequest implements IndicesReque
dfs.writeTo(out); dfs.writeTo(out);
OriginalIndices.writeOriginalIndices(originalIndices, out); OriginalIndices.writeOriginalIndices(originalIndices, out);
} }
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new SearchTask(id, type, action, getDescription(), parentTaskId);
}
} }
View File
@ -39,11 +39,10 @@ import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
@ -394,7 +393,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
* Sets a query used for filtering out suggested phrases (collation). * Sets a query used for filtering out suggested phrases (collation).
*/ */
public PhraseSuggestionBuilder collateQuery(String collateQuery) { public PhraseSuggestionBuilder collateQuery(String collateQuery) {
this.collateQuery = new Script(collateQuery, ScriptService.ScriptType.INLINE, "mustache", Collections.emptyMap()); this.collateQuery = new Script(collateQuery, ScriptType.INLINE, "mustache", Collections.emptyMap());
return this; return this;
} }
View File
@ -29,8 +29,10 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -42,11 +44,13 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.engine.SnapshotFailedEngineException; import org.elasticsearch.index.engine.SnapshotFailedEngineException;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException; import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus.Stage;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.Repository;
@ -80,7 +84,7 @@ import static org.elasticsearch.cluster.SnapshotsInProgress.completed;
* This service runs on data and master nodes and controls currently snapshotted shards on these nodes. It is responsible for * This service runs on data and master nodes and controls currently snapshotted shards on these nodes. It is responsible for
* starting and stopping shard level snapshots * starting and stopping shard level snapshots
*/ */
public class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener { public class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener {
public static final String UPDATE_SNAPSHOT_ACTION_NAME = "internal:cluster/snapshot/update_snapshot"; public static final String UPDATE_SNAPSHOT_ACTION_NAME = "internal:cluster/snapshot/update_snapshot";
@ -156,11 +160,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
SnapshotsInProgress prev = event.previousState().custom(SnapshotsInProgress.TYPE); SnapshotsInProgress prev = event.previousState().custom(SnapshotsInProgress.TYPE);
SnapshotsInProgress curr = event.state().custom(SnapshotsInProgress.TYPE); SnapshotsInProgress curr = event.state().custom(SnapshotsInProgress.TYPE);
if (prev == null) { if ((prev == null && curr != null) || (prev != null && prev.equals(curr) == false)) {
if (curr != null) {
processIndexShardSnapshots(event);
}
} else if (prev.equals(curr) == false) {
processIndexShardSnapshots(event); processIndexShardSnapshots(event);
} }
String masterNodeId = event.state().nodes().getMasterNodeId(); String masterNodeId = event.state().nodes().getMasterNodeId();
@ -173,6 +173,18 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
} }
} }
@Override
public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {
// abort any snapshots occurring on the soon-to-be closed shard
Map<Snapshot, SnapshotShards> snapshotShardsMap = shardSnapshots;
for (Map.Entry<Snapshot, SnapshotShards> snapshotShards : snapshotShardsMap.entrySet()) {
Map<ShardId, IndexShardSnapshotStatus> shards = snapshotShards.getValue().shards;
if (shards.containsKey(shardId)) {
logger.debug("[{}] shard closing, abort snapshotting for snapshot [{}]", shardId, snapshotShards.getKey().getSnapshotId());
shards.get(shardId).abort();
}
}
}
/** /**
* Returns status of shards that are snapshotted on the node and belong to the given snapshot * Returns status of shards that are snapshotted on the node and belong to the given snapshot
@ -205,6 +217,16 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
final Snapshot snapshot = entry.getKey(); final Snapshot snapshot = entry.getKey();
if (snapshotsInProgress != null && snapshotsInProgress.snapshot(snapshot) != null) { if (snapshotsInProgress != null && snapshotsInProgress.snapshot(snapshot) != null) {
survivors.put(entry.getKey(), entry.getValue()); survivors.put(entry.getKey(), entry.getValue());
} else {
// abort any running snapshots of shards for the removed entry;
// this could happen if for some reason the cluster state update for aborting
// running shards is missed, then the snapshot is removed in a subsequent cluster
// state update, which is being processed here
for (IndexShardSnapshotStatus snapshotStatus : entry.getValue().shards.values()) {
if (snapshotStatus.stage() == Stage.INIT || snapshotStatus.stage() == Stage.STARTED) {
snapshotStatus.abort();
}
}
} }
} }
@ -221,7 +243,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
if (entry.state() == SnapshotsInProgress.State.STARTED) { if (entry.state() == SnapshotsInProgress.State.STARTED) {
Map<ShardId, IndexShardSnapshotStatus> startedShards = new HashMap<>(); Map<ShardId, IndexShardSnapshotStatus> startedShards = new HashMap<>();
SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot()); SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot());
for (ObjectObjectCursor<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shard : entry.shards()) { for (ObjectObjectCursor<ShardId, ShardSnapshotStatus> shard : entry.shards()) {
// Add all new shards to start processing on // Add all new shards to start processing on
if (localNodeId.equals(shard.value.nodeId())) { if (localNodeId.equals(shard.value.nodeId())) {
if (shard.value.state() == SnapshotsInProgress.State.INIT && (snapshotShards == null || !snapshotShards.shards.containsKey(shard.key))) { if (shard.value.state() == SnapshotsInProgress.State.INIT && (snapshotShards == null || !snapshotShards.shards.containsKey(shard.key))) {
@ -249,7 +271,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
// Abort all running shards for this snapshot // Abort all running shards for this snapshot
SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot()); SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot());
if (snapshotShards != null) { if (snapshotShards != null) {
for (ObjectObjectCursor<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shard : entry.shards()) { for (ObjectObjectCursor<ShardId, ShardSnapshotStatus> shard : entry.shards()) {
IndexShardSnapshotStatus snapshotStatus = snapshotShards.shards.get(shard.key); IndexShardSnapshotStatus snapshotStatus = snapshotShards.shards.get(shard.key);
if (snapshotStatus != null) { if (snapshotStatus != null) {
switch (snapshotStatus.stage()) { switch (snapshotStatus.stage()) {
@ -263,12 +285,12 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
case DONE: case DONE:
logger.debug("[{}] trying to cancel snapshot on the shard [{}] that is already done, updating status on the master", entry.snapshot(), shard.key); logger.debug("[{}] trying to cancel snapshot on the shard [{}] that is already done, updating status on the master", entry.snapshot(), shard.key);
updateIndexShardSnapshotStatus(entry.snapshot(), shard.key, updateIndexShardSnapshotStatus(entry.snapshot(), shard.key,
new SnapshotsInProgress.ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.SUCCESS)); new ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.SUCCESS));
break; break;
case FAILURE: case FAILURE:
logger.debug("[{}] trying to cancel snapshot on the shard [{}] that has already failed, updating status on the master", entry.snapshot(), shard.key); logger.debug("[{}] trying to cancel snapshot on the shard [{}] that has already failed, updating status on the master", entry.snapshot(), shard.key);
updateIndexShardSnapshotStatus(entry.snapshot(), shard.key, updateIndexShardSnapshotStatus(entry.snapshot(), shard.key,
new SnapshotsInProgress.ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.FAILED, snapshotStatus.failure())); new ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.FAILED, snapshotStatus.failure()));
break; break;
default: default:
throw new IllegalStateException("Unknown snapshot shard stage " + snapshotStatus.stage()); throw new IllegalStateException("Unknown snapshot shard stage " + snapshotStatus.stage());
@ -309,18 +331,18 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
@Override @Override
public void doRun() { public void doRun() {
snapshot(indexShard, entry.getKey(), indexId, shardEntry.getValue()); snapshot(indexShard, entry.getKey(), indexId, shardEntry.getValue());
updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.SUCCESS)); updateIndexShardSnapshotStatus(entry.getKey(), shardId, new ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.SUCCESS));
} }
@Override @Override
public void onFailure(Exception e) { public void onFailure(Exception e) {
logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] [{}] failed to create snapshot", shardId, entry.getKey()), e); logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] [{}] failed to create snapshot", shardId, entry.getKey()), e);
updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.FAILED, ExceptionsHelper.detailedMessage(e))); updateIndexShardSnapshotStatus(entry.getKey(), shardId, new ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.FAILED, ExceptionsHelper.detailedMessage(e)));
} }
}); });
} catch (Exception e) { } catch (Exception e) {
updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.FAILED, ExceptionsHelper.detailedMessage(e))); updateIndexShardSnapshotStatus(entry.getKey(), shardId, new ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.FAILED, ExceptionsHelper.detailedMessage(e)));
} }
} }
} }
@ -383,23 +405,23 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
if (snapshot.state() == SnapshotsInProgress.State.STARTED || snapshot.state() == SnapshotsInProgress.State.ABORTED) { if (snapshot.state() == SnapshotsInProgress.State.STARTED || snapshot.state() == SnapshotsInProgress.State.ABORTED) {
Map<ShardId, IndexShardSnapshotStatus> localShards = currentSnapshotShards(snapshot.snapshot()); Map<ShardId, IndexShardSnapshotStatus> localShards = currentSnapshotShards(snapshot.snapshot());
if (localShards != null) { if (localShards != null) {
ImmutableOpenMap<ShardId, SnapshotsInProgress.ShardSnapshotStatus> masterShards = snapshot.shards(); ImmutableOpenMap<ShardId, ShardSnapshotStatus> masterShards = snapshot.shards();
for(Map.Entry<ShardId, IndexShardSnapshotStatus> localShard : localShards.entrySet()) { for(Map.Entry<ShardId, IndexShardSnapshotStatus> localShard : localShards.entrySet()) {
ShardId shardId = localShard.getKey(); ShardId shardId = localShard.getKey();
IndexShardSnapshotStatus localShardStatus = localShard.getValue(); IndexShardSnapshotStatus localShardStatus = localShard.getValue();
SnapshotsInProgress.ShardSnapshotStatus masterShard = masterShards.get(shardId); ShardSnapshotStatus masterShard = masterShards.get(shardId);
if (masterShard != null && masterShard.state().completed() == false) { if (masterShard != null && masterShard.state().completed() == false) {
// Master knows about the shard and thinks it has not completed // Master knows about the shard and thinks it has not completed
if (localShardStatus.stage() == IndexShardSnapshotStatus.Stage.DONE) { if (localShardStatus.stage() == Stage.DONE) {
// but we think the shard is done - we need to make new master know that the shard is done // but we think the shard is done - we need to make new master know that the shard is done
logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard is done locally, updating status on the master", snapshot.snapshot(), shardId); logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard is done locally, updating status on the master", snapshot.snapshot(), shardId);
updateIndexShardSnapshotStatus(snapshot.snapshot(), shardId, updateIndexShardSnapshotStatus(snapshot.snapshot(), shardId,
new SnapshotsInProgress.ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.SUCCESS)); new ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.SUCCESS));
} else if (localShard.getValue().stage() == IndexShardSnapshotStatus.Stage.FAILURE) { } else if (localShard.getValue().stage() == Stage.FAILURE) {
// but we think the shard failed - we need to make new master know that the shard failed // but we think the shard failed - we need to make new master know that the shard failed
logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard failed locally, updating status on master", snapshot.snapshot(), shardId); logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard failed locally, updating status on master", snapshot.snapshot(), shardId);
updateIndexShardSnapshotStatus(snapshot.snapshot(), shardId, updateIndexShardSnapshotStatus(snapshot.snapshot(), shardId,
new SnapshotsInProgress.ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.FAILED, localShardStatus.failure())); new ShardSnapshotStatus(event.state().nodes().getLocalNodeId(), SnapshotsInProgress.State.FAILED, localShardStatus.failure()));
} }
} }
@ -427,7 +449,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
public static class UpdateIndexShardSnapshotStatusRequest extends TransportRequest { public static class UpdateIndexShardSnapshotStatusRequest extends TransportRequest {
private Snapshot snapshot; private Snapshot snapshot;
private ShardId shardId; private ShardId shardId;
private SnapshotsInProgress.ShardSnapshotStatus status; private ShardSnapshotStatus status;
private volatile boolean processed; // state field, no need to serialize private volatile boolean processed; // state field, no need to serialize
@ -435,7 +457,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
} }
public UpdateIndexShardSnapshotStatusRequest(Snapshot snapshot, ShardId shardId, SnapshotsInProgress.ShardSnapshotStatus status) { public UpdateIndexShardSnapshotStatusRequest(Snapshot snapshot, ShardId shardId, ShardSnapshotStatus status) {
this.snapshot = snapshot; this.snapshot = snapshot;
this.shardId = shardId; this.shardId = shardId;
this.status = status; this.status = status;
@ -446,7 +468,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
super.readFrom(in); super.readFrom(in);
snapshot = new Snapshot(in); snapshot = new Snapshot(in);
shardId = ShardId.readShardId(in); shardId = ShardId.readShardId(in);
status = SnapshotsInProgress.ShardSnapshotStatus.readShardSnapshotStatus(in); status = new ShardSnapshotStatus(in);
} }
@Override @Override
@ -465,7 +487,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
return shardId; return shardId;
} }
public SnapshotsInProgress.ShardSnapshotStatus status() { public ShardSnapshotStatus status() {
return status; return status;
} }
@ -486,7 +508,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
/** /**
* Updates the shard status * Updates the shard status
*/ */
public void updateIndexShardSnapshotStatus(Snapshot snapshot, ShardId shardId, SnapshotsInProgress.ShardSnapshotStatus status) { public void updateIndexShardSnapshotStatus(Snapshot snapshot, ShardId shardId, ShardSnapshotStatus status) {
UpdateIndexShardSnapshotStatusRequest request = new UpdateIndexShardSnapshotStatusRequest(snapshot, shardId, status); UpdateIndexShardSnapshotStatusRequest request = new UpdateIndexShardSnapshotStatusRequest(snapshot, shardId, status);
try { try {
if (clusterService.state().nodes().isLocalNodeElectedMaster()) { if (clusterService.state().nodes().isLocalNodeElectedMaster()) {
@ -533,7 +555,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
int changedCount = 0; int changedCount = 0;
final List<SnapshotsInProgress.Entry> entries = new ArrayList<>(); final List<SnapshotsInProgress.Entry> entries = new ArrayList<>();
for (SnapshotsInProgress.Entry entry : snapshots.entries()) { for (SnapshotsInProgress.Entry entry : snapshots.entries()) {
ImmutableOpenMap.Builder<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shards = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableOpenMap.builder();
boolean updated = false; boolean updated = false;
for (int i = 0; i < batchSize; i++) { for (int i = 0; i < batchSize; i++) {


@ -793,12 +793,12 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
} }
private boolean removedNodesCleanupNeeded(ClusterChangedEvent event) { private boolean removedNodesCleanupNeeded(ClusterChangedEvent event) {
// Check if we just became the master
boolean newMaster = !event.previousState().nodes().isLocalNodeElectedMaster();
SnapshotsInProgress snapshotsInProgress = event.state().custom(SnapshotsInProgress.TYPE); SnapshotsInProgress snapshotsInProgress = event.state().custom(SnapshotsInProgress.TYPE);
if (snapshotsInProgress == null) { if (snapshotsInProgress == null) {
return false; return false;
} }
// Check if we just became the master
boolean newMaster = !event.previousState().nodes().isLocalNodeElectedMaster();
for (SnapshotsInProgress.Entry snapshot : snapshotsInProgress.entries()) { for (SnapshotsInProgress.Entry snapshot : snapshotsInProgress.entries()) {
if (newMaster && (snapshot.state() == State.SUCCESS || snapshot.state() == State.INIT)) { if (newMaster && (snapshot.state() == State.SUCCESS || snapshot.state() == State.INIT)) {
// We just replaced old master and snapshots in intermediate states needs to be cleaned // We just replaced old master and snapshots in intermediate states needs to be cleaned


@ -0,0 +1,38 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.tasks;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import java.io.IOException;
/**
* A generic exception that can be thrown by a task when it's cancelled by the task manager API
*/
public class TaskCancelledException extends ElasticsearchException {
public TaskCancelledException(String msg) {
super(msg);
}
public TaskCancelledException(StreamInput in) throws IOException{
super(in);
}
}
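This exception gives cancellable operations a dedicated, wire-serializable failure type. A minimal sketch of how a long-running task body might surface cancellation with it; the helper method and its name are illustrative, not part of this change:

    // Hypothetical helper: assumes the operation holds a reference to its CancellableTask.
    private static void ensureNotCancelled(CancellableTask task) {
        if (task.isCancelled()) {
            // Serializes across nodes like any ElasticsearchException, via the StreamInput constructor above.
            throw new TaskCancelledException("task " + task.getId() + " was cancelled");
        }
    }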


@ -459,7 +459,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
if (cancellationReason == null) { if (cancellationReason == null) {
nodesWithChildTasks.add(nodeId); nodesWithChildTasks.add(nodeId);
} else { } else {
throw new IllegalStateException("cannot register child task request, the task is already cancelled"); throw new TaskCancelledException("cannot register child task request, the task is already cancelled");
} }
} }
} }


@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
/**
* Transport request handlers that use the task context
*/
public abstract class TaskAwareTransportRequestHandler<T extends TransportRequest> implements TransportRequestHandler<T> {
@Override
public final void messageReceived(T request, TransportChannel channel) throws Exception {
throw new UnsupportedOperationException("the task parameter is required");
}
}
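Since the two-argument messageReceived is final and always throws, concrete handlers must implement the task-aware overload. A rough sketch of such a subclass, assuming the three-argument messageReceived(request, channel, task) method declared on TransportRequestHandler; the handler, request type, and executeLocally method are placeholders:

public class MyTaskAwareHandler extends TaskAwareTransportRequestHandler<MyRequest> {
    @Override
    public void messageReceived(MyRequest request, TransportChannel channel, Task task) throws Exception {
        // The task is always available here, so the handler can register child
        // requests against it or check for cancellation before doing any work.
        channel.sendResponse(executeLocally(request, task)); // executeLocally is hypothetical
    }
}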


@ -46,6 +46,8 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -462,6 +464,27 @@ public class TransportService extends AbstractLifecycleComponent {
asyncSender.sendRequest(node, action, request, options, handler); asyncSender.sendRequest(node, action, request, options, handler);
} }
public <T extends TransportResponse> void sendChildRequest(final DiscoveryNode node, final String action,
final TransportRequest request, final Task parentTask,
final TransportResponseHandler<T> handler) {
sendChildRequest(node, action, request, parentTask, TransportRequestOptions.EMPTY, handler);
}
public <T extends TransportResponse> void sendChildRequest(final DiscoveryNode node, final String action,
final TransportRequest request, final Task parentTask,
final TransportRequestOptions options,
final TransportResponseHandler<T> handler) {
request.setParentTask(localNode.getId(), parentTask.getId());
try {
taskManager.registerChildTask(parentTask, node.getId());
sendRequest(node, action, request, options, handler);
} catch (TaskCancelledException ex) {
// The parent task is already cancelled - just fail the request
handler.handleException(new TransportException(ex));
}
}
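The new sendChildRequest overloads link an outgoing request to its parent task before dispatch: setParentTask records the relationship on the request, registerChildTask tracks it for cancellation, and a parent that is already cancelled fails fast through the handler instead of going on the wire. A hedged usage sketch; the node, action name, request, response type, and listener are placeholders:

transportService.sendChildRequest(node, MyAction.NAME, shardRequest, parentTask,
    new TransportResponseHandler<MyShardResponse>() {
        @Override
        public MyShardResponse newInstance() {
            return new MyShardResponse();
        }

        @Override
        public void handleResponse(MyShardResponse response) {
            listener.onResponse(response);
        }

        @Override
        public void handleException(TransportException exp) {
            // Also reached when the parent task was already cancelled; the
            // TaskCancelledException arrives wrapped in a TransportException.
            listener.onFailure(exp);
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }
    });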
private <T extends TransportResponse> void sendRequestInternal(final DiscoveryNode node, final String action, private <T extends TransportResponse> void sendRequestInternal(final DiscoveryNode node, final String action,
final TransportRequest request, final TransportRequest request,
final TransportRequestOptions options, final TransportRequestOptions options,


@ -795,6 +795,7 @@ public class ExceptionSerializationTests extends ESTestCase {
ids.put(143, org.elasticsearch.script.ScriptException.class); ids.put(143, org.elasticsearch.script.ScriptException.class);
ids.put(144, org.elasticsearch.cluster.NotMasterException.class); ids.put(144, org.elasticsearch.cluster.NotMasterException.class);
ids.put(145, org.elasticsearch.ElasticsearchStatusException.class); ids.put(145, org.elasticsearch.ElasticsearchStatusException.class);
ids.put(146, org.elasticsearch.tasks.TaskCancelledException.class);
Map<Class<? extends ElasticsearchException>, Integer> reverse = new HashMap<>(); Map<Class<? extends ElasticsearchException>, Integer> reverse = new HashMap<>();
for (Map.Entry<Integer, Class<? extends ElasticsearchException>> entry : ids.entrySet()) { for (Map.Entry<Integer, Class<? extends ElasticsearchException>> entry : ids.entrySet()) {


@ -86,8 +86,8 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.search.SearchTransportService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@ -259,7 +259,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
String indexOrAlias = randomIndexOrAlias(); String indexOrAlias = randomIndexOrAlias();
client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id") UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id")
.script(new Script("ctx.op='delete'", ScriptService.ScriptType.INLINE, CustomScriptPlugin.NAME, Collections.emptyMap())); .script(new Script("ctx.op='delete'", ScriptType.INLINE, CustomScriptPlugin.NAME, Collections.emptyMap()));
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult());


@ -35,6 +35,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -168,7 +169,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
try { try {
awaitBusy(() -> { awaitBusy(() -> {
if (((CancellableTask) task).isCancelled()) { if (((CancellableTask) task).isCancelled()) {
throw new RuntimeException("Cancelled"); throw new TaskCancelledException("Cancelled");
} }
return false; return false;
}); });


@ -49,7 +49,8 @@ import java.util.function.Function;
import static org.elasticsearch.action.DocWriteRequest.OpType; import static org.elasticsearch.action.DocWriteRequest.OpType;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.ScriptType;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;


@ -85,7 +85,7 @@ public class SearchAsyncActionTests extends ESTestCase {
Map<String, DiscoveryNode> lookup = new HashMap<>(); Map<String, DiscoveryNode> lookup = new HashMap<>();
lookup.put(primaryNode.getId(), primaryNode); lookup.put(primaryNode.getId(), primaryNode);
AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<TestSearchPhaseResult>(logger, transportService, lookup::get, AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<TestSearchPhaseResult>(logger, transportService, lookup::get,
Collections.emptyMap(), null, request, responseListener, shardsIter, 0, 0) { Collections.emptyMap(), null, request, responseListener, shardsIter, 0, 0, null) {
TestSearchResponse response = new TestSearchResponse(); TestSearchResponse response = new TestSearchResponse();
@Override @Override


@ -36,7 +36,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.watcher.ResourceWatcherService;


@ -132,7 +132,9 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
}); });
state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true));
assertThat(state.nodes().getSize(), equalTo(1)); // verify that we still see the local node in the cluster state // verify that both nodes are still in the cluster state but there is no master
assertThat(state.nodes().getSize(), equalTo(2));
assertThat(state.nodes().getMasterNode(), equalTo(null));
logger.info("--> starting the previous master node again..."); logger.info("--> starting the previous master node again...");
internalCluster().startNode(settings); internalCluster().startNode(settings);


@ -35,7 +35,7 @@ import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.ESIntegTestCase.Scope;
@ -122,12 +122,12 @@ public class NoMasterNodeIT extends ESIntegTestCase {
checkWriteAction( checkWriteAction(
false, timeout, false, timeout,
client().prepareUpdate("test", "type1", "1") client().prepareUpdate("test", "type1", "1")
.setScript(new Script("test script", ScriptService.ScriptType.INLINE, null, null)).setTimeout(timeout)); .setScript(new Script("test script", ScriptType.INLINE, null, null)).setTimeout(timeout));
checkWriteAction( checkWriteAction(
autoCreateIndex, timeout, autoCreateIndex, timeout,
client().prepareUpdate("no_index", "type1", "1") client().prepareUpdate("no_index", "type1", "1")
.setScript(new Script("test script", ScriptService.ScriptType.INLINE, null, null)).setTimeout(timeout)); .setScript(new Script("test script", ScriptType.INLINE, null, null)).setTimeout(timeout));
checkWriteAction(false, timeout, checkWriteAction(false, timeout,


@ -993,7 +993,11 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
String isolatedNode = randomBoolean() ? masterNode : nonMasterNode; String isolatedNode = randomBoolean() ? masterNode : nonMasterNode;
TwoPartitions partitions = isolateNode(isolatedNode); TwoPartitions partitions = isolateNode(isolatedNode);
NetworkDisruption networkDisruption = addRandomDisruptionType(partitions); // we cannot use the NetworkUnresponsive disruption type here as it will swallow the "shard failed" request, calling neither
// onSuccess nor onFailure on the provided listener.
NetworkLinkDisruptionType disruptionType = new NetworkDisconnect();
NetworkDisruption networkDisruption = new NetworkDisruption(partitions, disruptionType);
setDisruptionScheme(networkDisruption);
networkDisruption.startDisrupting(); networkDisruption.startDisrupting();
service.localShardFailed(failedShard, "simulated", new CorruptIndexException("simulated", (String) null), new service.localShardFailed(failedShard, "simulated", new CorruptIndexException("simulated", (String) null), new


@ -35,7 +35,7 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory; import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Before; import org.junit.Before;


@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@ -319,7 +319,7 @@ public class InnerHitBuilderTests extends ESTestCase {
} }
static SearchSourceBuilder.ScriptField randomScript() { static SearchSourceBuilder.ScriptField randomScript() {
ScriptService.ScriptType randomScriptType = randomFrom(ScriptService.ScriptType.values()); ScriptType randomScriptType = randomFrom(ScriptType.values());
Map<String, Object> randomMap = null; Map<String, Object> randomMap = null;
if (randomBoolean()) { if (randomBoolean()) {
randomMap = new HashMap<>(); randomMap = new HashMap<>();


@ -30,7 +30,7 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;


@ -23,7 +23,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.ScriptQueryBuilder.ScriptQuery; import org.elasticsearch.index.query.ScriptQueryBuilder.ScriptQuery;
import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.AbstractQueryTestCase;


@ -47,7 +47,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.AbstractQueryTestCase;
@ -168,7 +168,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
String script = "1"; String script = "1";
Map<String, Object> params = Collections.emptyMap(); Map<String, Object> params = Collections.emptyMap();
functionBuilder = new ScriptScoreFunctionBuilder( functionBuilder = new ScriptScoreFunctionBuilder(
new Script(script, ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, params)); new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params));
break; break;
case 3: case 3:
RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilderWithFixedSeed(); RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilderWithFixedSeed();


@ -376,7 +376,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice
transportService, null, clusterService); transportService, null, clusterService);
final ShardStateAction shardStateAction = mock(ShardStateAction.class); final ShardStateAction shardStateAction = mock(ShardStateAction.class);
return new IndicesClusterStateService(settings, indicesService, clusterService, return new IndicesClusterStateService(settings, indicesService, clusterService,
threadPool, recoveryTargetService, shardStateAction, null, repositoriesService, null, null, null, null); threadPool, recoveryTargetService, shardStateAction, null, repositoriesService, null, null, null, null, null);
} }
private class RecordingIndicesService extends MockIndicesService { private class RecordingIndicesService extends MockIndicesService {


@ -61,9 +61,9 @@ public class RestMainActionTests extends ESTestCase {
BytesRestResponse response = RestMainAction.convertMainResponse(mainResponse, restRequest, builder); BytesRestResponse response = RestMainAction.convertMainResponse(mainResponse, restRequest, builder);
assertNotNull(response); assertNotNull(response);
assertEquals(expectedStatus, response.status()); assertEquals(expectedStatus, response.status());
assertEquals(0, response.content().length());
assertEquals(0, builder.bytes().length()); // the empty responses are handled in the HTTP layer so we do
// not assert on them here
} }
public void testGetResponse() throws Exception { public void testGetResponse() throws Exception {


@ -118,7 +118,7 @@ public class RestAnalyzeActionTests extends ESTestCase {
assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
} }
public void testDeprecatedParamException() throws Exception { public void testDeprecatedParamIn2xException() throws Exception {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> RestAnalyzeAction.buildFromContent( () -> RestAnalyzeAction.buildFromContent(
XContentFactory.jsonBuilder() XContentFactory.jsonBuilder()
@ -165,5 +165,4 @@ public class RestAnalyzeActionTests extends ESTestCase {
, new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); , new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
} }
} }


@ -26,8 +26,6 @@ import org.elasticsearch.test.ESTestCase;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
// TODO: these really should just be part of ScriptService tests, there is nothing special about them // TODO: these really should just be part of ScriptService tests, there is nothing special about them
public class FileScriptTests extends ESTestCase { public class FileScriptTests extends ESTestCase {
@ -56,7 +54,7 @@ public class FileScriptTests extends ESTestCase {
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false").build(); .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false").build();
ScriptService scriptService = makeScriptService(settings); ScriptService scriptService = makeScriptService(settings);
Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); Script script = new Script("script1", ScriptType.FILE, MockScriptEngine.NAME, null);
CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
assertNotNull(compiledScript); assertNotNull(compiledScript);
MockCompiledScript executable = (MockCompiledScript) compiledScript.compiled(); MockCompiledScript executable = (MockCompiledScript) compiledScript.compiled();
@ -71,7 +69,7 @@ public class FileScriptTests extends ESTestCase {
.put("script.engine." + MockScriptEngine.NAME + ".file.update", "false") .put("script.engine." + MockScriptEngine.NAME + ".file.update", "false")
.put("script.engine." + MockScriptEngine.NAME + ".file.ingest", "false").build(); .put("script.engine." + MockScriptEngine.NAME + ".file.ingest", "false").build();
ScriptService scriptService = makeScriptService(settings); ScriptService scriptService = makeScriptService(settings);
Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); Script script = new Script("script1", ScriptType.FILE, MockScriptEngine.NAME, null);
for (ScriptContext context : ScriptContext.Standard.values()) { for (ScriptContext context : ScriptContext.Standard.values()) {
try { try {
scriptService.compile(script, context, Collections.emptyMap()); scriptService.compile(script, context, Collections.emptyMap());


@ -48,7 +48,7 @@ import java.util.concurrent.ExecutionException;
import java.util.function.Function; import java.util.function.Function;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static org.elasticsearch.script.ScriptService.ScriptType;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;


@ -30,7 +30,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.watcher.ResourceWatcherService;


@ -25,9 +25,7 @@ import org.elasticsearch.test.ESTestCase;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
@ -58,7 +56,7 @@ public class ScriptContextTests extends ESTestCase {
public void testCustomGlobalScriptContextSettings() throws Exception { public void testCustomGlobalScriptContextSettings() throws Exception {
ScriptService scriptService = makeScriptService(); ScriptService scriptService = makeScriptService();
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { for (ScriptType scriptType : ScriptType.values()) {
try { try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap()); scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap());
@ -71,7 +69,7 @@ public class ScriptContextTests extends ESTestCase {
public void testCustomScriptContextSettings() throws Exception { public void testCustomScriptContextSettings() throws Exception {
ScriptService scriptService = makeScriptService(); ScriptService scriptService = makeScriptService();
Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null); Script script = new Script("1", ScriptType.INLINE, MockScriptEngine.NAME, null);
try { try {
scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap()); scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap());
fail("script compilation should have been rejected"); fail("script compilation should have been rejected");
@ -87,7 +85,7 @@ public class ScriptContextTests extends ESTestCase {
public void testUnknownPluginScriptContext() throws Exception { public void testUnknownPluginScriptContext() throws Exception {
ScriptService scriptService = makeScriptService(); ScriptService scriptService = makeScriptService();
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { for (ScriptType scriptType : ScriptType.values()) {
try { try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap()); scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap());
@ -106,7 +104,7 @@ public class ScriptContextTests extends ESTestCase {
} }
}; };
ScriptService scriptService = makeScriptService(); ScriptService scriptService = makeScriptService();
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { for (ScriptType scriptType : ScriptType.values()) {
try { try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
scriptService.compile(script, context, Collections.emptyMap()); scriptService.compile(script, context, Collections.emptyMap());


@ -24,7 +24,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;


@ -21,7 +21,6 @@ package org.elasticsearch.script;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.After; import org.junit.After;


@ -33,7 +33,6 @@ import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.watcher.ResourceWatcherService;


@ -66,9 +66,9 @@ public class ScriptTests extends ESTestCase {
private Script createScript(XContent xContent) throws IOException { private Script createScript(XContent xContent) throws IOException {
final Map<String, Object> params = randomBoolean() ? null : Collections.singletonMap("key", "value"); final Map<String, Object> params = randomBoolean() ? null : Collections.singletonMap("key", "value");
ScriptService.ScriptType scriptType = randomFrom(ScriptService.ScriptType.values()); ScriptType scriptType = randomFrom(ScriptType.values());
String script; String script;
if (scriptType == ScriptService.ScriptType.INLINE) { if (scriptType == ScriptType.INLINE) {
try (XContentBuilder builder = XContentBuilder.builder(xContent)) { try (XContentBuilder builder = XContentBuilder.builder(xContent)) {
builder.startObject(); builder.startObject();
builder.field("field", randomAsciiOfLengthBetween(1, 5)); builder.field("field", randomAsciiOfLengthBetween(1, 5));
@ -83,7 +83,7 @@ public class ScriptTests extends ESTestCase {
scriptType, scriptType,
randomFrom("_lang1", "_lang2", null), randomFrom("_lang1", "_lang2", null),
params, params,
scriptType == ScriptService.ScriptType.INLINE ? xContent.type() : null scriptType == ScriptType.INLINE ? xContent.type() : null
); );
} }


@ -0,0 +1,299 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollAction;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SearchCancellationIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(ScriptedBlockPlugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder().put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean()).build();
}
private void indexTestData() {
for (int i = 0; i < 10; i++) {
// Make sure we have a few segments
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for(int j=0; j<10; j++) {
bulkRequestBuilder.add(client().prepareIndex("test", "type", Integer.toString(i*10 + j)).setSource("field", "value"));
}
assertNoFailures(bulkRequestBuilder.get());
}
}
private List<ScriptedBlockPlugin> initBlockFactory() {
List<ScriptedBlockPlugin> plugins = new ArrayList<>();
for (PluginsService pluginsService : internalCluster().getDataNodeInstances(PluginsService.class)) {
plugins.addAll(pluginsService.filterPlugins(ScriptedBlockPlugin.class));
}
for (ScriptedBlockPlugin plugin : plugins) {
plugin.scriptedBlockFactory.reset();
plugin.scriptedBlockFactory.enableBlock();
}
return plugins;
}
private void awaitForBlock(List<ScriptedBlockPlugin> plugins) throws Exception {
int numberOfShards = getNumShards("test").numPrimaries;
assertBusy(() -> {
int numberOfBlockedPlugins = 0;
for (ScriptedBlockPlugin plugin : plugins) {
numberOfBlockedPlugins += plugin.scriptedBlockFactory.hits.get();
}
logger.info("The plugin blocked on {} out of {} shards", numberOfBlockedPlugins, numberOfShards);
assertThat(numberOfBlockedPlugins, greaterThan(0));
});
}
private void disableBlocks(List<ScriptedBlockPlugin> plugins) throws Exception {
for (ScriptedBlockPlugin plugin : plugins) {
plugin.scriptedBlockFactory.disableBlock();
}
}
private void cancelSearch(String action) {
ListTasksResponse listTasksResponse = client().admin().cluster().prepareListTasks().setActions(action).get();
assertThat(listTasksResponse.getTasks(), hasSize(1));
TaskInfo searchTask = listTasksResponse.getTasks().get(0);
logger.info("Cancelling search");
CancelTasksResponse cancelTasksResponse = client().admin().cluster().prepareCancelTasks().setTaskId(searchTask.getTaskId()).get();
assertThat(cancelTasksResponse.getTasks(), hasSize(1));
assertThat(cancelTasksResponse.getTasks().get(0).getTaskId(), equalTo(searchTask.getTaskId()));
}
private SearchResponse ensureSearchWasCancelled(ListenableActionFuture<SearchResponse> searchResponse) {
try {
SearchResponse response = searchResponse.actionGet();
logger.info("Search response {}", response);
assertNotEquals("At least one shard should have failed", 0, response.getFailedShards());
return response;
} catch (SearchPhaseExecutionException ex) {
logger.info("All shards failed with", ex);
return null;
}
}
public void testCancellationDuringQueryPhase() throws Exception {
List<ScriptedBlockPlugin> plugins = initBlockFactory();
indexTestData();
logger.info("Executing search");
ListenableActionFuture<SearchResponse> searchResponse = client().prepareSearch("test").setQuery(
scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null)))
.execute();
awaitForBlock(plugins);
cancelSearch(SearchAction.NAME);
disableBlocks(plugins);
ensureSearchWasCancelled(searchResponse);
}
public void testCancellationDuringFetchPhase() throws Exception {
List<ScriptedBlockPlugin> plugins = initBlockFactory();
indexTestData();
logger.info("Executing search");
ListenableActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.addScriptField("test_field",
new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null)
).execute();
awaitForBlock(plugins);
cancelSearch(SearchAction.NAME);
disableBlocks(plugins);
ensureSearchWasCancelled(searchResponse);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21126")
public void testCancellationOfScrollSearches() throws Exception {
List<ScriptedBlockPlugin> plugins = initBlockFactory();
indexTestData();
logger.info("Executing search");
ListenableActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.setScroll(TimeValue.timeValueSeconds(10))
.setSize(5)
.setQuery(
scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null)))
.execute();
awaitForBlock(plugins);
cancelSearch(SearchAction.NAME);
disableBlocks(plugins);
ensureSearchWasCancelled(searchResponse);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21126")
public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exception {
List<ScriptedBlockPlugin> plugins = initBlockFactory();
indexTestData();
// Disable block so the first request would pass
disableBlocks(plugins);
logger.info("Executing search");
TimeValue keepAlive = TimeValue.timeValueSeconds(5);
SearchResponse searchResponse = client().prepareSearch("test")
.setScroll(keepAlive)
.setSize(2)
.setQuery(
scriptQuery(new Script(NativeTestScriptedBlockFactory.TEST_NATIVE_BLOCK_SCRIPT, ScriptType.INLINE, "native", null)))
.get();
assertNotNull(searchResponse.getScrollId());
// Enable block so the second request would block
for (ScriptedBlockPlugin plugin : plugins) {
plugin.scriptedBlockFactory.reset();
plugin.scriptedBlockFactory.enableBlock();
}
String scrollId = searchResponse.getScrollId();
logger.info("Executing scroll with id {}", scrollId);
ListenableActionFuture<SearchResponse> scrollResponse = client().prepareSearchScroll(searchResponse.getScrollId())
.setScroll(keepAlive).execute();
awaitForBlock(plugins);
cancelSearch(SearchScrollAction.NAME);
disableBlocks(plugins);
SearchResponse response = ensureSearchWasCancelled(scrollResponse);
if (response != null){
// The response didn't fail completely - update scroll id
scrollId = response.getScrollId();
}
logger.info("Cleaning scroll with id {}", scrollId);
client().prepareClearScroll().addScrollId(scrollId).get();
}
public static class ScriptedBlockPlugin extends Plugin implements ScriptPlugin {
private NativeTestScriptedBlockFactory scriptedBlockFactory;
public ScriptedBlockPlugin() {
scriptedBlockFactory = new NativeTestScriptedBlockFactory();
}
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Collections.singletonList(scriptedBlockFactory);
}
}
private static class NativeTestScriptedBlockFactory implements NativeScriptFactory {
public static final String TEST_NATIVE_BLOCK_SCRIPT = "native_test_search_block_script";
private final AtomicInteger hits = new AtomicInteger();
private final AtomicBoolean shouldBlock = new AtomicBoolean(true);
public NativeTestScriptedBlockFactory() {
}
public void reset() {
hits.set(0);
}
public void disableBlock() {
shouldBlock.set(false);
}
public void enableBlock() {
shouldBlock.set(true);
}
@Override
public ExecutableScript newScript(Map<String, Object> params) {
return new NativeTestScriptedBlock();
}
@Override
public boolean needsScores() {
return false;
}
@Override
public String getName() {
return TEST_NATIVE_BLOCK_SCRIPT;
}
public class NativeTestScriptedBlock extends AbstractSearchScript {
@Override
public Object run() {
hits.incrementAndGet();
try {
awaitBusy(() -> shouldBlock.get() == false);
} catch (Exception e) {
throw new RuntimeException(e);
}
return true;
}
}
}
}


@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.search.query.CancellableCollector;
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
public class SearchCancellationTests extends ESTestCase {
static Directory dir;
static IndexReader reader;
@BeforeClass
public static void before() throws IOException {
dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
w.setDoRandomForceMerge(false); // we need 2 segments
indexRandomDocuments(w, TestUtil.nextInt(random(), 2, 20));
w.flush();
indexRandomDocuments(w, TestUtil.nextInt(random(), 1, 20));
reader = w.getReader();
w.close();
}
private static void indexRandomDocuments(RandomIndexWriter w, int numDocs) throws IOException {
for (int i = 0; i < numDocs; ++i) {
final int numHoles = random().nextInt(5);
for (int j = 0; j < numHoles; ++j) {
w.addDocument(new Document());
}
Document doc = new Document();
doc.add(new StringField("foo", "bar", Field.Store.NO));
w.addDocument(doc);
}
}
@AfterClass
public static void after() throws IOException {
IOUtils.close(reader, dir);
dir = null;
reader = null;
}
public void testLowLevelCancellableCollector() throws IOException {
TotalHitCountCollector collector = new TotalHitCountCollector();
AtomicBoolean cancelled = new AtomicBoolean();
CancellableCollector cancellableCollector = new CancellableCollector(cancelled::get, true, collector);
final LeafCollector leafCollector = cancellableCollector.getLeafCollector(reader.leaves().get(0));
leafCollector.collect(0);
cancelled.set(true);
expectThrows(TaskCancelledException.class, () -> leafCollector.collect(1));
}
public void testCancellableCollector() throws IOException {
TotalHitCountCollector collector = new TotalHitCountCollector();
AtomicBoolean cancelled = new AtomicBoolean();
CancellableCollector cancellableCollector = new CancellableCollector(cancelled::get, false, collector);
final LeafCollector leafCollector = cancellableCollector.getLeafCollector(reader.leaves().get(0));
leafCollector.collect(0);
cancelled.set(true);
leafCollector.collect(1);
expectThrows(TaskCancelledException.class, () -> cancellableCollector.getLeafCollector(reader.leaves().get(1)));
}
}
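The two tests above pin down the collector's contract: when low-level cancellation is enabled the flag is checked for every collected document, otherwise only when a new per-segment LeafCollector is requested. A minimal sketch with that behaviour, assuming the collector simply wraps a delegate and a BooleanSupplier; the real implementation is org.elasticsearch.search.query.CancellableCollector and may differ in detail:

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FilterCollector;
import org.apache.lucene.search.FilterLeafCollector;
import org.apache.lucene.search.LeafCollector;
import org.elasticsearch.tasks.TaskCancelledException;

import java.io.IOException;
import java.util.function.BooleanSupplier;

class CancellableCollectorSketch extends FilterCollector {
    private final BooleanSupplier cancelled;
    private final boolean lowLevelCancellation;

    CancellableCollectorSketch(BooleanSupplier cancelled, boolean lowLevelCancellation, Collector in) {
        super(in);
        this.cancelled = cancelled;
        this.lowLevelCancellation = lowLevelCancellation;
    }

    @Override
    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
        if (cancelled.getAsBoolean()) {
            throw new TaskCancelledException("cancelled"); // checked once per segment
        }
        LeafCollector leaf = super.getLeafCollector(context);
        if (lowLevelCancellation == false) {
            return leaf;
        }
        return new FilterLeafCollector(leaf) {
            @Override
            public void collect(int doc) throws IOException {
                if (cancelled.getAsBoolean()) {
                    throw new TaskCancelledException("cancelled"); // checked per document
                }
                super.collect(doc);
            }
        };
    }
}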


@ -25,6 +25,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@ -175,11 +176,12 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
try { try {
QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase( QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase(
new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY))); new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY)),
new SearchTask(123L, "", "", "", null));
IntArrayList intCursors = new IntArrayList(1); IntArrayList intCursors = new IntArrayList(1);
intCursors.add(0); intCursors.add(0);
ShardFetchRequest req = new ShardFetchRequest(querySearchResultProvider.id(), intCursors, null /* not a scroll */); ShardFetchRequest req = new ShardFetchRequest(querySearchResultProvider.id(), intCursors, null /* not a scroll */);
service.executeFetchPhase(req); service.executeFetchPhase(req, new SearchTask(123L, "", "", "", null));
} catch (AlreadyClosedException ex) { } catch (AlreadyClosedException ex) {
throw ex; throw ex;
} catch (IllegalStateException ex) { } catch (IllegalStateException ex) {


@ -28,8 +28,7 @@ import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory; import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection; import java.util.Collection;


@ -28,7 +28,8 @@ import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import static java.util.Collections.singletonMap; import static java.util.Collections.singletonMap;
import static org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.ScriptType;
/** /**
* This class contains various mocked scripts that are used in aggregations integration tests. * This class contains various mocked scripts that are used in aggregations integration tests.


@ -30,7 +30,7 @@ import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;


@ -28,7 +28,7 @@ import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;


@ -23,7 +23,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin; import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range;


@ -27,10 +27,9 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.ScoreAccessor;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.LongTermsIT.CustomScriptPlugin;
import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms;


@@ -26,7 +26,7 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;


@@ -37,8 +37,7 @@ import org.elasticsearch.script.AbstractSearchScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.NativeScriptFactory;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptModule;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.bucket.range.Range;
 import org.elasticsearch.test.ESIntegTestCase;


@@ -26,10 +26,9 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
-import org.elasticsearch.search.aggregations.bucket.StringTermsIT.CustomScriptPlugin;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;


@@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -151,7 +151,7 @@ public class MinDocCountIT extends AbstractTermsTestCase {
         YES {
             @Override
             TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) {
-                return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values", ScriptService.ScriptType.INLINE,
+                return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values", ScriptType.INLINE,
                         CustomScriptPlugin.NAME, null));
             }
         };
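
A note on the recurring change in these hunks: the script type enum moves from the nested org.elasticsearch.script.ScriptService.ScriptType to the top-level org.elasticsearch.script.ScriptType, so the affected tests only swap the import and the qualifier at the call site. Below is a minimal sketch of a migrated call site, assuming the four-argument Script constructor visible in the MinDocCountIT hunk above; the field name and script language are illustrative placeholders, not taken from the diff.

    import org.elasticsearch.script.Script;
    import org.elasticsearch.script.ScriptType; // previously org.elasticsearch.script.ScriptService.ScriptType

    public class ScriptTypeMigrationSketch {
        public static void main(String[] args) {
            // Old form: new Script("doc['num'].values", ScriptService.ScriptType.INLINE, "mockscript", null)
            // New form: the enum is referenced directly through its own top-level import.
            Script script = new Script("doc['num'].values", ScriptType.INLINE, "mockscript", null);
            System.out.println(script);
        }
    }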


@@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
@@ -34,8 +34,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -47,7 +45,6 @@ import java.util.function.Function;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;


@@ -32,7 +32,7 @@ import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.script.NativeScriptFactory;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;


@@ -30,7 +30,7 @@ import org.elasticsearch.index.mapper.IndexFieldMapper;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;


@@ -29,7 +29,7 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.LeafSearchScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
 import org.elasticsearch.search.aggregations.bucket.global.Global;


@@ -26,7 +26,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;

Some files were not shown because too many files have changed in this diff.