diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy index 3b1ec3c5d87..0c2e37ab821 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy @@ -42,14 +42,14 @@ public class DocsTestPlugin extends RestTestPlugin { 'List snippets that probably should be marked // CONSOLE' listConsoleCandidates.perSnippet { if ( - it.console // Already marked, nothing to do - || it.testResponse // It is a response + it.console != null // Already marked, nothing to do + || it.testResponse // It is a response ) { return } List languages = [ - // These languages should almost always be marked console - 'js', 'json', + // This language should almost always be marked console + 'js', // These are often curl commands that should be converted but // are probably false positives 'sh', 'shell', diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index 100715586d3..61a07f4fbd4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -32,6 +32,12 @@ import java.util.regex.Matcher * Generates REST tests for each snippet marked // TEST. */ public class RestTestsFromSnippetsTask extends SnippetsTask { + /** + * These languages aren't supported by the syntax highlighter so we + * shouldn't use them. + */ + private static final List BAD_LANGUAGES = ['json', 'javascript'] + @Input Map setups = new HashMap() @@ -87,9 +93,9 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { * calls buildTest to actually build the test. 
*/ void handleSnippet(Snippet snippet) { - if (snippet.language == 'json') { + if (BAD_LANGUAGES.contains(snippet.language)) { throw new InvalidUserDataException( - "$snippet: Use `js` instead of `json`.") + "$snippet: Use `js` instead of `${snippet.language}`.") } if (snippet.testSetup) { setup(snippet) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy index 749c0f916f8..8c3524a9b9f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy @@ -114,23 +114,38 @@ public class SnippetsTask extends DefaultTask { return } if (line ==~ /\/\/\s*AUTOSENSE\s*/) { - throw new InvalidUserDataException("AUTOSENSE has been " + - "replaced by CONSOLE. Use that instead at " + - "$file:$lineNumber") + throw new InvalidUserDataException("$file:$lineNumber: " + + "AUTOSENSE has been replaced by CONSOLE.") } if (line ==~ /\/\/\s*CONSOLE\s*/) { if (snippet == null) { - throw new InvalidUserDataException("CONSOLE not " + - "paired with a snippet at $file:$lineNumber") + throw new InvalidUserDataException("$file:$lineNumber: " + + "CONSOLE not paired with a snippet") + } + if (snippet.console != null) { + throw new InvalidUserDataException("$file:$lineNumber: " + + "Can't be both CONSOLE and NOTCONSOLE") } snippet.console = true return } + if (line ==~ /\/\/\s*NOTCONSOLE\s*/) { + if (snippet == null) { + throw new InvalidUserDataException("$file:$lineNumber: " + + "NOTCONSOLE not paired with a snippet") + } + if (snippet.console != null) { + throw new InvalidUserDataException("$file:$lineNumber: " + + "Can't be both CONSOLE and NOTCONSOLE") + } + snippet.console = false + return + } matcher = line =~ /\/\/\s*TEST(\[(.+)\])?\s*/ if (matcher.matches()) { if (snippet == null) { - throw new InvalidUserDataException("TEST not " + - "paired with a snippet at 
$file:$lineNumber") + throw new InvalidUserDataException("$file:$lineNumber: " + + "TEST not paired with a snippet at ") } snippet.test = true if (matcher.group(2) != null) { @@ -172,8 +187,8 @@ public class SnippetsTask extends DefaultTask { matcher = line =~ /\/\/\s*TESTRESPONSE(\[(.+)\])?\s*/ if (matcher.matches()) { if (snippet == null) { - throw new InvalidUserDataException("TESTRESPONSE not " + - "paired with a snippet at $file:$lineNumber") + throw new InvalidUserDataException("$file:$lineNumber: " + + "TESTRESPONSE not paired with a snippet") } snippet.testResponse = true if (matcher.group(2) != null) { @@ -226,7 +241,7 @@ public class SnippetsTask extends DefaultTask { int end = NOT_FINISHED String contents - boolean console = false + Boolean console = null boolean test = false boolean testResponse = false boolean testSetup = false @@ -243,8 +258,8 @@ public class SnippetsTask extends DefaultTask { if (language != null) { result += "($language)" } - if (console) { - result += '// CONSOLE' + if (console != null) { + result += console ? 
'// CONSOLE' : '// NOTCONSOLE' } if (test) { result += '// TEST' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 442882dfe99..b1f07265019 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -261,6 +261,7 @@ class ClusterFormationTasks { 'node.attr.testattr' : 'test', 'repositories.url.allowed_urls': 'http://snapshot.test*' ] + esConfig['node.max_local_storage_nodes'] = node.config.numNodes esConfig['http.port'] = node.config.httpPort esConfig['transport.tcp.port'] = node.config.transportPort esConfig.putAll(node.config.settings) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 79ac64c3795..14c98d42e36 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -575,18 +575,6 @@ - - - - - - - - - - - - diff --git a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java b/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java index 2f7d53870cc..889e7f741ed 100644 --- a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java +++ b/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java @@ -20,7 +20,7 @@ package org.apache.lucene.search.postingshighlight; import org.apache.lucene.search.highlight.Encoder; -import org.elasticsearch.search.highlight.HighlightUtils; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; /** Custom passage formatter that allows us to: diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index fce3b0a40e5..f035bc0f4b7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -467,17 +467,21 @@ public class TransportAnalyzeAction extends TransportSingleShardAction charFilterFactoryFactory; if (analysisService == null) { - AnalysisModule.AnalysisProvider charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name); + charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name); if (charFilterFactoryFactory == null) { throw new IllegalArgumentException("failed to find global char filter under [" + charFilter.name + "]"); } charFilterFactories[i] = charFilterFactoryFactory.get(environment, charFilter.name); } else { - charFilterFactories[i] = analysisService.charFilter(charFilter.name); - if (charFilterFactories[i] == null) { + charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name, analysisService.getIndexSettings()); + if (charFilterFactoryFactory == null) { throw new IllegalArgumentException("failed to find char filter under [" + charFilter.name + "]"); } + charFilterFactories[i] = charFilterFactoryFactory.get(analysisService.getIndexSettings(), environment, charFilter.name, + AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(), + AnalysisRegistry.INDEX_ANALYSIS_CHAR_FILTER + "." 
+ charFilter.name)); } } if (charFilterFactories[i] == null) { @@ -509,18 +513,21 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenFilterFactoryFactory; if (analysisService == null) { - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); - + tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); if (tokenFilterFactoryFactory == null) { throw new IllegalArgumentException("failed to find global token filter under [" + tokenFilter.name + "]"); } tokenFilterFactories[i] = tokenFilterFactoryFactory.get(environment, tokenFilter.name); } else { - tokenFilterFactories[i] = analysisService.tokenFilter(tokenFilter.name); - if (tokenFilterFactories[i] == null) { + tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name, analysisService.getIndexSettings()); + if (tokenFilterFactoryFactory == null) { throw new IllegalArgumentException("failed to find token filter under [" + tokenFilter.name + "]"); } + tokenFilterFactories[i] = tokenFilterFactoryFactory.get(analysisService.getIndexSettings(), environment, tokenFilter.name, + AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(), + AnalysisRegistry.INDEX_ANALYSIS_FILTER + "." 
+ tokenFilter.name)); } } if (tokenFilterFactories[i] == null) { @@ -550,17 +557,21 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenizerFactoryFactory; if (analysisService == null) { - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name); + tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name); if (tokenizerFactoryFactory == null) { throw new IllegalArgumentException("failed to find global tokenizer under [" + tokenizer.name + "]"); } tokenizerFactory = tokenizerFactoryFactory.get(environment, tokenizer.name); } else { - tokenizerFactory = analysisService.tokenizer(tokenizer.name); - if (tokenizerFactory == null) { + tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name, analysisService.getIndexSettings()); + if (tokenizerFactoryFactory == null) { throw new IllegalArgumentException("failed to find tokenizer under [" + tokenizer.name + "]"); } + tokenizerFactory = tokenizerFactoryFactory.get(analysisService.getIndexSettings(), environment, tokenizer.name, + AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(), + AnalysisRegistry.INDEX_ANALYSIS_TOKENIZER + "." 
+ tokenizer.name)); } } return tokenizerFactory; diff --git a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index 611d57345ba..851d9e6573d 100644 --- a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java index 2910736031f..c201315cbd8 100644 --- a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java @@ -24,7 +24,7 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; /** * A builder for {@link ExplainRequest}. 
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java index 5d1bfc1ed23..42c4ccc701d 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java index 4b57c4ba92c..7827de12eac 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java @@ -24,7 +24,7 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; /** * A get document action request builder. 
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index c0b26aec59f..001e4ebd7a0 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index f9103f0cddc..a61384f9595 100644 --- a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -322,11 +322,8 @@ abstract class AbstractSearchAsyncAction // we only release search context that we did not fetch from if we are not scrolling if (request.scroll() == null) { for (AtomicArray.Entry entry : queryResults.asList()) { - QuerySearchResult queryResult = entry.value.queryResult().queryResult(); - final TopDocs topDocs = queryResult.topDocs(); - final Suggest suggest = queryResult.suggest(); - if (((topDocs != null && topDocs.scoreDocs.length > 0) // the shard had matches - ||suggest != null && suggest.hasScoreDocs()) // or had suggest docs + QuerySearchResult queryResult = entry.value.queryResult(); + if (queryResult.hasHits() && docIdsToLoad.get(entry.index) == null) { // but none of them made it to the global top docs try { DiscoveryNode node = nodes.get(entry.value.queryResult().shardTarget().nodeId()); diff --git 
a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 5c08acb99ea..c03e904b6d7 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -31,7 +31,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index ed7f0d76ab3..10b508d9a19 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -48,7 +48,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; import java.util.ArrayList; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 1b410ce781e..eb9a18228f3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -546,24 +546,15 @@ public class RoutingNodes implements Iterable { assert failedShard.active(); if (failedShard.primary()) { // promote active replica to primary if active replica exists - ShardRouting candidate = activeReplica(failedShard.shardId()); - if (candidate == null) { + ShardRouting activeReplica = activeReplica(failedShard.shardId()); + if (activeReplica == null) { moveToUnassigned(failedShard, unassignedInfo); } else { + // if the activeReplica was relocating before this call to failShard, its relocation was cancelled above when we + // failed initializing replica shards (and moved replica relocation source back to started) + assert activeReplica.started() : "replica relocation should have been cancelled: " + activeReplica; movePrimaryToUnassignedAndDemoteToReplica(failedShard, unassignedInfo); - ShardRouting primarySwappedCandidate = promoteAssignedReplicaShardToPrimary(candidate); - if (primarySwappedCandidate.relocatingNodeId() != null) { - // its also relocating, make sure to move the other routing to primary - RoutingNode node = node(primarySwappedCandidate.relocatingNodeId()); - if (node != null) { - for (ShardRouting shardRouting : node) { - if (shardRouting.shardId().equals(primarySwappedCandidate.shardId()) && !shardRouting.primary()) { - promoteAssignedReplicaShardToPrimary(shardRouting); - break; - } - } - } - } + ShardRouting primarySwappedCandidate = promoteActiveReplicaShardToPrimary(activeReplica); if (IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings())) { reinitShadowPrimary(primarySwappedCandidate); } @@ -621,8 +612,8 @@ public class RoutingNodes implements Iterable { * @param replicaShard the replica shard to be promoted to primary * @return the resulting primary shard */ - private ShardRouting promoteAssignedReplicaShardToPrimary(ShardRouting replicaShard) { - assert replicaShard.unassigned() == false : "unassigned shard cannot be promoted to 
primary: " + replicaShard; + private ShardRouting promoteActiveReplicaShardToPrimary(ShardRouting replicaShard) { + assert replicaShard.active() : "non-active shard cannot be promoted to primary: " + replicaShard; assert replicaShard.primary() == false : "primary shard cannot be promoted to primary: " + replicaShard; ShardRouting primaryShard = replicaShard.moveToPrimary(); updateAssigned(replicaShard, primaryShard); @@ -729,7 +720,7 @@ public class RoutingNodes implements Iterable { /** * Moves assigned primary to unassigned and demotes it to a replica. - * Used in conjunction with {@link #promoteAssignedReplicaShardToPrimary} when an active replica is promoted to primary. + * Used in conjunction with {@link #promoteActiveReplicaShardToPrimary} when an active replica is promoted to primary. */ private ShardRouting movePrimaryToUnassignedAndDemoteToReplica(ShardRouting shard, UnassignedInfo unassignedInfo) { assert shard.unassigned() == false : "only assigned shards can be moved to unassigned (" + shard + ")"; diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 367131d93cd..59ef122760b 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -68,6 +68,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; @@ -151,7 +152,7 @@ public final class NodeEnvironment implements Closeable { /** * Maximum number of data nodes that should run in an environment. 
*/ - public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, + public static final Setting MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 1, 1, Property.NodeScope); /** @@ -244,8 +245,15 @@ public final class NodeEnvironment implements Closeable { } if (locks[0] == null) { - throw new IllegalStateException("Failed to obtain node lock, is the following location writable?: " - + Arrays.toString(environment.dataWithClusterFiles()), lastException); + final String message = String.format( + Locale.ROOT, + "failed to obtain node locks, tried [%s] with lock id%s;" + + " maybe these locations are not writable or multiple nodes were started without increasing [%s] (was [%d])?", + Arrays.toString(environment.dataWithClusterFiles()), + maxLocalStorageNodes == 1 ? " [0]" : "s [0--" + (maxLocalStorageNodes - 1) + "]", + MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), + maxLocalStorageNodes); + throw new IllegalStateException(message, lastException); } this.nodeMetaData = loadOrCreateNodeMetaData(settings, startupTraceLogger, nodePaths); this.logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId())); diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java index b4a6430b69e..148b676331d 100644 --- a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -33,7 +33,6 @@ import java.util.concurrent.TimeUnit; /** */ public final class SearchSlowLog implements SearchOperationListener { - private final Index index; private boolean reformat; private long queryWarnThreshold; @@ -84,10 +83,8 @@ public final class SearchSlowLog implements SearchOperationListener { public SearchSlowLog(IndexSettings indexSettings) { - this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); 
- this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); - - this.index = indexSettings.getIndex(); + this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query", indexSettings.getSettings()); + this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch", indexSettings.getSettings()); indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_REFORMAT, this::setReformat); this.reformat = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_REFORMAT); @@ -122,38 +119,36 @@ public final class SearchSlowLog implements SearchOperationListener { @Override public void onQueryPhase(SearchContext context, long tookInNanos) { if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { - queryLogger.warn("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { - queryLogger.info("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { - queryLogger.debug("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { - queryLogger.trace("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } } @Override public void onFetchPhase(SearchContext context, long tookInNanos) { if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { - fetchLogger.warn("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + 
fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { - fetchLogger.info("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { - fetchLogger.debug("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { - fetchLogger.trace("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat)); + fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); } } static final class SlowLogSearchContextPrinter { private final SearchContext context; - private final Index index; private final long tookInNanos; private final boolean reformat; - public SlowLogSearchContextPrinter(Index index, SearchContext context, long tookInNanos, boolean reformat) { + public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) { this.context = context; - this.index = index; this.tookInNanos = tookInNanos; this.reformat = reformat; } @@ -161,7 +156,7 @@ public final class SearchSlowLog implements SearchOperationListener { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(index).append(" "); + sb.append(context.indexShard().shardId()).append(" "); sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); if (context.getQueryShardContext().getTypes() == null) { sb.append("types[], "); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java 
b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 548bc91b0a5..119e0c16ea0 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -49,6 +49,9 @@ import static java.util.Collections.unmodifiableMap; * This class exists per node and allows to create per-index {@link AnalysisService} via {@link #build(IndexSettings)} */ public final class AnalysisRegistry implements Closeable { + public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter"; + public static final String INDEX_ANALYSIS_FILTER = "index.analysis.filter"; + public static final String INDEX_ANALYSIS_TOKENIZER = "index.analysis.tokenizer"; private final PrebuiltAnalysis prebuiltAnalysis = new PrebuiltAnalysis(); private final Map cachedAnalyzer = new ConcurrentHashMap<>(); @@ -70,6 +73,20 @@ public final class AnalysisRegistry implements Closeable { this.analyzers = unmodifiableMap(analyzers); } + /** + * Returns a {@link Settings} by groupName from {@link IndexSettings} or a default {@link Settings} + * @param indexSettings an index settings + * @param groupName tokenizer/token filter/char filter name + * @return {@link Settings} + */ + public static Settings getSettingsFromIndexSettings(IndexSettings indexSettings, String groupName) { + Settings settings = indexSettings.getSettings().getAsSettings(groupName); + if (settings.isEmpty()) { + settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexSettings.getIndexVersionCreated()).build(); + } + return settings; + } + /** * Returns a registered {@link TokenizerFactory} provider by name or null if the tokenizer was not registered */ @@ -122,9 +139,9 @@ public final class AnalysisRegistry implements Closeable { * Creates an index-level {@link AnalysisService} from this registry using the given index settings */ public AnalysisService build(IndexSettings indexSettings) 
throws IOException { - final Map charFiltersSettings = indexSettings.getSettings().getGroups("index.analysis.char_filter"); - final Map tokenFiltersSettings = indexSettings.getSettings().getGroups("index.analysis.filter"); - final Map tokenizersSettings = indexSettings.getSettings().getGroups("index.analysis.tokenizer"); + final Map charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER); + final Map tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER); + final Map tokenizersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_TOKENIZER); final Map analyzersSettings = indexSettings.getSettings().getGroups("index.analysis.analyzer"); final Map charFilterFactories = buildMapping(false, "charfilter", indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.charFilterFactories); @@ -136,13 +153,76 @@ public final class AnalysisRegistry implements Closeable { * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and * hide internal data-structures as much as possible. 
*/ - tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, tokenizerFactories, name, settings))); + tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings))); final Map tokenFilterFactories = buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories); final Map> analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories); return new AnalysisService(indexSettings, analyzierFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories); } + /** + * Returns a registered {@link TokenizerFactory} provider by {@link IndexSettings} + * or a registered {@link TokenizerFactory} provider by predefined name + * or null if the tokenizer was not registered + * @param tokenizer global or defined tokenizer name + * @param indexSettings an index settings + * @return {@link TokenizerFactory} provider or null + */ + public AnalysisProvider getTokenizerProvider(String tokenizer, IndexSettings indexSettings) { + final Map tokenizerSettings = indexSettings.getSettings().getGroups("index.analysis.tokenizer"); + if (tokenizerSettings.containsKey(tokenizer)) { + Settings currentSettings = tokenizerSettings.get(tokenizer); + return getAnalysisProvider("tokenizer", tokenizers, tokenizer, currentSettings.get("type")); + } else { + return prebuiltAnalysis.tokenizerFactories.get(tokenizer); + } + } + + /** + * Returns a registered {@link TokenFilterFactory} provider by {@link IndexSettings} + * or a registered {@link TokenFilterFactory} provider by predefined name + * or null if the tokenFilter was not registered + * @param tokenFilter global or defined tokenFilter name + * @param indexSettings an index settings + * @return {@link TokenFilterFactory} provider 
or null + */ + public AnalysisProvider getTokenFilterProvider(String tokenFilter, IndexSettings indexSettings) { + final Map tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.filter"); + if (tokenFilterSettings.containsKey(tokenFilter)) { + Settings currentSettings = tokenFilterSettings.get(tokenFilter); + String typeName = currentSettings.get("type"); + /* + * synonym is different than everything else since it needs access to the tokenizer factories for this index. + * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and + * hide internal data-structures as much as possible. + */ + if ("synonym".equals(typeName)) { + return requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)); + } else { + return getAnalysisProvider("tokenfilter", tokenFilters, tokenFilter, typeName); + } + } else { + return prebuiltAnalysis.tokenFilterFactories.get(tokenFilter); + } + } + + /** + * Returns a registered {@link CharFilterFactory} provider by {@link IndexSettings} + * or a registered {@link CharFilterFactory} provider by predefined name + * or null if the charFilter was not registered + * @param charFilter global or defined charFilter name + * @param indexSettings an index settings + * @return {@link CharFilterFactory} provider or null + */ + public AnalysisProvider getCharFilterProvider(String charFilter, IndexSettings indexSettings) { + final Map tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.char_filter"); + if (tokenFilterSettings.containsKey(charFilter)) { + Settings currentSettings = tokenFilterSettings.get(charFilter); + return getAnalysisProvider("charfilter", charFilters, charFilter, currentSettings.get("type")); + } else { + return prebuiltAnalysis.charFilterFactories.get(charFilter); + } + } private static AnalysisModule.AnalysisProvider 
requriesAnalysisSettings(AnalysisModule.AnalysisProvider provider) { return new AnalysisModule.AnalysisProvider() { @@ -185,13 +265,7 @@ public final class AnalysisRegistry implements Closeable { } factories.put(name, factory); } else { - if (typeName == null) { - throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); - } - AnalysisModule.AnalysisProvider type = providerMap.get(typeName); - if (type == null) { - throw new IllegalArgumentException("Unknown " + toBuild + " type [" + typeName + "] for [" + name + "]"); - } + AnalysisProvider type = getAnalysisProvider(toBuild, providerMap, name, typeName); final T factory = type.get(settings, environment, name, currentSettings); factories.put(name, factory); } @@ -232,6 +306,17 @@ public final class AnalysisRegistry implements Closeable { return factories; } + private AnalysisProvider getAnalysisProvider(String toBuild, Map> providerMap, String name, String typeName) { + if (typeName == null) { + throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); + } + AnalysisProvider type = providerMap.get(typeName); + if (type == null) { + throw new IllegalArgumentException("Unknown " + toBuild + " type [" + typeName + "] for [" + name + "]"); + } + return type; + } + private static class PrebuiltAnalysis implements Closeable { final Map>> analyzerProviderFactories; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 1cd3abb0cb3..8daff40332d 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -32,18 +32,18 @@ import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.indices.analysis.AnalysisModule; import java.io.IOException; import java.io.Reader; import java.util.List; -import java.util.Map; public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { private final SynonymMap synonymMap; private final boolean ignoreCase; - public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, Map tokenizerFactories, + public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, String name, Settings settings) throws IOException { super(indexSettings, name, settings); @@ -65,11 +65,13 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { boolean expand = settings.getAsBoolean("expand", true); String tokenizerName = settings.get("tokenizer", "whitespace"); - final TokenizerFactory tokenizerFactory = tokenizerFactories.get(tokenizerName); - if (tokenizerFactory == null) { + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = + analysisRegistry.getTokenizerProvider(tokenizerName, indexSettings); + if (tokenizerFactoryFactory == null) { throw new IllegalArgumentException("failed to find tokenizer [" + tokenizerName + "] for synonym token filter"); } - + final TokenizerFactory tokenizerFactory = tokenizerFactoryFactory.get(indexSettings, env, tokenizerName, + AnalysisRegistry.getSettingsFromIndexSettings(indexSettings, AnalysisRegistry.INDEX_ANALYSIS_TOKENIZER + "." 
+ tokenizerName)); Analyzer analyzer = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { diff --git a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java index dd9daad1824..b3676b984c2 100644 --- a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -50,8 +50,8 @@ import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.ParentFieldSubFetchPhase; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 5416b554641..ff783883ff5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -34,11 +34,11 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import 
org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; @@ -585,7 +585,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl for (ScriptField field : scriptFields) { SearchScript searchScript = innerHitsContext.scriptService().search(innerHitsContext.lookup(), field.script(), ScriptContext.Standard.SEARCH, Collections.emptyMap()); - innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField( + innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( field.fieldName(), searchScript, field.ignoreFailure())); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/core/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index 877ec6cbc1d..861f61eb396 100644 --- a/core/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.highlight.Highlighter; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.suggest.Suggester; import 
java.util.List; @@ -143,9 +143,9 @@ public interface SearchPlugin { /** * Specification of search time behavior extension like a custom {@link MovAvgModel} or {@link ScoreFunction}. * - * @param W the type of the main {@link NamedWriteable} for this spec. All specs have this but it isn't always *for* the same thing + * @param the type of the main {@link NamedWriteable} for this spec. All specs have this but it isn't always *for* the same thing * though, usually it is some sort of builder sent from the coordinating node to the data nodes executing the behavior - * @param P the type of the parser for this spec. The parser runs on the coordinating node, converting {@link XContent} into the + * @param

the type of the parser for this spec. The parser runs on the coordinating node, converting {@link XContent} into the * behavior to execute */ class SearchExtensionSpec { diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index 19f55f4cd46..164e3e2a5f6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -39,7 +39,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java index 3cc7d8fd1ae..c05e7cece24 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java @@ -35,7 +35,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index 9e9b54b5b0c..2cb5fa330db 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -33,7 +33,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java index de4f433dcbf..ce779fd5569 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java @@ -31,7 +31,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestToXContentListener; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index c34a9391d8a..7a73d306221 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -43,7 +43,7 @@ import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.AggregatorParsers; import 
org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; diff --git a/core/src/main/java/org/elasticsearch/search/SearchHit.java b/core/src/main/java/org/elasticsearch/search/SearchHit.java index b149a4162fe..c9ccddd05e6 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/SearchHit.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.search.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import java.util.Map; diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 9bd79bbfc47..5a5137ea571 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -19,13 +19,6 @@ package org.elasticsearch.search; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.function.Consumer; -import java.util.function.Function; - import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.ParseField; @@ -253,18 +246,18 @@ import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipel import org.elasticsearch.search.controller.SearchPhaseController; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; -import 
org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase; -import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase; -import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; -import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.source.FetchSourceSubPhase; -import org.elasticsearch.search.fetch.version.VersionFetchSubPhase; -import org.elasticsearch.search.highlight.FastVectorHighlighter; -import org.elasticsearch.search.highlight.HighlightPhase; -import org.elasticsearch.search.highlight.Highlighter; -import org.elasticsearch.search.highlight.PlainHighlighter; -import org.elasticsearch.search.highlight.PostingsHighlighter; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.ExplainFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; +import org.elasticsearch.search.fetch.subphase.MatchedQueriesFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.ParentFieldSubFetchPhase; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.VersionFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; +import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter; +import org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -283,6 +276,13 @@ import org.elasticsearch.search.suggest.phrase.SmoothingModel; import 
org.elasticsearch.search.suggest.phrase.StupidBackoff; import org.elasticsearch.search.suggest.term.TermSuggester; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.function.Consumer; +import java.util.function.Function; + import static java.util.Collections.unmodifiableMap; import static java.util.Objects.requireNonNull; @@ -361,7 +361,7 @@ public class SearchModule extends AbstractModule { /** * The registry of {@link MovAvgModel}s. */ - public ParseFieldRegistry getMovingAverageMdelParserRegistry() { + public ParseFieldRegistry getMovingAverageModelParserRegistry() { return movingAverageModelParserRegistry; } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 4d618eb057a..5fd0a1417b1 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -67,11 +67,11 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext.DocValueField; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField; +import 
org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.DefaultSearchContext; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.ScrollContext; @@ -269,7 +269,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv loadOrExecuteQueryPhase(request, context); - if (hasHits(context.queryResult()) == false && context.scrollContext() == null) { + if (context.queryResult().hasHits() == false && context.scrollContext() == null) { freeContext(context.id()); } else { contextProcessedSuccessfully(context); @@ -324,7 +324,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv operationListener.onPreQueryPhase(context); long time = System.nanoTime(); queryPhase.execute(context); - if (hasHits(context.queryResult()) == false && context.scrollContext() == null) { + if (context.queryResult().hasHits() == false && context.scrollContext() == null) { // no hits, we can release the context since there will be no fetch phase freeContext(context.id()); } else { @@ -861,11 +861,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); } - private static boolean hasHits(final QuerySearchResult searchResult) { - return searchResult.topDocs().scoreDocs.length > 0 || - (searchResult.suggest() != null && searchResult.suggest().hasScoreDocs()); - } - private void processScroll(InternalScrollSearchRequest request, SearchContext context) { // process scroll context.from(context.from() + context.size()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index ffa75472990..b95f4154cb7 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -38,8 +38,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java index a3614889bdf..70a92695473 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java @@ -28,11 +28,11 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext.DocValueField; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; -import 
org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; @@ -107,7 +107,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory entry : results) { - if (entry.value.queryResult().topDocs().scoreDocs.length > 0) { + if (entry.value.queryResult().hasHits()) { if (result != null) { // we already have one, can't really optimize canOptimize = false; break; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 81da15e087c..b292a2e800b 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -45,8 +45,8 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchPhase; -import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.internal.InternalSearchHits; diff --git 
a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index dec22308229..8efb995926a 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -31,7 +31,7 @@ import java.util.HashMap; import java.util.Map; /** - * Sub phase within the fetch phase used to fetch things *about* the documents highlghting. + * Sub phase within the fetch phase used to fetch things *about* the documents like highlghting or matched queries. */ public interface FetchSubPhase { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseContext.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseContext.java index c557208ac73..856c0ad902f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseContext.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.fetch; -import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; /** * All configuration and context needed by the FetchSubPhase to execute on hits. diff --git a/core/src/main/java/org/elasticsearch/search/fetch/package-info.java b/core/src/main/java/org/elasticsearch/search/fetch/package-info.java new file mode 100644 index 00000000000..30c4404e557 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/fetch/package-info.java @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Search phase that fetches the top hits from the shards after the results of the query phase have been merged. Pluggable by implementing + * {@link org.elasticsearch.search.fetch.FetchSubPhase} and + * {@link org.elasticsearch.plugins.SearchPlugin#getFetchSubPhases(org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext)}. + */ +package org.elasticsearch.search.fetch; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java similarity index 96% rename from core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsContext.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java index 27cde1f4df5..54185734f97 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsContext.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.fetch.docvalues; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.search.fetch.FetchSubPhaseContext; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java index 9946920bc62..803cbb4348f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.fetch.docvalues; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java index e560b815d5a..4ad7db0c757 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java @@ -16,19 +16,18 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.fetch.explain; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.search.Explanation; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescoreSearchContext; import java.io.IOException; /** - * + * Explains the scoring calculations for the top hits. */ public final class ExplainFetchSubPhase implements FetchSubPhase { @@ -47,7 +46,8 @@ public final class ExplainFetchSubPhase implements FetchSubPhase { // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { - throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e); + throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + + hitContext.hit().id() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java similarity index 95% rename from core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceContext.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java index 658bb1d72ee..864de1628a7 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.fetch.source; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.ParseField; @@ -38,6 +38,7 @@ import java.util.Arrays; import java.util.List; /** + * Context used to fetch the {@code _source}. */ public class FetchSourceContext implements Streamable, ToXContent { @@ -175,8 +176,8 @@ public class FetchSourceContext implements Streamable, ToXContent { if (token == XContentParser.Token.VALUE_STRING) { includesList.add(parser.text()); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + + " in [" + currentFieldName + "].", parser.getTokenLocation()); } } includes = includesList.toArray(new String[includesList.size()]); @@ -186,14 +187,14 @@ public class FetchSourceContext implements Streamable, ToXContent { if (token == XContentParser.Token.VALUE_STRING) { excludesList.add(parser.text()); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + + " in [" + currentFieldName + "].", parser.getTokenLocation()); } } excludes = excludesList.toArray(new String[excludesList.size()]); } else { - throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation()); + throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + + " in [" + currentFieldName + "].", parser.getTokenLocation()); } } else if (token == XContentParser.Token.VALUE_STRING) { if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) { diff --git 
a/core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java index 900402bda13..7ba24442a7f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/source/FetchSourceSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.fetch.source; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java similarity index 99% rename from core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java index 9d80c40e7d3..cbcab099765 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.fetch.innerhits; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java index d9ebc77ec07..23c63bc7eef 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.fetch.innerhits; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java index 17f5e5ac705..56223b1ec46 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.fetch.matchedqueries; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java index 8d6bcd1af6e..47f78c6ce53 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.fetch.parent; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedDocValues; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java similarity index 97% rename from core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java index 9cf680d228a..c886a3a157f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.fetch.script; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.script.SearchScript; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java index 19a7631ccba..80638860f6c 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.fetch.script; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.search.SearchHitField; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java similarity index 97% rename from core/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java index 0ee1112d9b9..884cf6d2bb6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.fetch.version; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.ElasticsearchException; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java similarity index 99% rename from core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index 816be79a9e5..72bd436a88c 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; @@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.highlight.HighlightBuilder.Order; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Order; import java.io.IOException; import java.util.Arrays; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java index 
d2fe1692027..b62d28f8ab4 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.Query; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java similarity index 77% rename from core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 3d08019539c..873567de44e 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.vectorhighlight.BaseFragmentsBuilder; @@ -37,9 +37,6 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.highlight.vectorhighlight.SimpleFragmentsBuilder; -import org.elasticsearch.search.highlight.vectorhighlight.SourceScoreOrderFragmentsBuilder; -import org.elasticsearch.search.highlight.vectorhighlight.SourceSimpleFragmentsBuilder; import org.elasticsearch.search.internal.SearchContext; import java.util.Collections; @@ -68,7 +65,8 @@ public class FastVectorHighlighter implements Highlighter { FieldMapper mapper = highlighterContext.mapper; if (canHighlight(mapper) == false) { - throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); + throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); } Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; @@ -82,14 +80,22 @@ public class FastVectorHighlighter implements Highlighter { FieldQuery fieldQuery; if (field.fieldOptions().requireFieldMatch()) { if (cache.fieldMatchFieldQuery == null) { - // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) 
- cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + /* + * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across + * readers...) + */ + cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), + true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.fieldMatchFieldQuery; } else { if (cache.noFieldMatchFieldQuery == null) { - // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) - cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + /* + * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across + * readers...) + */ + cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), + true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.noFieldMatchFieldQuery; } @@ -100,31 +106,40 @@ public class FastVectorHighlighter implements Highlighter { BaseFragmentsBuilder fragmentsBuilder; BoundaryScanner boundaryScanner = DEFAULT_BOUNDARY_SCANNER; - if (field.fieldOptions().boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN || field.fieldOptions().boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) { - boundaryScanner = new SimpleBoundaryScanner(field.fieldOptions().boundaryMaxScan(), field.fieldOptions().boundaryChars()); + if (field.fieldOptions().boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN + || field.fieldOptions().boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) { + boundaryScanner = new SimpleBoundaryScanner(field.fieldOptions().boundaryMaxScan(), + field.fieldOptions().boundaryChars()); } boolean forceSource = 
context.highlight().forceSource(field); if (field.fieldOptions().numberOfFragments() == 0) { fragListBuilder = new SingleFragListBuilder(); if (!forceSource && mapper.fieldType().stored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, + field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } else { - fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); + fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? 
+ new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); if (field.fieldOptions().scoreOrdered()) { if (!forceSource && mapper.fieldType().stored()) { - fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context, + field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } else { if (!forceSource && mapper.fieldType().stored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); + fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), boundaryScanner); } } } @@ -146,16 +161,20 @@ public class FastVectorHighlighter implements Highlighter { String[] fragments; // a HACK to make highlighter do highlighting, even though its using the single frag list builder - int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? Integer.MAX_VALUE : field.fieldOptions().numberOfFragments(); - int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ? 
Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize(); + int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? + Integer.MAX_VALUE : field.fieldOptions().numberOfFragments(); + int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ? + Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize(); // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible // Only send matched fields if they were requested to save time. if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize, - numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), + field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, + entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), fragmentCharSize, - numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), + fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), + field.fieldOptions().postTags(), encoder); } if (fragments != null && fragments.length > 0) { @@ -183,14 +202,13 @@ public class FastVectorHighlighter implements Highlighter { @Override public boolean 
canHighlight(FieldMapper fieldMapper) { - return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions(); + return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() + && fieldMapper.fieldType().storeTermVectorPositions(); } private class MapperHighlightEntry { public FragListBuilder fragListBuilder; public FragmentsBuilder fragmentsBuilder; - - public org.apache.lucene.search.highlight.Highlighter highlighter; } private class HighlighterEntry { diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index b9ae34b60b0..ac0dab3a638 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight.vectorhighlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java index 92bdf6e8aa8..fe4587826c7 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; @@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions; import java.io.IOException; import java.util.ArrayList; @@ -279,7 +279,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder fieldOptions = new ArrayList<>(); + Collection fieldOptions = new ArrayList<>(); for (Field field : this.fields) { final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder(); fieldOptionsBuilder.fragmentOffset(field.fragmentOffset); diff --git 
a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java similarity index 98% rename from core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java index 30530b697f3..91fde32c888 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightField.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightField.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 5480291b973..84890857c79 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; import org.elasticsearch.common.component.AbstractComponent; @@ -67,7 +67,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { if (context.highlight().forceSource(field)) { SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper(); if (!sourceFieldMapper.enabled()) { - throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has disabled _source"); + throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + + " but type [" + hitContext.hit().type() + "] has disabled _source"); } } @@ -105,11 +106,16 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { } Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { - throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + "] for the field [" + fieldName + "]"); + throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + + "] for the field [" + fieldName + "]"); } - Query highlightQuery = field.fieldOptions().highlightQuery() == null ? 
context.parsedQuery().query() : field.fieldOptions().highlightQuery(); - HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery); + Query highlightQuery = field.fieldOptions().highlightQuery(); + if (highlightQuery == null) { + highlightQuery = context.parsedQuery().query(); + } + HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, + hitContext, highlightQuery); if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) { // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java similarity index 86% rename from core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index 16f5b94fdc5..4a6e991b9a3 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.Encoder; @@ -43,7 +43,11 @@ public final class HighlightUtils { } - static List loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext, FetchSubPhase.HitContext hitContext) throws IOException { + /** + * Load field values for highlighting. 
+ */ + public static List loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext, + FetchSubPhase.HitContext hitContext) throws IOException { //percolator needs to always load from source, thus it sets the global force source to true boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; @@ -65,7 +69,7 @@ public final class HighlightUtils { } static class Encoders { - static Encoder DEFAULT = new DefaultEncoder(); - static Encoder HTML = new SimpleHTMLEncoder(); + static final Encoder DEFAULT = new DefaultEncoder(); + static final Encoder HTML = new SimpleHTMLEncoder(); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java similarity index 94% rename from core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java index f96536b2aff..ab76da6e726 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/Highlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.index.mapper.FieldMapper; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java similarity index 96% rename from core/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java index e791aad4310..7b9526d152f 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.FieldMapper; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java similarity index 94% rename from core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 4bd27e11795..631d716f6f7 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; @@ -68,11 +68,13 @@ public class PlainHighlighter implements Highlighter { hitContext.cache().put(CACHE_KEY, mappers); } @SuppressWarnings("unchecked") - Map cache = (Map) hitContext.cache().get(CACHE_KEY); + Map cache = + (Map) hitContext.cache().get(CACHE_KEY); org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, + field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -84,7 +86,8 @@ public class PlainHighlighter implements Highlighter { } else if ("span".equals(field.fieldOptions().fragmenter())) { fragmenter = new SimpleSpanFragmenter(queryScorer, field.fieldOptions().fragmentCharSize()); } else { - throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter() + "] for the field [" + highlighterContext.fieldName + "]"); + throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter() + + "] for the field [" + highlighterContext.fieldName + "]"); } Formatter formatter = new SimpleHTMLFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0]); @@ -181,7 +184,8 @@ public class PlainHighlighter implements Highlighter { return true; } - private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer analyzer, String fieldName, String contents) throws IOException { + private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer 
analyzer, String fieldName, String contents) + throws IOException { try (TokenStream tokenStream = analyzer.tokenStream(fieldName, contents)) { if (!tokenStream.hasAttribute(OffsetAttribute.class)) { // Can't split on term boundaries without offsets diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java index 51c460c5c68..7ed50c7a1dd 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.IndexOptions; @@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils.Encoders; import java.io.IOException; import java.text.BreakIterator; @@ -53,7 +54,8 @@ public class PostingsHighlighter implements Highlighter { FieldMapper fieldMapper = highlighterContext.mapper; SearchContextHighlight.Field field = highlighterContext.field; if (canHighlight(fieldMapper) == false) { - throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter"); + throw new 
IllegalArgumentException("the field [" + highlighterContext.fieldName + + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter"); } SearchContext context = highlighterContext.context; @@ -67,8 +69,9 @@ public class PostingsHighlighter implements Highlighter { MapperHighlighterEntry mapperHighlighterEntry = highlighterEntry.mappers.get(fieldMapper); if (mapperHighlighterEntry == null) { - Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder); + Encoder encoder = field.fieldOptions().encoder().equals("html") ? Encoders.HTML : Encoders.DEFAULT; + CustomPassageFormatter passageFormatter = new CustomPassageFormatter( + field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder); mapperHighlighterEntry = new MapperHighlighterEntry(passageFormatter); } @@ -83,17 +86,20 @@ public class PostingsHighlighter implements Highlighter { //so we don't lose the distinction between the different values of a field and we get back a snippet per value String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.NULL_SEPARATOR); CustomSeparatorBreakIterator breakIterator = new CustomSeparatorBreakIterator(HighlightUtils.NULL_SEPARATOR); - highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, breakIterator, fieldValue, field.fieldOptions().noMatchSize() > 0); + highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, breakIterator, + fieldValue, field.fieldOptions().noMatchSize() > 0); numberOfFragments = fieldValues.size(); //we are highlighting the whole content, one snippet per value } else { //using paragraph separator we make sure that each field value holds a discrete passage for highlighting String 
fieldValue = mergeFieldValues(fieldValues, HighlightUtils.PARAGRAPH_SEPARATOR); - highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, fieldValue, field.fieldOptions().noMatchSize() > 0); + highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, + fieldValue, field.fieldOptions().noMatchSize() > 0); numberOfFragments = field.fieldOptions().numberOfFragments(); } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, + hitContext.docId(), numberOfFragments); for (Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); diff --git a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SearchContextHighlight.java similarity index 99% rename from core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SearchContextHighlight.java index 26f638b15a9..9f2074d7412 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SearchContextHighlight.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SimpleFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java similarity index 92% rename from core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SimpleFragmentsBuilder.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java index 271b0c7acf5..c1e74bd00bc 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SimpleFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight.vectorhighlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.document.Field; import org.apache.lucene.search.highlight.Encoder; @@ -40,6 +40,7 @@ public class SimpleFragmentsBuilder extends org.apache.lucene.search.vectorhighl @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ - return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), preTags, postTags, encoder); + return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), + preTags, postTags, encoder); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java similarity index 94% rename from 
core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java index 1220a76c120..e46cda49290 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight.vectorhighlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; @@ -66,6 +66,7 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ - return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), preTags, postTags, encoder); + return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), + preTags, postTags, encoder); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java similarity index 97% rename from core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java rename to core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java index 7ec6fe457de..4ff52547c7d 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.highlight.vectorhighlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/package-info.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/package-info.java new file mode 100644 index 00000000000..0e0daf66708 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/package-info.java @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Fetch sub phase that extracts significant portions of string fields, marking the matches. Pluggable by implementing + * {@link org.elasticsearch.search.fetch.subphase.highlight.Highlighter} and + * {@link org.elasticsearch.plugins.SearchPlugin#getHighlighters()}. 
+ */ +package org.elasticsearch.search.fetch.subphase.highlight; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/package-info.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/package-info.java new file mode 100644 index 00000000000..167ed4aa132 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Built in {@link org.elasticsearch.search.fetch.FetchSubPhase}s like matched queries and fetching {@code _source}. 
+ */ +package org.elasticsearch.search.fetch.subphase; diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 50e91e082cd..8d33140e3ee 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -133,6 +133,10 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { @Override public Explanation explain(Query query, int doc) throws IOException { + if (aggregatedDfs != null) { + // dfs data is needed to explain the score + return super.explain(createNormalizedWeight(query, true), doc); + } return in.explain(query, doc); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 5e1b1ec9790..131849ce3e5 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -60,9 +60,9 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseContext; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import 
org.elasticsearch.search.query.QueryPhaseExecutionException; diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index d187c88b4c9..9e132e40137 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -45,10 +45,10 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index e1d46dd5fd2..9f5054dccd7 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -38,7 +38,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; -import 
org.elasticsearch.search.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.internal.InternalSearchHits.StreamContext.ShardTargetType; import org.elasticsearch.search.lookup.SourceLookup; @@ -55,7 +55,7 @@ import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; -import static org.elasticsearch.search.highlight.HighlightField.readHighlightField; +import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField; import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField; /** diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index d8679d3fbbd..0c257191c2f 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -49,10 +49,10 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import 
org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; diff --git a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index 9276e86edda..2116300c19f 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -24,9 +24,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 5125950a412..e583cfbf13e 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -188,6 +188,12 @@ public class QuerySearchResult extends QuerySearchResultProvider { return this; } + /** Returns true iff the result has hits */ + public boolean hasHits() { + return (topDocs != null && topDocs.scoreDocs.length > 0) || + (suggest != null && suggest.hasScoreDocs()); 
+ } + public static QuerySearchResult readQuerySearchResult(StreamInput in) throws IOException { QuerySearchResult result = new QuerySearchResult(); result.readFrom(in); diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 40c805e0b00..3ca80155270 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -117,6 +117,9 @@ public class TribeService extends AbstractLifecycleComponent { sb.put(Node.NODE_MASTER_SETTING.getKey(), false); sb.put(Node.NODE_DATA_SETTING.getKey(), false); sb.put(Node.NODE_INGEST_SETTING.getKey(), false); + if (!NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.exists(settings)) { + sb.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), nodesSettings.size()); + } sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local"); // a tribe node should not use zen discovery // nothing is going to be discovered, since no master will be elected sb.put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); diff --git a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java index 737b3df41ac..c95819a0e53 100644 --- a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java +++ b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.store.Directory; -import org.elasticsearch.search.highlight.HighlightUtils; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; import org.elasticsearch.test.ESTestCase; import static 
org.hamcrest.CoreMatchers.equalTo; diff --git a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java index ac3a24346ac..17aeb869c1a 100644 --- a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java +++ b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java @@ -19,7 +19,7 @@ package org.apache.lucene.search.postingshighlight; -import org.elasticsearch.search.highlight.HighlightUtils; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; import org.elasticsearch.test.ESTestCase; import java.text.BreakIterator; diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 7806e575629..6919db1b733 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -51,12 +52,21 @@ public class TransportAnalyzeActionTests extends ESTestCase { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .put("index.analysis.filter.wordDelimiter.type", 
"word_delimiter") .put("index.analysis.filter.wordDelimiter.split_on_numerics", false) .put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace") .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build(); + .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter") + .put("index.analysis.tokenizer.trigram.type", "ngram") + .put("index.analysis.tokenizer.trigram.min_gram", 3) + .put("index.analysis.tokenizer.trigram.max_gram", 3) + .put("index.analysis.filter.synonym.type", "synonym") + .putArray("index.analysis.filter.synonym.synonyms", "kimchy => shay") + .put("index.analysis.filter.synonym.tokenizer", "trigram") + .put("index.analysis.filter.synonym.min_gram", 3) + .put("index.analysis.filter.synonym.max_gram", 3).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); environment = new Environment(settings); registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry(); @@ -168,6 +178,16 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("brown", tokens.get(2).getTerm()); assertEquals("fox", tokens.get(3).getTerm()); assertEquals("dog", tokens.get(4).getTerm()); + + request.analyzer(null); + request.tokenizer("trigram"); + request.addTokenFilter("synonym"); + request.text("kimchy"); + analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment); + tokens = analyze.getTokens(); + assertEquals(2, tokens.size()); + assertEquals("sha", tokens.get(0).getTerm()); + assertEquals("hay", tokens.get(1).getTerm()); } public void testGetIndexAnalyserWithoutAnalysisService() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java 
b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java index ef259463139..dab737cf7f5 100644 --- a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.ESTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index e1f5a1a719b..3cafff08a07 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.Store; @@ -118,6 +119,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() // manual collection or upon cluster forming. 
+ .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2) .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(), "1s") .build(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index ed91d98e532..354b18d0b2a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -374,50 +374,59 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); - logger.info("--> cancel the move of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false)), false, false); - clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); - assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); + if (randomBoolean()) { + logger.info("--> cancel the primary allocation (with allow_primary set to true)"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true)), false, false); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(rerouteResult.changed(), equalTo(true)); + 
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(0)); + assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).iterator().next().primary(), equalTo(true)); + assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); + } else { + logger.info("--> cancel the move of the replica shard"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false)), false, false); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); - logger.info("--> move the replica shard again"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3")), false, false); - clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); - assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(RELOCATING).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); + logger.info("--> move the replica shard again"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", 
"node3")), false, false); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(RELOCATING).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); - logger.info("--> cancel the source replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)), false, false); - clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); - assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); - assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).get(0).relocatingNodeId(), nullValue()); + logger.info("--> cancel the source replica shard"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)), false, false); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); + 
assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); + assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).get(0).relocatingNodeId(), nullValue()); - logger.info("--> start the former target replica shard"); - rerouteResult = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); - clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); - assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); - assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); - assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(STARTED).size(), equalTo(1)); + logger.info("--> start the former target replica shard"); + rerouteResult = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); + assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); + assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(STARTED).size(), equalTo(1)); - - logger.info("--> cancel the primary allocation (with allow_primary set to true)"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new 
CancelAllocationCommand("test", 0, "node1", true)), false, false); - clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); - assertThat(rerouteResult.changed(), equalTo(true)); - assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(STARTED).iterator().next().primary(), equalTo(true)); - assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(0)); - assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); + logger.info("--> cancel the primary allocation (with allow_primary set to true)"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true)), false, false); + clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); + assertThat(rerouteResult.changed(), equalTo(true)); + assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(STARTED).iterator().next().primary(), equalTo(true)); + assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(0)); + assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(0)); + } } public void testSerialization() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index ab42abd4aac..9acb1f738bc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -55,6 +55,7 @@ import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.discovery.zen.ping.ZenPingService; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction; +import org.elasticsearch.env.NodeEnvironment; import 
org.elasticsearch.indices.store.IndicesStoreIntegrationIT; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -207,6 +208,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // TODO: Rarely use default settings form some of these Settings nodeSettings = Settings.builder() .put(settings) + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numberOfNodes) .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode) .build(); diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 96c52d9dc8e..ee403bfe910 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -74,18 +74,14 @@ public class NodeEnvironmentTests extends ESTestCase { } public void testNodeLockSingleEnvironment() throws IOException { - final Settings settings = buildEnvSettings(Settings.builder() - .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 1).build()); + final Settings settings = buildEnvSettings(Settings.builder().put("node.max_local_storage_nodes", 1).build()); NodeEnvironment env = newNodeEnvironment(settings); List dataPaths = Environment.PATH_DATA_SETTING.get(settings); - try { - // Reuse the same location and attempt to lock again - new NodeEnvironment(settings, new Environment(settings)); - fail("env has already locked all the data directories it is allowed"); - } catch (IllegalStateException ex) { - assertThat(ex.getMessage(), containsString("Failed to obtain node lock")); - } + // Reuse the same location and attempt to lock again + IllegalStateException ex = + expectThrows(IllegalStateException.class, () -> new NodeEnvironment(settings, new Environment(settings))); + assertThat(ex.getMessage(), containsString("failed to obtain node lock")); // Close the environment that 
holds the lock and make sure we can get the lock after release env.close(); @@ -121,7 +117,7 @@ public class NodeEnvironmentTests extends ESTestCase { } public void testNodeLockMultipleEnvironment() throws IOException { - final Settings settings = buildEnvSettings(Settings.EMPTY); + final Settings settings = buildEnvSettings(Settings.builder().put("node.max_local_storage_nodes", 2).build()); final NodeEnvironment first = newNodeEnvironment(settings); List dataPaths = Environment.PATH_DATA_SETTING.get(settings); NodeEnvironment second = new NodeEnvironment(settings, new Environment(settings)); diff --git a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index d2bffb0f749..062f8c01f42 100644 --- a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -41,7 +41,6 @@ import java.io.IOException; import static org.hamcrest.Matchers.startsWith; - public class SearchSlowLogTests extends ESSingleNodeTestCase { @Override protected SearchContext createSearchContext(IndexService indexService) { @@ -54,7 +53,7 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { return new ShardSearchRequest() { @Override public ShardId shardId() { - return null; + return new ShardId(indexService.index(), 0); } @Override @@ -129,8 +128,8 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { IndexService index = createIndex("foo"); // Turning off document logging doesn't log source[] SearchContext searchContext = createSearchContext(index); - SearchSlowLog.SlowLogSearchContextPrinter p = new SearchSlowLog.SlowLogSearchContextPrinter(index.index(), searchContext, 10, true); - assertThat(p.toString(), startsWith(index.index().toString())); + SearchSlowLog.SlowLogSearchContextPrinter p = new SearchSlowLog.SlowLogSearchContextPrinter(searchContext, 10, true); + assertThat(p.toString(), 
startsWith("[foo][0]")); } public void testReformatSetting() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java index eae136cb645..342fa247d73 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index 3ec16948450..d26dd477a85 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -42,7 +42,7 @@ import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java index b3002848693..14dedabd4bd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index e814d2970cb..cde6500d605 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -51,8 +51,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilderTests; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index c59fb147902..f122d66ebe3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; @@ -27,7 +28,7 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index 02eab6dc0aa..627eb74007b 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.indices.memory.breaker; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -40,6 +41,7 @@ public class 
CircuitBreakerNoopIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2) .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), "noop") // This is set low, because if the "noop" is not a noop, it will break .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") diff --git a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java index 9841b5ba2b3..15fac5ab4c2 100644 --- a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java +++ b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 0a69622c9b8..d7bc1645c33 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -36,12 +36,12 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase; -import org.elasticsearch.search.highlight.CustomHighlighter; -import 
org.elasticsearch.search.highlight.FastVectorHighlighter; -import org.elasticsearch.search.highlight.Highlighter; -import org.elasticsearch.search.highlight.PlainHighlighter; -import org.elasticsearch.search.highlight.PostingsHighlighter; +import org.elasticsearch.search.fetch.subphase.ExplainFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.highlight.CustomHighlighter; +import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; +import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter; +import org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter; import org.elasticsearch.search.suggest.CustomSuggester; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.completion.CompletionSuggester; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index e2b91b59fe3..f5350544917 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -44,8 +44,8 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; -import org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index ab42d076a23..72f961963ea 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -27,8 +27,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilderTests; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index f63fe22c6be..49c24ba9952 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -60,6 +60,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import 
static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -147,6 +148,10 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { for (int i = 0; i < hits.length; ++i) { SearchHit hit = hits[i]; assertThat(hit.explanation(), notNullValue()); + assertThat(hit.explanation().getDetails().length, equalTo(1)); + assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), + endsWith("idf(docFreq=100, docCount=100)")); assertThat("id[" + hit.id() + "] -> " + hit.explanation().toString(), hit.id(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; @@ -171,6 +176,10 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { for (int i = 0; i < hits.length; ++i) { SearchHit hit = hits[i]; assertThat(hit.explanation(), notNullValue()); + assertThat(hit.explanation().getDetails().length, equalTo(1)); + assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), + endsWith("idf(docFreq=100, docCount=100)")); assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(total + i))); } total += hits.length; @@ -317,6 +326,10 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { SearchHit hit = searchResponse.getHits().hits()[i]; // System.out.println(hit.shard() + ": " + hit.explanation()); assertThat(hit.explanation(), notNullValue()); + assertThat(hit.explanation().getDetails().length, equalTo(1)); + assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), + endsWith("idf(docFreq=100, docCount=100)")); // assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(100 - i - 1))); assertThat("make sure we don't have duplicates", expectedIds.remove(hit.id()), notNullValue()); } diff --git 
a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 833e14fe21c..4101190df83 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -60,8 +60,8 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorParsers; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilderTests; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.rescore.QueryRescoreBuilderTests; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 201ad62ed20..3aa98942833 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -47,8 +47,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import 
org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java similarity index 90% rename from core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 974d0ade282..1e43ffe532e 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -17,18 +17,17 @@ * under the License. */ -package org.elasticsearch.search.innerhits; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.ArrayUtil; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; @@ -36,8 +35,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import 
org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -168,14 +166,16 @@ public class InnerHitsIT extends ESIntegTestCase { new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) .setExplain(true) .addDocValueField("comments.message") - .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) + .addScriptField("script", + new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) .setSize(1) )).get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); assertThat(innerHits.getTotalHits(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); - assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("fox eat quick")); + assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), + equalTo("fox eat quick")); assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in")); assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat")); assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5")); @@ -334,12 +334,14 @@ public class InnerHitsIT extends ESIntegTestCase { int numChildDocs = child1InnerObjects[parent] = scaledRandomIntBetween(1, numDocs); int limit = child1 + numChildDocs; for (; child1 < limit; child1++) { - requestBuilders.add(client().prepareIndex("idx", "child1", String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}")); + requestBuilders.add(client().prepareIndex("idx", "child1", + String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}")); } numChildDocs = child2InnerObjects[parent] = 
scaledRandomIntBetween(1, numDocs); limit = child2 + numChildDocs; for (; child2 < limit; child2++) { - requestBuilders.add(client().prepareIndex("idx", "child2", String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}")); + requestBuilders.add(client().prepareIndex("idx", "child2", + String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}")); } } indexRandom(true, requestBuilders); @@ -399,10 +401,14 @@ public class InnerHitsIT extends ESIntegTestCase { .addMapping("answer", "_parent", "type=question", "body", "type=text") ); List requests = new ArrayList<>(); - requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?")); - requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body", "install fail2ban and enable rules for apache")); - requests.add(client().prepareIndex("stack", "question", "2").setSource("body", "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?")); - requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body", "Denyhosts protects only ssh; Fail2Ban protects all daemons.")); + requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication " + + "to protect a resource. 
How can I throttle authentication attempts to protect against brute force attacks?")); + requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body", + "install fail2ban and enable rules for apache")); + requests.add(client().prepareIndex("stack", "question", "2").setSource("body", + "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?")); + requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body", + "Denyhosts protects only ssh; Fail2Ban protects all daemons.")); indexRandom(true, requests); SearchResponse response = client().prepareSearch("stack") @@ -491,24 +497,25 @@ public class InnerHitsIT extends ESIntegTestCase { } public void testNestedMultipleLayers() throws Exception { - assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message") - .field("type", "text") - .endObject() - .startObject("remarks") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "text").endObject() + assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject() + .startObject("article").startObject("properties") + .startObject("comments") + .field("type", "nested") + .startObject("properties") + .startObject("message") + .field("type", "text") + .endObject() + .startObject("remarks") + .field("type", "nested") + .startObject("properties") + .startObject("message").field("type", "text").endObject() + .endObject() .endObject() .endObject() .endObject() - .endObject() - .startObject("title") - .field("type", "text") - .endObject() + .startObject("title") + .field("type", "text") + .endObject() .endObject().endObject().endObject())); List requests = new ArrayList<>(); @@ -560,8 +567,8 @@ public class InnerHitsIT extends 
ESIntegTestCase { // Directly refer to the second level: response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit(new InnerHitBuilder())) - .get(); + .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder())).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("2")); @@ -621,7 +628,8 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).id(), equalTo("1")); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), + equalTo("comments")); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue()); } @@ -654,28 +662,32 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder())) - .get(); + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder())).get(); assertNoFailures(response); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue()); + SearchHit hit = response.getHits().getAt(0); + assertThat(hit.id(), equalTo("1")); + SearchHits messages = hit.getInnerHits().get("comments.messages"); + assertThat(messages.getTotalHits(), equalTo(1L)); + assertThat(messages.getAt(0).id(), equalTo("1")); + assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); + assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue()); response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit(new InnerHitBuilder())) - .get(); + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder())).get(); assertNoFailures(response); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), 
equalTo("comments.messages")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(1)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue()); + hit = response.getHits().getAt(0); + assertThat(hit.id(), equalTo("1")); + messages = hit.getInnerHits().get("comments.messages"); + assertThat(messages.getTotalHits(), equalTo(1L)); + assertThat(messages.getAt(0).id(), equalTo("1")); + assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); + assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); + assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue()); // index the message in an object form instead of an array requests = new ArrayList<>(); @@ -685,16 +697,18 @@ public class InnerHitsIT extends ESIntegTestCase { .endObject())); indexRandom(true, requests); response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder())) - .get(); + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder())).get(); assertNoFailures(response); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue()); + hit = response.getHits().getAt(0);; + assertThat(hit.id(), equalTo("1")); + messages = hit.getInnerHits().get("comments.messages"); + assertThat(messages.getTotalHits(), equalTo(1L)); + assertThat(messages.getAt(0).id(), equalTo("1")); + assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); + assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); + assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue()); } public void testRoyals() throws Exception { @@ -841,12 +855,14 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); waitForRelocation(ClusterHealthStatus.GREEN); + QueryBuilder query = boolQuery() + .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1")) + .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2")) + .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3")); + query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit( + new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC))); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", boolQuery() - .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1")) - .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2")) - .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3")), - ScoreMode.Avg).innerHit(new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC)))) + .setQuery(query) .setSize(numDocs) .addSort("field1", SortOrder.ASC) .get(); @@ -885,7 +901,8 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("index") - .setQuery(hasChildQuery("child", matchQuery("field", 
"value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder())) + .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None) + .innerHit(new InnerHitBuilder())) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -899,8 +916,10 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); + QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None) + .innerHit(new InnerHitBuilder()); response = client().prepareSearch("index") - .setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None).innerHit(new InnerHitBuilder())) + .setQuery(query) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 1); @@ -917,8 +936,10 @@ public class InnerHitsIT extends ESIntegTestCase { requests.add(client().prepareIndex("index1", "child", "1").setParent("1").setSource("field", "value1")); indexRandom(true, requests); + QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None) + .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)); SearchResponse response = client().prepareSearch("index1") - .setQuery(hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None).innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1))) + .setQuery(query) .addSort("_uid", SortOrder.ASC) .get(); assertNoFailures(response); @@ -935,8 +956,10 @@ public class InnerHitsIT extends ESIntegTestCase { .setRefreshPolicy(IMMEDIATE) .get(); + query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)); response = 
client().prepareSearch("index2") - .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1))) + .setQuery(query) .addSort("_uid", SortOrder.ASC) .get(); assertNoFailures(response); diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java similarity index 98% rename from core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java index 300c4f141b0..170638b295f 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.matchedqueries; +package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.QueryBuilder; @@ -69,8 +69,10 @@ public class MatchedQueriesIT extends ESIntegTestCase { } } - searchResponse = client().prepareSearch() - .setQuery(boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))).get(); + searchResponse = client().prepareSearch().setQuery( + boolQuery() + .should(rangeQuery("number").lte(2).queryName("test1")) + .should(rangeQuery("number").gt(2).queryName("test2"))).get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.id().equals("1") || hit.id().equals("2")) { diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/NestedChildrenFilterTests.java similarity index 96% rename from 
core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/NestedChildrenFilterTests.java index 7587866b144..440d90bdba4 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/NestedChildrenFilterTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.fetch.innerhits; +package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -38,7 +38,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.store.Directory; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenQuery; +import org.elasticsearch.search.fetch.subphase.InnerHitsContext.NestedInnerHits.NestedChildrenQuery; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java similarity index 88% rename from core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java index 05b999a9196..0ec6d7cd83a 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java @@ -16,10 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; +import org.elasticsearch.search.fetch.subphase.highlight.HighlighterContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import java.util.ArrayList; import java.util.List; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterPlugin.java similarity index 90% rename from core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterPlugin.java index 0f42fd43f00..b234d7236e1 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterPlugin.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import java.util.Map; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java similarity index 97% rename from core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java index a5810c115b2..8e354a89774 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java @@ -16,11 +16,12 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 54e33aad581..ed272040f51 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; import org.elasticsearch.Version; @@ -49,9 +49,12 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.highlight.HighlightBuilder.Field; -import org.elasticsearch.search.highlight.HighlightBuilder.Order; -import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; +import org.elasticsearch.search.fetch.subphase.highlight.AbstractHighlighterBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Order; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java similarity index 85% rename from core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index ff0fcccd105..c7a5fc241ab 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -16,9 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -38,12 +39,11 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.search.MatchQuery; -import org.elasticsearch.index.search.MatchQuery.Type; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder.Field; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matcher; @@ -242,7 +242,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { * query. 
We cut off and extract terms if there are more than 16 terms in the query */ assertAcked(prepareCreate("test") - .addMapping("test", "body", "type=text,analyzer=custom_analyzer,search_analyzer=custom_analyzer,term_vector=with_positions_offsets") + .addMapping("test", + "body", "type=text,analyzer=custom_analyzer,search_analyzer=custom_analyzer,term_vector=with_positions_offsets") .setSettings( Settings.builder().put(indexSettings()) .put("analysis.filter.wordDelimiter.type", "word_delimiter") @@ -258,32 +259,33 @@ public class HighlighterSearchIT extends ESIntegTestCase { ensureGreen(); client().prepareIndex("test", "test", "1") - .setSource("body", "Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature") + .setSource("body", "Test: http://www.facebook.com http://elasticsearch.org http://xing.com " + + "http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature Test: " + + "http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com " + + "http://twitter.com this is a test for highlighting feature") .get(); refresh(); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("body", "Test: http://www.facebook.com ").type(Type.PHRASE)) + SearchResponse search = client().prepareSearch().setQuery(matchPhraseQuery("body", "Test: http://www.facebook.com ")) .highlighter(new HighlightBuilder().field("body")).execute().actionGet(); assertHighlight(search, 0, "body", 0, startsWith("Test: http://www.facebook.com")); search = client() .prepareSearch() - .setQuery( - matchQuery( - "body", - "Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for 
highlighting feature Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature") - .type(Type.PHRASE)).highlighter(new HighlightBuilder().field("body")).execute().actionGet(); - assertHighlight( - search, - 0, - "body", - 0, - equalTo("Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com")); + .setQuery(matchPhraseQuery("body", "Test: http://www.facebook.com http://elasticsearch.org http://xing.com " + + "http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature Test: " + + "http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com " + + "http://twitter.com this is a test for highlighting feature")) + .highlighter(new HighlightBuilder().field("body")).execute().actionGet(); + assertHighlight(search, 0, "body", 0, equalTo("Test: http://www.facebook.com " + + "http://elasticsearch.org http://xing.com http://cnn.com http://quora.com")); } public void testNgramHighlighting() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", - "name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets", - "name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets") + "name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + + "term_vector=with_positions_offsets", + "name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + + "term_vector=with_positions_offsets") .setSettings(Settings.builder() .put(indexSettings()) .put("analysis.filter.my_ngram.max_gram", 20) @@ -304,7 +306,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { ensureGreen(); SearchResponse search = client().prepareSearch().setQuery(matchQuery("name", "logica m")) .highlighter(new 
HighlightBuilder().field("name")).get(); - assertHighlight(search, 0, "name", 0, equalTo("logicacmg ehemals avinci - the know how company")); + assertHighlight(search, 0, "name", 0, + equalTo("logicacmg ehemals avinci - the know how company")); search = client().prepareSearch().setQuery(matchQuery("name", "logica ma")).highlighter(new HighlightBuilder().field("name")).get(); assertHighlight(search, 0, "name", 0, equalTo("logicacmg ehemals avinci - the know how company")); @@ -332,7 +335,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1") .setSource("no_long_term", "This is a test where foo is highlighed and should be highlighted", - "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed and should be highlighted") + "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed " + + "and should be highlighted") .get(); refresh(); @@ -343,13 +347,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(search, 0, "long_term", 0, 1, equalTo("thisisaverylongwordandmakessurethisfails")); search = client().prepareSearch() - .setQuery(matchQuery("no_long_term", "test foo highlighed").type(Type.PHRASE).slop(3)) + .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).postTags("").preTags("")) .get(); assertNotHighlighted(search, 0, "no_long_term"); search = client().prepareSearch() - .setQuery(matchQuery("no_long_term", "test foo highlighed").type(Type.PHRASE).slop(3)) + .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) .highlighter(new HighlightBuilder().field("no_long_term", 30, 1).postTags("").preTags("")) .get(); @@ -360,8 +364,17 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", 
jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title and don't use term vector, now lets see if it works... - .startObject("title").field("type", "text").field("store", false).field("term_vector", "no").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "no").endObject().endObject().endObject() + .startObject("title") + .field("type", "text") + .field("store", false) + .field("term_vector", "no") + .endObject() + .startObject("attachments").startObject("properties") + .startObject("body") + .field("type", "text") + .field("store", false) + .field("term_vector", "no") + .endObject().endObject().endObject() .endObject().endObject().endObject())); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -369,8 +382,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject() .field("title", "This is a test on the highlighting bug present in elasticsearch") - .startArray("attachments").startObject().field("body", "attachment 1").endObject().startObject().field("body", "attachment 2").endObject().endArray() - .endObject()); + .startArray("attachments") + .startObject().field("body", "attachment 1").endObject() + .startObject().field("body", "attachment 2").endObject() + .endArray().endObject()); } indexRandom(true, indexRequestBuilders); @@ -398,8 +413,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... 
- .startObject("title").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject() + .startObject("title") + .field("type", "text") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("attachments") + .startObject("properties") + .startObject("body") + .field("type", "text") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject() .endObject().endObject().endObject())); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -407,8 +434,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject() .field("title", "This is a test on the highlighting bug present in elasticsearch") - .startArray("attachments").startObject().field("body", "attachment 1").endObject().startObject().field("body", "attachment 2").endObject().endArray() - .endObject()); + .startArray("attachments") + .startObject().field("body", "attachment 1").endObject() + .startObject().field("body", "attachment 2").endObject() + .endArray().endObject()); } indexRandom(true, indexRequestBuilders); @@ -436,8 +465,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... 
- .startObject("title").field("type", "text").field("store", false).field("index_options", "offsets").endObject() - .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("index_options", "offsets").endObject().endObject().endObject() + .startObject("title") + .field("type", "text") + .field("store", false) + .field("index_options", "offsets") + .endObject() + .startObject("attachments") + .startObject("properties") + .startObject("body") + .field("type", "text") + .field("store", false) + .field("index_options", "offsets") + .endObject() + .endObject() + .endObject() .endObject().endObject().endObject())); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -446,8 +487,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource(XContentFactory.jsonBuilder().startObject() .array("title", "This is a test on the highlighting bug present in elasticsearch. Hopefully it works.", "This is the second bug to perform highlighting on.") - .startArray("attachments").startObject().field("body", "attachment for this test").endObject().startObject().field("body", "attachment 2").endObject().endArray() - .endObject()); + .startArray("attachments") + .startObject().field("body", "attachment for this test").endObject() + .startObject().field("body", "attachment 2").endObject() + .endArray().endObject()); } indexRandom(true, indexRequestBuilders); @@ -457,7 +500,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field("title", -1, 0)).get(); for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch. Hopefully it works.")); + assertHighlight(search, i, "title", 0, + equalTo("This is a test on the highlighting bug present in elasticsearch. 
Hopefully it works.")); assertHighlight(search, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); } @@ -484,11 +528,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testHighlightIssue1994() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", "title", "type=text,store=false", "titleTV", "type=text,store=false,term_vector=with_positions_offsets")); + .addMapping("type1", + "title", "type=text,store=false", + "titleTV", "type=text,store=false,term_vector=with_positions_offsets")); - indexRandom(false, client().prepareIndex("test", "type1", "1") - .setSource("title", new String[]{"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"}, - "titleTV", new String[]{"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"})); + String[] titles = new String[] {"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"}; + indexRandom(false, client().prepareIndex("test", "type1", "1").setSource("title", titles, "titleTV", titles)); indexRandom(true, client().prepareIndex("test", "type1", "2") .setSource("titleTV", new String[]{"some text to highlight", "highlight other text"})); @@ -526,7 +571,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .query(termQuery("field1", "test")) .highlighter(highlight().order("score").preTags("").postTags("").fragmentSize(1).numOfFragments(1) .field(new HighlightBuilder.Field("field1").numOfFragments(2)) - .field(new HighlightBuilder.Field("field2").preTags("").postTags("").fragmentSize(50).requireFieldMatch(false))); + .field(new HighlightBuilder.Field("field2").preTags("").postTags("") + .fragmentSize(50).requireFieldMatch(false))); SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -704,7 +750,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> searching on _all, highlighting 
on field1"); source = searchSource() .query(termQuery("_all", "test")) - .highlighter(highlight().field("field1", 100, 0).order("score").preTags("").postTags("").requireFieldMatch(false)); + .highlighter(highlight().field("field1", 100, 0).order("score").preTags("").postTags("") + .requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -713,7 +760,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(termQuery("_all", "quick")) - .highlighter(highlight().field("field2", 100, 0).order("score").preTags("").postTags("").requireFieldMatch(false)); + .highlighter(highlight().field("field2", 100, 0).order("score").preTags("").postTags("") + .requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -722,7 +770,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(prefixQuery("_all", "qui")) - .highlighter(highlight().field("field2", 100, 0).order("score").preTags("").postTags("").requireFieldMatch(false)); + .highlighter(highlight().field("field2", 100, 0).order("score").preTags("").postTags("") + .requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -773,7 +822,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> highlighting and searching on field1 with large phrase limit"); source = searchSource() .query(termQuery("field1", "t")) - .highlighter(highlight().highlighterType("fvh").field("field1", 20, 1).order("score").preTags("").postTags("").phraseLimit(30000)); + .highlighter(highlight().highlighterType("fvh").field("field1", 20, 1).order("score").preTags("").postTags("") + .phraseLimit(30000)); SearchResponse largePhraseLimit = client().search(searchRequest("test").source(source)).actionGet(); 
assertHighlight(largePhraseLimit, 0, "field1", 0, 1, containsString("t")); @@ -1055,7 +1105,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, 1, equalTo("This is a html escaping highlighting test for *&? elasticsearch")); + assertHighlight(search, i, "title", 0, 1, + equalTo("This is a html escaping highlighting test for *&? elasticsearch")); } } @@ -1083,11 +1134,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "text").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() - .endObject().endObject() - .endObject().endObject().endObject())); + .startObject("title") + .field("type", "text") + .field("store", true) + .field("term_vector", "with_positions_offsets") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", true) + .field("term_vector", "with_positions_offsets") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject().endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); @@ -1111,11 +1171,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", 
"text").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() - .endObject().endObject() - .endObject().endObject().endObject())); + .startObject("title") + .field("type", "text") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject().endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); @@ -1141,10 +1210,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "text").field("store", true).field("term_vector", "no").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", true).field("term_vector", "no").field("analyzer", "whitespace").endObject() - .endObject().endObject() + .startObject("title") + .field("type", "text") + .field("store", true) + .field("term_vector", "no") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", true) + .field("term_vector", "no") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject() .endObject().endObject().endObject())); ensureGreen(); @@ -1171,10 +1250,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testMultiMapperNoVectorFromSource() 
throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "text").field("store", false).field("term_vector", "no").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", false).field("term_vector", "no").field("analyzer", "whitespace").endObject() - .endObject().endObject() + .startObject("title") + .field("type", "text") + .field("store", false) + .field("term_vector", "no") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", false) + .field("term_vector", "no") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); @@ -1219,7 +1308,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setQuery(matchPhraseQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")), RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); + containsString("the field [title] should be indexed with term vector with position offsets to be " + + "used with fast vector highlighter")); //should not fail if there is a wildcard assertNoFailures(client().prepareSearch() @@ -1256,7 +1346,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, 1, equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724")); + assertHighlight(search, i, "title", 0, 1, + equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724")); } // Using plain 
highlighter instead of FVH on the field level @@ -1268,7 +1359,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, 1, equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724")); + assertHighlight(search, i, "title", 0, 1, + equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724")); } } @@ -1277,7 +1369,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .addMapping("type1", "tags", "type=text,term_vector=with_positions_offsets")); ensureGreen(); client().prepareIndex("test", "type1", "1") - .setSource("tags", new String[]{ + .setSource("tags", new String[] { "this is a really long tag i would like to highlight", "here is another one that is very long and has the tag token near the end"}).get(); refresh(); @@ -1287,7 +1379,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field("tags", -1, 0)).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, + equalTo("here is another one that is very long and has the tag token near the end")); } public void testBoostingQuery() { @@ -1311,8 +1404,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testBoostingQueryTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") - .get(); + client().prepareIndex("test", "type1").setSource( + "field1", "this is a test", + "field2", "The quick brown fox jumps over the lazy dog").get(); 
refresh(); logger.info("--> highlighting and searching on field1"); @@ -1347,7 +1441,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test", "type1").setSource( + "field1", "this is a test", + "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) @@ -1372,10 +1468,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { "field3", "type=text,analyzer=synonym")); ensureGreen(); - client().prepareIndex("test", "type1", "0") - .setSource("field0", "The quick brown fox jumps over the lazy dog", "field1", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("test", "type1", "1") - .setSource("field1", "The quick browse button is a fancy thing, right bro?").get(); + client().prepareIndex("test", "type1", "0").setSource( + "field0", "The quick brown fox jumps over the lazy dog", + "field1", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test", "type1", "1").setSource("field1", "The quick browse button is a fancy thing, right bro?").get(); refresh(); logger.info("--> highlighting and searching on field0"); SearchSourceBuilder source = searchSource() @@ -1393,16 +1489,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf(equalTo("The quick browse button is a fancy thing, right bro?"), equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field1", 0, 1, 
anyOf(equalTo("The quick browse button is a fancy thing, right bro?"), equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); // with synonyms - client().prepareIndex("test", "type2", "0") - .setSource("field4", "The quick brown fox jumps over the lazy dog", "field3", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("test", "type2", "1") - .setSource("field4", "The quick browse button is a fancy thing, right bro?").get(); - client().prepareIndex("test", "type2", "2") - .setSource("field4", "a quick fast blue car").get(); + client().prepareIndex("test", "type2", "0").setSource( + "field4", "The quick brown fox jumps over the lazy dog", + "field3", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test", "type2", "1").setSource( + "field4", "The quick browse button is a fancy thing, right bro?").get(); + client().prepareIndex("test", "type2", "2").setSource( + "field4", "a quick fast blue car").get(); refresh(); source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) @@ -1417,8 +1518,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf(equalTo("The quick browse button is a fancy thing, right bro?"), equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf(equalTo("The quick browse button is a fancy 
thing, right bro?"), equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); logger.info("--> highlighting and searching on field4"); source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) @@ -1445,7 +1550,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { .fragmenter("simple"))).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 1, 2, + equalTo("here is another one that is very long tag and has the tag token near the end")); response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -1453,8 +1559,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { new HighlightBuilder().field(new HighlightBuilder.Field("tags").fragmentSize(-1).numOfFragments(2) .fragmenter("span"))).get(); - assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); + assertHighlight(response, 0, "tags", 0, + equalTo("this is a really long tag i would like to highlight")); + assertHighlight(response, 0, "tags", 1, 2, + equalTo("here is another one that is very long tag and has the tag token near the end")); assertFailures(client().prepareSearch("test") 
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) @@ -1484,8 +1592,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { } public void testFastVectorHighlighterMultipleFields() { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets", "field2", "type=text,term_vector=with_positions_offsets")); + assertAcked(prepareCreate("test").addMapping("type1", + "field1", "type=text,term_vector=with_positions_offsets", + "field2", "type=text,term_vector=with_positions_offsets")); ensureGreen(); index("test", "type1", "1", "field1", "The quick brown fox", "field2", "The slow brown fox"); @@ -1524,9 +1633,14 @@ public class HighlighterSearchIT extends ESIntegTestCase { // Issue #3211 public void testNumericHighlighting() throws Exception { assertAcked(prepareCreate("test") - .addMapping("test", "text", "type=text", - "byte", "type=byte", "short", "type=short", "int", "type=integer", "long", "type=long", - "float", "type=float", "double", "type=double")); + .addMapping("test", + "text", "type=text", + "byte", "type=byte", + "short", "type=short", + "int", "type=integer", + "long", "type=long", + "float", "type=float", + "double", "type=double")); ensureGreen(); client().prepareIndex("test", "test", "1").setSource("text", "elasticsearch test", @@ -1566,8 +1680,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { } public void testHighlightUsesHighlightQuery() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); + assertAcked(prepareCreate("test").addMapping("type1", "text", + "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); index("test", "type1", "1", "text", "Testing the highlight query feature"); @@ -1632,8 +1746,8 @@ public class HighlighterSearchIT extends 
ESIntegTestCase { } public void testHighlightNoMatchSize() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); + assertAcked(prepareCreate("test").addMapping("type1", "text", + "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text = "I am pretty long so some of me should get cut off. Second sentence"; @@ -1740,8 +1854,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { } public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); + assertAcked(prepareCreate("test").addMapping("type1", + "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text1 = "I am pretty long so some of me should get cut off. 
We'll see how that goes."; @@ -1775,19 +1889,19 @@ public class HighlighterSearchIT extends ESIntegTestCase { field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); // But if the field was actually empty then you should get no highlighting field @@ -1797,19 +1911,19 @@ public class HighlighterSearchIT extends ESIntegTestCase { field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); // Same for if the field doesn't even exist on the document @@ -1820,19 +1934,19 @@ public class HighlighterSearchIT extends ESIntegTestCase { 
field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) -.highlighter(new HighlightBuilder().field(field)).get(); + .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "postings"); // Again same if the field isn't mapped @@ -1852,8 +1966,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { } public void testHighlightNoMatchSizeNumberOfFragments() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); + assertAcked(prepareCreate("test").addMapping("type1", + "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text1 = "This is the first sentence. This is the second sentence." 
+ HighlightUtils.PARAGRAPH_SEPARATOR; @@ -1930,7 +2044,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); logger.info("--> searching on field2, highlighting on field2"); source = searchSource() @@ -1940,24 +2055,29 @@ public class HighlighterSearchIT extends ESIntegTestCase { searchResponse = client().search(searchRequest("test").source(source)).actionGet(); //phrase query results in highlighting all different terms regardless of their positions - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); //lets fall back to the standard highlighter then, what people would do to highlight query matches logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter"); source = searchSource() .query(matchPhraseQuery("_all", "quick brown")) - .highlighter(highlight().field("field2").preTags("").postTags("").highlighterType("plain").requireFieldMatch(false)); + .highlighter(highlight() + .field("field2").preTags("").postTags("").highlighterType("plain").requireFieldMatch(false)); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); } public void testPostingsHighlighterMultipleFields() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", 
type1PostingsffsetsMapping()).get()); ensureGreen(); - index("test", "type1", "1", "field1", "The quick brown fox. Second sentence.", "field2", "The slow brown fox. Second sentence."); + index("test", "type1", "1", + "field1", "The quick brown fox. Second sentence.", + "field2", "The slow brown fox. Second sentence."); refresh(); SearchResponse response = client().prepareSearch("test") @@ -1973,9 +2093,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1", "1") - .setSource("field1", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. The quick brown dog jumps over the lazy fox.", - "field2", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. The quick brown dog jumps over the lazy fox.").get(); + client().prepareIndex("test", "type1", "1").setSource( + "field1", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. " + + "The quick brown dog jumps over the lazy fox.", + "field2", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. " + + "The quick brown dog jumps over the lazy fox.").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -1991,7 +2113,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox.")); client().prepareIndex("test", "type1", "2") - .setSource("field1", new String[]{"The quick brown fox jumps over the lazy dog. Second sentence not finished", "The lazy red fox jumps over the quick dog.", "The quick brown dog jumps over the lazy fox."}).get(); + .setSource("field1", new String[] { + "The quick brown fox jumps over the lazy dog. 
Second sentence not finished", + "The lazy red fox jumps over the quick dog.", + "The quick brown dog jumps over the lazy fox."}).get(); refresh(); source = searchSource() @@ -2004,9 +2129,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { for (SearchHit searchHit : searchResponse.getHits()) { if ("1".equals(searchHit.id())) { - assertHighlight(searchHit, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. The quick brown dog jumps over the lazy fox.")); + assertHighlight(searchHit, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog. " + + "The lazy red fox jumps over the quick dog. " + + "The quick brown dog jumps over the lazy fox.")); } else if ("2".equals(searchHit.id())) { - assertHighlight(searchHit, "field1", 0, 3, equalTo("The quick brown fox jumps over the lazy dog. Second sentence not finished")); + assertHighlight(searchHit, "field1", 0, 3, + equalTo("The quick brown fox jumps over the lazy dog. 
Second sentence not finished")); assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red fox jumps over the quick dog.")); assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox.")); } else { @@ -2020,8 +2148,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all").field("store", true).field("index_options", "offsets").endObject() .startObject("properties") - .startObject("field1").field("type", "text").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() - .startObject("field2").field("type", "text").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() + .startObject("field1") + .field("type", "text") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("field2") + .field("type", "text") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() .endObject() .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping)); @@ -2035,17 +2171,25 @@ public class HighlighterSearchIT extends ESIntegTestCase { String highlighterType = rarely() ? 
null : RandomPicks.randomFrom(random(), highlighterTypes); MultiMatchQueryBuilder.Type[] supportedQueryTypes; if ("postings".equals(highlighterType)) { - //phrase_prefix is not supported by postings highlighter, as it rewrites against an empty reader, the prefix will never match any term - supportedQueryTypes = new MultiMatchQueryBuilder.Type[]{MultiMatchQueryBuilder.Type.BEST_FIELDS, MultiMatchQueryBuilder.Type.CROSS_FIELDS, MultiMatchQueryBuilder.Type.MOST_FIELDS, MultiMatchQueryBuilder.Type.PHRASE}; + /* + * phrase_prefix is not supported by postings highlighter, as it rewrites against an empty reader, the prefix will never + * match any term + */ + supportedQueryTypes = new MultiMatchQueryBuilder.Type[]{ + MultiMatchQueryBuilder.Type.BEST_FIELDS, + MultiMatchQueryBuilder.Type.CROSS_FIELDS, + MultiMatchQueryBuilder.Type.MOST_FIELDS, + MultiMatchQueryBuilder.Type.PHRASE}; } else { supportedQueryTypes = MultiMatchQueryBuilder.Type.values(); } MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(random(), supportedQueryTypes); - final MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType); + MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType); SearchSourceBuilder source = searchSource() .query(multiMatchQueryBuilder) - .highlighter(highlight().highlightQuery(randomBoolean() ? multiMatchQueryBuilder : null).highlighterType(highlighterType) + .highlighter(highlight().highlightQuery(randomBoolean() ? 
multiMatchQueryBuilder : null) + .highlighterType(highlighterType) .field(new Field("field1").requireFieldMatch(true).preTags("").postTags(""))); logger.info("Running multi-match type: [{}] highlight with type: [{}]", matchQueryType, highlighterType); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); @@ -2060,9 +2204,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { ensureGreen(); client().prepareIndex("test", "type1") - .setSource("field1", new String[]{"This sentence contains one match, not that short. This sentence contains two sentence matches. This one contains no matches.", - "This is the second value's first sentence. This one contains no matches. This sentence contains three sentence occurrences (sentence).", - "One sentence match here and scored lower since the text is quite long, not that appealing. This one contains no matches."}).get(); + .setSource("field1", new String[]{ + "This sentence contains one match, not that short. This sentence contains two sentence matches. " + + "This one contains no matches.", + "This is the second value's first sentence. This one contains no matches. " + + "This sentence contains three sentence occurrences (sentence).", + "One sentence match here and scored lower since the text is quite long, not that appealing. 
" + + "This one contains no matches."}).get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2076,11 +2224,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(highlightFieldMap.size(), equalTo(1)); HighlightField field1 = highlightFieldMap.get("field1"); assertThat(field1.fragments().length, equalTo(5)); - assertThat(field1.fragments()[0].string(), equalTo("This sentence contains three sentence occurrences (sentence).")); + assertThat(field1.fragments()[0].string(), + equalTo("This sentence contains three sentence occurrences (sentence).")); assertThat(field1.fragments()[1].string(), equalTo("This sentence contains two sentence matches.")); assertThat(field1.fragments()[2].string(), equalTo("This is the second value's first sentence.")); assertThat(field1.fragments()[3].string(), equalTo("This sentence contains one match, not that short.")); - assertThat(field1.fragments()[4].string(), equalTo("One sentence match here and scored lower since the text is quite long, not that appealing.")); + assertThat(field1.fragments()[4].string(), + equalTo("One sentence match here and scored lower since the text is quite long, not that appealing.")); } public void testPostingsHighlighterEscapeHtml() throws Exception { @@ -2105,13 +2255,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterMultiMapperWithStore() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("properties") - .startObject("title").field("type", "text").field("store", true).field("index_options", "offsets").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", true).field("index_options", "offsets").field("analyzer", "whitespace").endObject() - .endObject().endObject() - .endObject().endObject().endObject())); + .addMapping("type1", 
jsonBuilder().startObject().startObject("type1").startObject("properties") + .startObject("title") + .field("type", "text") + .field("store", true) + .field("index_options", "offsets") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", true) + .field("index_options", "offsets") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject().endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test . Second sentence.").get(); refresh(); @@ -2139,11 +2297,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("title").field("type", "text").field("store", false).field("index_options", "offsets").field("analyzer", "classic") - .startObject("fields") - .startObject("key").field("type", "text").field("store", false).field("index_options", "offsets").field("analyzer", "whitespace").endObject() - .endObject().endObject() - .endObject().endObject().endObject())); + .startObject("title") + .field("type", "text") + .field("store", false) + .field("index_options", "offsets") + .field("analyzer", "classic") + .startObject("fields") + .startObject("key") + .field("type", "text") + .field("store", false) + .field("index_options", "offsets") + .field("analyzer", "whitespace") + .endObject() + .endObject() + .endObject().endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); @@ -2189,14 +2356,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setQuery(matchQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("title").highlighterType("postings")), RestStatus.BAD_REQUEST, - 
containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); + containsString("the field [title] should be indexed with positions and offsets in the " + + "postings list to be used with postings highlighter")); assertFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("title").highlighterType("postings")), RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); + containsString("the field [title] should be indexed with positions and offsets in the " + + "postings list to be used with postings highlighter")); //should not fail if there is a wildcard assertNoFailures(client().prepareSearch() @@ -2207,8 +2376,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterBoostingQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") - .get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field1"); @@ -2224,7 +2393,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) @@ -2248,7 +2418,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); @@ -2263,7 +2434,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck")) @@ -2277,7 +2449,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(regexpQuery("field2", "qu[a-l]+k")) @@ -2291,7 +2464,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(wildcardQuery("field2", "qui*")) @@ -2326,7 +2500,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); - client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); + client().prepareIndex("test", "type1") + .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(queryStringQuery("qui*").defaultField("field2")) @@ -2374,7 +2549,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource().query(boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0"))) + SearchSourceBuilder source = searchSource() + .query(boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0"))) .highlighter(highlight().field("field1")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); @@ -2601,12 +2777,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject()) .get(); refresh(); - SearchResponse search = client().prepareSearch().setSource( - new SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text")).highlighter(new HighlightBuilder().field("*"))) - .get(); + SearchResponse search = client().prepareSearch().setSource(new SearchSourceBuilder() + .query(QueryBuilders.matchQuery("keyword_field", "some text")) + .highlighter(new HighlightBuilder().field("*"))).get(); assertNoFailures(search); assertThat(search.getHits().totalHits(), equalTo(1L)); - assertThat(search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(), equalTo("some text")); + assertThat(search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(), + equalTo("some text")); } public void testStringFieldHighlighting() throws IOException { @@ -2629,12 +2806,13 @@ public class HighlighterSearchIT extends 
ESIntegTestCase { .setSource(jsonBuilder().startObject().field("string_field", "some text").endObject()) .get(); refresh(); - SearchResponse search = client().prepareSearch().setSource( - new SearchSourceBuilder().query(QueryBuilders.matchQuery("string_field", "some text")).highlighter(new HighlightBuilder().field("*"))) - .get(); + SearchResponse search = client().prepareSearch().setSource(new SearchSourceBuilder() + .query(QueryBuilders.matchQuery("string_field", "some text")) + .highlighter(new HighlightBuilder().field("*"))).get(); assertNoFailures(search); assertThat(search.getHits().totalHits(), equalTo(1L)); - assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), equalTo("some text")); + assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), + equalTo("some text")); } public void testACopyFieldWithNestedQuery() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java rename to core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java index 9c14e2116d1..428751e8859 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/PlainHighlighterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.highlight; +package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -35,6 +35,7 @@ import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.index.analysis.FieldNameAnalyzer; +import org.elasticsearch.search.fetch.subphase.highlight.CustomQueryScorer; import java.io.IOException; import java.util.HashMap; diff --git a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java index 3162e94f3e2..1c296a3724a 100644 --- a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java +++ b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -34,7 +34,8 @@ import org.elasticsearch.index.search.stats.SearchStats.Stats; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; @@ -47,7 +48,6 @@ import java.util.function.Function; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java index 1f2c14ba00d..60722fb5f6d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java @@ -32,7 +32,6 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; public class CompletionSuggestionTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19896") public void testToReduce() throws Exception { List> shardSuggestions = new ArrayList<>(); int nShards = randomIntBetween(1, 10); @@ -47,8 +46,11 @@ public class CompletionSuggestionTests extends ESTestCase { float maxScore = randomIntBetween(totalResults, totalResults*2); for (int i = 0; i < totalResults; i++) { Suggest.Suggestion suggestion = randomFrom(shardSuggestions); - suggestion.getEntries().get(0).addOption(new CompletionSuggestion.Entry.Option(i, new Text(""), - maxScore - i, Collections.emptyMap())); + CompletionSuggestion.Entry entry = suggestion.getEntries().get(0); + if (entry.getOptions().size() < size) { + entry.addOption(new CompletionSuggestion.Entry.Option(i, new Text(""), + maxScore - i, Collections.emptyMap())); + } } CompletionSuggestion reducedSuggestion = CompletionSuggestion.reduceTo(shardSuggestions); assertNotNull(reducedSuggestion); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index e1df7201fbe..01e6a490dad 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.transport.TransportAddress; import 
org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; @@ -128,6 +129,7 @@ public class TribeIT extends ESIntegTestCase { tribe1Defaults.putArray("tribe.t2." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(cluster2.client())); Settings merged = Settings.builder() + .put(internalCluster().getDefaultSettings()) .put("tribe.t1.cluster.name", internalCluster().getClusterName()) .put("tribe.t2.cluster.name", cluster2.getClusterName()) .put("tribe.t1.transport.type", "local") @@ -142,7 +144,6 @@ public class TribeIT extends ESIntegTestCase { .put(tribe1Defaults.build()) .put(tribe2Defaults.build()) - .put(internalCluster().getDefaultSettings()) .put("node.name", "tribe_node") // make sure we can identify threads from this node .build(); diff --git a/distribution/src/main/packaging/env/elasticsearch b/distribution/src/main/packaging/env/elasticsearch index 34eff906661..8ce1bba934d 100644 --- a/distribution/src/main/packaging/env/elasticsearch +++ b/distribution/src/main/packaging/env/elasticsearch @@ -24,7 +24,7 @@ #ES_JAVA_OPTS= # Configure restart on package upgrade (true, every other setting will lead to not restarting) -#ES_RESTART_ON_UPGRADE=true +#RESTART_ON_UPGRADE=true ################################ # Elasticsearch service diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index a21e0c5c82f..b43a10e25b9 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -17,6 +17,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install analysis-icu 
---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -31,6 +32,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove analysis-icu ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/analysis-kuromoji.asciidoc b/docs/plugins/analysis-kuromoji.asciidoc index d7b357cb455..454ba3d2de2 100644 --- a/docs/plugins/analysis-kuromoji.asciidoc +++ b/docs/plugins/analysis-kuromoji.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install analysis-kuromoji ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -28,6 +29,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove analysis-kuromoji ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. 
diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index 34f14abe3c5..4fcfcf6caba 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -15,6 +15,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install analysis-phonetic ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -29,6 +30,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove analysis-phonetic ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/analysis-smartcn.asciidoc b/docs/plugins/analysis-smartcn.asciidoc index fb1eebb4518..665ccbaf611 100644 --- a/docs/plugins/analysis-smartcn.asciidoc +++ b/docs/plugins/analysis-smartcn.asciidoc @@ -20,6 +20,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install analysis-smartcn ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -34,6 +35,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove analysis-smartcn ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. @@ -45,4 +47,3 @@ The plugin provides the `smartcn` analyzer and `smartcn_tokenizer` tokenizer, which are not configurable. 
NOTE: The `smartcn_word` token filter and `smartcn_sentence` have been deprecated. - diff --git a/docs/plugins/analysis-stempel.asciidoc b/docs/plugins/analysis-stempel.asciidoc index 26aad5248ec..8a42135a879 100644 --- a/docs/plugins/analysis-stempel.asciidoc +++ b/docs/plugins/analysis-stempel.asciidoc @@ -17,6 +17,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install analysis-stempel ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -31,6 +32,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove analysis-stempel ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. @@ -40,4 +42,3 @@ The node must be stopped before removing the plugin. The plugin provides the `polish` analyzer and `polish_stem` token filter, which are not configurable. - diff --git a/docs/plugins/discovery-azure-classic.asciidoc b/docs/plugins/discovery-azure-classic.asciidoc index 13fe47b40fa..a9ec9929b49 100644 --- a/docs/plugins/discovery-azure-classic.asciidoc +++ b/docs/plugins/discovery-azure-classic.asciidoc @@ -17,6 +17,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install discovery-azure-classic ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. 
@@ -31,6 +32,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove discovery-azure-classic ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. @@ -385,7 +387,7 @@ curl http://localhost:9200/ This command should give you a JSON result: -[source,javascript] +[source,js] ---- { "status" : 200, diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 7a9cd3313b2..8c11942fff2 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -13,6 +13,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install discovery-ec2 ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -27,6 +28,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove discovery-ec2 ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. @@ -140,7 +142,7 @@ discovery: You must also set `cloud.aws.region` if you are not using default AWS region. See <> for details. The ec2 discovery is using the same credentials as the rest of the AWS services provided by this plugin (`repositories`). -See <> for details. +See <> for details. 
The following are a list of settings (prefixed with `discovery.ec2`) that can further control the discovery: @@ -267,7 +269,7 @@ When selecting disk please be aware of the following order of preference: * http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html[Instance Store] - When running clusters of larger size and with replicas the ephemeral nature of Instance Store is ideal since Elasticsearch can tolerate the loss of shards. With Instance Store one gets the performance benefit of having disk physically attached to the host running the instance and also the cost benefit of avoiding paying extra for EBS. -Prefer https://aws.amazon.com/amazon-linux-ami/[Amazon Linux AMIs] as since Elasticsearch runs on the JVM, OS dependencies are very minimal and one can benefit from the lightweight nature, support, and performance tweaks specific to EC2 that the Amazon Linux AMIs offer. +Prefer https://aws.amazon.com/amazon-linux-ami/[Amazon Linux AMIs] as since Elasticsearch runs on the JVM, OS dependencies are very minimal and one can benefit from the lightweight nature, support, and performance tweaks specific to EC2 that the Amazon Linux AMIs offer. ===== Networking * Networking throttling takes place on smaller instance types in both the form of https://lab.getbase.com/how-we-discovered-limitations-on-the-aws-tcp-stack/[bandwidth and number of connections]. Therefore if large number of connections are needed and networking is becoming a bottleneck, avoid https://aws.amazon.com/ec2/instance-types/[instance types] with networking labeled as `Moderate` or `Low`. 
diff --git a/docs/plugins/discovery-gce.asciidoc b/docs/plugins/discovery-gce.asciidoc index f543bf92709..aa458d28af4 100644 --- a/docs/plugins/discovery-gce.asciidoc +++ b/docs/plugins/discovery-gce.asciidoc @@ -13,6 +13,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install discovery-gce ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -27,6 +28,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove discovery-gce ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index 65add6ac927..3c72e514094 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -21,6 +21,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install ingest-attachment ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -35,6 +36,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove ingest-attachment ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. 
diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 1626be6c8e6..d6eced47eca 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -21,6 +21,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install ingest-geoip ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -35,6 +36,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove ingest-geoip ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc index 29903224f39..95997a34c10 100644 --- a/docs/plugins/ingest-user-agent.asciidoc +++ b/docs/plugins/ingest-user-agent.asciidoc @@ -16,6 +16,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install ingest-user-agent ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -30,6 +31,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove ingest-user-agent ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. 
diff --git a/docs/plugins/lang-javascript.asciidoc b/docs/plugins/lang-javascript.asciidoc index b7f858d6b4e..422c23ed0b2 100644 --- a/docs/plugins/lang-javascript.asciidoc +++ b/docs/plugins/lang-javascript.asciidoc @@ -15,6 +15,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install lang-javascript ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -29,6 +30,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove lang-javascript ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/lang-python.asciidoc b/docs/plugins/lang-python.asciidoc index 3920ff0ab0a..96d00d9e2c8 100644 --- a/docs/plugins/lang-python.asciidoc +++ b/docs/plugins/lang-python.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install lang-python ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -28,6 +29,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove lang-python ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. 
diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index 119ec10c905..a35d9efe614 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -19,6 +19,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install mapper-attachments ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -33,6 +34,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove mapper-attachments ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/mapper-murmur3.asciidoc b/docs/plugins/mapper-murmur3.asciidoc index 28b7a2387ef..f81c226d1f0 100644 --- a/docs/plugins/mapper-murmur3.asciidoc +++ b/docs/plugins/mapper-murmur3.asciidoc @@ -15,6 +15,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install mapper-murmur3 ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -29,6 +30,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove mapper-murmur3 ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. 
diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc index df16d7eb857..4b2d02a6a2b 100644 --- a/docs/plugins/mapper-size.asciidoc +++ b/docs/plugins/mapper-size.asciidoc @@ -15,6 +15,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install mapper-size ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -29,6 +30,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove mapper-size ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. @@ -106,4 +108,3 @@ GET my_index/_search <2> Aggregating on the `_size` field <3> Sorting on the `_size` field <4> Accessing the `_size` field in scripts (inline scripts must be modules-security-scripting.html#enable-dynamic-scripting[enabled] for this example to work) - diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 03466f0c643..726f55cc889 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install repository-azure ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. 
@@ -28,6 +29,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove repository-azure ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index 5b2c633f23e..a9658e1f219 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install repository-gcs ---------------------------------------------------------------- +// NOTCONSOLE NOTE: The plugin requires new permission to be installed in order to work @@ -30,6 +31,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove repository-gcs ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index 62b1d2a95ca..02239a78b15 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed through the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install repository-hdfs ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on _every_ node in the cluster, and each node must be restarted after installation. 
@@ -28,6 +29,7 @@ The plugin can be removed by specifying the _installed_ package: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove repository-hdfs ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 90b0c803996..3b35656b750 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -14,6 +14,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install repository-s3 ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. @@ -28,6 +29,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove repository-s3 ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/plugins/store-smb.asciidoc b/docs/plugins/store-smb.asciidoc index 731894ae0a8..ac35342f2f8 100644 --- a/docs/plugins/store-smb.asciidoc +++ b/docs/plugins/store-smb.asciidoc @@ -13,6 +13,7 @@ This plugin can be installed using the plugin manager: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin install store-smb ---------------------------------------------------------------- +// NOTCONSOLE The plugin must be installed on every node in the cluster, and each node must be restarted after installation. 
@@ -27,6 +28,7 @@ The plugin can be removed with the following command: ---------------------------------------------------------------- sudo bin/elasticsearch-plugin remove store-smb ---------------------------------------------------------------- +// NOTCONSOLE The node must be stopped before removing the plugin. diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 76ee65a6abe..f6875f60c60 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -310,3 +310,14 @@ The unit 'w' representing weeks is no longer supported. Fractional time values (e.g., 0.5s) are no longer supported. For example, this means when setting timeouts "0.5s" will be rejected and should instead be input as "500ms". + +==== Node max local storage nodes + +Previous versions of Elasticsearch defaulted to allowing multiple nodes to share the same data +directory (up to 50). This can be confusing where users accidentally start up multiple nodes and end +up thinking that they've lost data because the second node will start with an empty data directory. +While the default of allowing multiple nodes is friendly to playing with forming a small cluster on +a laptop, and end-users do sometimes run multiple nodes on the same host, this tends to be the +exception. Keeping with Elasticsearch's continual movement towards safer out-of-the-box defaults, +and optimizing for the norm instead of the exception, the default for +`node.max_local_storage_nodes` is now one. 
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index 5857e1b0ee7..3b3980f7ca7 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -38,7 +38,7 @@ which returns: "my_backup": { "type": "fs", "settings": { - "compress": "true", + "compress": true, "location": "/mount/backups/my_backup" } } @@ -219,7 +219,7 @@ specifying the list of indices in the body of the snapshot request. PUT /_snapshot/my_backup/snapshot_1 { "indices": "index_1,index_2", - "ignore_unavailable": "true", + "ignore_unavailable": true, "include_global_state": false } ----------------------------------- @@ -340,7 +340,7 @@ with associated indices POST /_snapshot/my_backup/snapshot_1/_restore { "indices": "index_1,index_2", - "ignore_unavailable": "true", + "ignore_unavailable": true, "include_global_state": true, "rename_pattern": "index_(.+)", "rename_replacement": "restored_index_$1" diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index a83dd93a17e..6216ec2354e 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -293,10 +293,19 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri // NOTE: we truncate the stack because IndyInterface has security issue (needs getClassLoader) // we don't do a security check just as a tradeoff, it cannot really escalate to anything. 
return AccessController.doPrivileged((PrivilegedAction) script::run); - } catch (Exception e) { - if (logger.isTraceEnabled()) { - logger.trace("failed to run {}", e, compiledScript); + } catch (AssertionError ae) { + // Groovy asserts are not java asserts, and cannot be disabled, so we do a best-effort trying to determine if this is a + // Groovy assert (in which case we wrap it and throw), or a real Java assert, in which case we rethrow it as-is, likely + // resulting in the uncaughtExceptionHandler handling it. + final StackTraceElement[] elements = ae.getStackTrace(); + if (elements.length > 0 && "org.codehaus.groovy.runtime.InvokerHelper".equals(elements[0].getClassName())) { + logger.trace("failed to run {}", ae, compiledScript); + throw new ScriptException("Error evaluating " + compiledScript.name(), + ae, emptyList(), "", compiledScript.lang()); } + throw ae; + } catch (Exception | NoClassDefFoundError e) { + logger.trace("failed to run {}", e, compiledScript); throw new ScriptException("Error evaluating " + compiledScript.name(), e, emptyList(), "", compiledScript.lang()); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 96a5343913d..31dc154a9e2 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -123,6 +123,13 @@ public class GroovySecurityTests extends ESTestCase { } } + public void testGroovyScriptsThatThrowErrors() throws Exception { + assertFailure("assert false, \"msg\";", AssertionError.class); + assertFailure("def foo=false; assert foo;", AssertionError.class); + // Groovy's asserts require org.codehaus.groovy.runtime.InvokerHelper, so they are denied + assertFailure("def foo=false; assert foo, \"msg2\";", NoClassDefFoundError.class); + } + /** 
runs a script */ private void doTest(String script) { Map vars = new HashMap(); @@ -146,7 +153,7 @@ public class GroovySecurityTests extends ESTestCase { doTest(script); } - /** asserts that a script triggers securityexception */ + /** asserts that a script triggers the given exceptionclass */ private void assertFailure(String script, Class exceptionClass) { try { doTest(script); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java index e1e71c8e0e3..e73a3267fee 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; import java.util.Map; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java index 3d82cb12d2c..22d18b5f3d8 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.search.aggregations.InternalAggregations; -import 
org.elasticsearch.search.highlight.HighlightField; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import java.io.IOException; import java.util.Arrays; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java index e61f9d68dab..4019f91300c 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 2e9fd517d53..2afa2c92ed1 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -34,9 +34,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.highlight.HighlightPhase; -import org.elasticsearch.search.highlight.Highlighter; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import 
org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; +import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SubSearchContext; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index 55a25768187..ddd01a1a9d9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index bdfa49016e9..e4a10ce04a0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -42,7 +42,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; import 
org.elasticsearch.index.query.functionscore.WeightBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 3cc60d75cf2..b21c131a625 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESSingleNodeTestCase; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 95550f6f654..247caa42210 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -60,7 +60,9 @@ public class TribeUnitTests extends ESTestCase { .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put("transport.type", "local") .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2) + .build(); tribe1 = new 
TribeClientNode( Settings.builder() diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 63aa4484255..dffa285101f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -31,6 +31,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.script.ScriptService; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.action.ActionListener; @@ -1630,6 +1631,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE) // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b") diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index c121164fb01..cc78afb987f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -175,8 +175,8 @@ public final class InternalTestCluster extends TestCluster { public final int HTTP_BASE_PORT = GLOBAL_HTTP_BASE_PORT + CLUSTER_BASE_PORT_OFFSET; - static final int DEFAULT_LOW_NUM_MASTER_NODES = 1; - static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3; + public static final int DEFAULT_LOW_NUM_MASTER_NODES = 1; + public 
static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3; static final int DEFAULT_MIN_NUM_DATA_NODES = 1; static final int DEFAULT_MAX_NUM_DATA_NODES = TEST_NIGHTLY ? 6 : 3; @@ -300,6 +300,7 @@ public final class InternalTestCluster extends TestCluster { builder.put(Environment.PATH_DATA_SETTING.getKey(), dataPath.toString()); } } + builder.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE); builder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), baseDir.resolve("custom")); builder.put(Environment.PATH_HOME_SETTING.getKey(), baseDir); builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 6b123b73c3c..df35b9c3456 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -50,9 +50,9 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseContext; -import org.elasticsearch.search.fetch.script.ScriptFieldsContext; -import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.highlight.SearchContextHighlight; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; +import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java 
b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 67d7b99171d..b393498ec89 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; import org.elasticsearch.transport.TransportSettings; @@ -108,7 +109,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { @Override public Settings nodeSettings(int nodeOrdinal) { - Settings.Builder builder = Settings.builder(); + Settings.Builder builder = Settings.builder().put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numOfNodes); String[] unicastHosts = new String[unicastHostOrdinals.length]; if (nodeOrdinal >= unicastHostPorts.length) { diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index d4af031aa84..f0b7454fe9d 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -128,12 +128,16 @@ public class InternalTestClusterTests extends ESTestCase { boolean masterNodes = randomBoolean(); int minNumDataNodes = randomIntBetween(0, 3); int maxNumDataNodes = randomIntBetween(minNumDataNodes, 4); + int numClientNodes = randomIntBetween(0, 2); final String clusterName1 = "shared1"; final String clusterName2 = "shared2"; 
NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() + .put( + NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), + 2 * ((masterNodes ? InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes)) .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); @@ -145,7 +149,7 @@ public class InternalTestClusterTests extends ESTestCase { .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); } }; - int numClientNodes = randomIntBetween(0, 2); + boolean enableHttpPipelining = randomBoolean(); String nodePrefix = "foobar"; @@ -187,13 +191,17 @@ public class InternalTestClusterTests extends ESTestCase { long clusterSeed = randomLong(); boolean masterNodes = randomBoolean(); // we need one stable node - int minNumDataNodes = 2; - int maxNumDataNodes = 2; + final int minNumDataNodes = 2; + final int maxNumDataNodes = 2; + final int numClientNodes = randomIntBetween(0, 2); final String clusterName1 = "shared1"; NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put( + NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), + 2 + (masterNodes ? 
InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes) .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .build(); @@ -203,7 +211,7 @@ public class InternalTestClusterTests extends ESTestCase { return Settings.builder() .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); } - }; int numClientNodes = randomIntBetween(0, 2); + }; boolean enableHttpPipelining = randomBoolean(); String nodePrefix = "test"; Path baseDir = createTempDir(); @@ -269,11 +277,13 @@ public class InternalTestClusterTests extends ESTestCase { public void testDifferentRolesMaintainPathOnRestart() throws Exception { final Path baseDir = createTempDir(); + final int numNodes = 5; InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, true, 0, 0, "test", new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numNodes) .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") @@ -289,7 +299,7 @@ public class InternalTestClusterTests extends ESTestCase { cluster.beforeTest(random(), 0.0); try { Map> pathsPerRole = new HashMap<>(); - for (int i = 0; i < 5; i++) { + for (int i = 0; i < numNodes; i++) { final DiscoveryNode.Role role = randomFrom(MASTER, DiscoveryNode.Role.DATA, DiscoveryNode.Role.INGEST); final String node; switch (role) {