Merge branch 'master' into enhancement/discovery_node_one_getter
commit b9f9b2e3ee
@@ -432,7 +432,7 @@ and in another window:

----------------------------------------------------
vagrant up centos-7 --provider virtualbox && vagrant ssh centos-7
cd $RPM
cd $TESTROOT
sudo bats $BATS/*rpm*.bats
----------------------------------------------------
@@ -440,7 +440,7 @@ If you wanted to retest all the release artifacts on a single VM you could:

-------------------------------------------------
gradle prepareTestRoot
vagrant up trusty --provider virtualbox && vagrant ssh trusty
vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404
cd $TESTROOT
sudo bats $BATS/*.bats
-------------------------------------------------
@@ -258,7 +258,7 @@ class ClusterFormationTasks {
'path.repo' : "${node.sharedDir}/repo",
'path.shared_data' : "${node.sharedDir}/",
// Define a node attribute so we can test that it exists
'node.testattr' : 'test',
'node.attr.testattr' : 'test',
'repositories.url.allowed_urls': 'http://snapshot.test*'
]
esConfig['http.port'] = node.config.httpPort
@@ -415,7 +415,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpServerTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLog.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
@@ -107,7 +107,16 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
request.indices());

shardsIts = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference());
expectedSuccessfulOps = shardsIts.size();
final int shardCount = shardsIts.size();
final long shardCountLimit = clusterService.getClusterSettings().get(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING);
if (shardCount > shardCountLimit) {
throw new IllegalArgumentException("Trying to query " + shardCount + " shards, which is over the limit of "
+ shardCountLimit + ". This limit exists because querying many shards at the same time can make the "
+ "job of the coordinating node very CPU and/or memory intensive. It is usually a better idea to "
+ "have a smaller number of larger shards. Update [" + TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey()
+ "] to a greater value if you really want to query that many shards at the same time.");
}
expectedSuccessfulOps = shardCount;
// we need to add 1 for non active partition, since we count it in the total!
expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
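Note (not part of the diff): the new guard rejects a search once the number of resolved shard iterators exceeds action.search.shard_count.limit, which the setting introduced further down registers with a default of 1000 and a minimum of 1. A minimal sketch of the arithmetic with made-up numbers:

    // Hypothetical layout, not from the commit: 30 indices with 50 primary shards each.
    int shardCount = 30 * 50;                        // 1500 shard iterators for one request
    long shardCountLimit = 1000L;                    // default of action.search.shard_count.limit
    boolean rejected = shardCount > shardCountLimit; // true -> the IllegalArgumentException above

Because the setting is dynamic, a cluster that genuinely needs such wide searches can raise the limit at runtime instead of changing code.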
@@ -31,7 +31,7 @@ import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.index.query.support.InnerHitsBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
@@ -26,6 +26,8 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.indices.IndexClosedException;
@@ -45,6 +47,10 @@ import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
*/
public class TransportSearchAction extends HandledTransportAction<SearchRequest, SearchResponse> {

/** The maximum number of shards for a single search request. */
public static final Setting<Long> SHARD_COUNT_LIMIT_SETTING = Setting.longSetting(
"action.search.shard_count.limit", 1000L, 1L, Property.Dynamic, Property.NodeScope);

private final ClusterService clusterService;
private final SearchTransportService searchTransportService;
private final SearchPhaseController searchPhaseController;
@@ -520,7 +520,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ

private void handleBlockException(ClusterBlockException blockException) {
if (blockException.retryable()) {
logger.trace("cluster is blocked ({}), scheduling a retry", blockException.getMessage());
logger.trace("cluster is blocked, scheduling a retry", blockException);
retry(blockException);
} else {
finishAsFailed(blockException);
@@ -19,6 +19,7 @@

package org.elasticsearch.cluster;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -307,7 +308,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
@Override
public void onFailure(Throwable e) {
if (e instanceof ReceiveTimeoutTransportException) {
logger.error("NodeStatsAction timed out for ClusterInfoUpdateJob (reason [{}])", e.getMessage());
logger.error("NodeStatsAction timed out for ClusterInfoUpdateJob", e);
} else {
if (e instanceof ClusterBlockException) {
if (logger.isTraceEnabled()) {
@@ -337,7 +338,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
@Override
public void onFailure(Throwable e) {
if (e instanceof ReceiveTimeoutTransportException) {
logger.error("IndicesStatsAction timed out for ClusterInfoUpdateJob (reason [{}])", e.getMessage());
logger.error("IndicesStatsAction timed out for ClusterInfoUpdateJob", e);
} else {
if (e instanceof ClusterBlockException) {
if (logger.isTraceEnabled()) {
@@ -68,20 +68,17 @@ public class DiscoveryNodeService extends AbstractComponent {
public DiscoveryNode buildLocalNode(TransportAddress publishAddress) {
final String nodeId = generateNodeId(settings);
Map<String, String> attributes = new HashMap<>(Node.NODE_ATTRIBUTES.get(this.settings).getAsMap());
if (attributes.containsKey("client")) {
throw new IllegalArgumentException("node.client setting is no longer supported, use " + Node.NODE_MASTER_SETTING.getKey()
+ ", " + Node.NODE_DATA_SETTING.getKey() + " and " + Node.NODE_INGEST_SETTING.getKey() + " explicitly instead");
Set<DiscoveryNode.Role> roles = new HashSet<>();
if (Node.NODE_INGEST_SETTING.get(settings)) {
roles.add(DiscoveryNode.Role.INGEST);
}
if (Node.NODE_MASTER_SETTING.get(settings)) {
roles.add(DiscoveryNode.Role.MASTER);
}
if (Node.NODE_DATA_SETTING.get(settings)) {
roles.add(DiscoveryNode.Role.DATA);
}

attributes.remove("name"); // name is extracted in other places
Set<DiscoveryNode.Role> roles = new HashSet<>();
for (DiscoveryNode.Role role : DiscoveryNode.Role.values()) {
String isRoleEnabled = attributes.remove(role.getRoleName());
//all existing roles default to true
if (isRoleEnabled == null || Booleans.parseBooleanExact(isRoleEnabled)) {
roles.add(role);
}
}
for (CustomAttributesProvider provider : customAttributesProviders) {
try {
Map<String, String> customAttributes = provider.buildAttributes();
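The two variants in this hunk build the same role set in different ways: one queries the dedicated node.master / node.data / node.ingest settings directly, the other loops over DiscoveryNode.Role.values() and treats a missing attribute as enabled. A sketch of the loop's outcome for a hypothetical attribute map (the keys and values below are illustrative, and assume Role.getRoleName() yields "master", "data" and "ingest"):

    // Sketch only, not from the commit.
    Map<String, String> attributes = new HashMap<>();
    attributes.put("master", "true");
    attributes.put("data", "false");                 // "ingest" left unset, so it defaults to true
    Set<DiscoveryNode.Role> roles = new HashSet<>();
    for (DiscoveryNode.Role role : DiscoveryNode.Role.values()) {
        String isRoleEnabled = attributes.remove(role.getRoleName());
        if (isRoleEnabled == null || Booleans.parseBooleanExact(isRoleEnabled)) {
            roles.add(role);                         // MASTER and INGEST are added, DATA is not
        }
    }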
@@ -19,6 +19,7 @@

package org.elasticsearch.common.settings;

import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
@@ -258,6 +259,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING,
ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING,
TransportSearchAction.SHARD_COUNT_LIMIT_SETTING,
TransportService.TRACE_LOG_EXCLUDE_SETTING,
TransportService.TRACE_LOG_INCLUDE_SETTING,
TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING,
@@ -129,6 +129,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
MapperService.INDEX_MAPPER_DYNAMIC_SETTING,
MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING,
BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING,
IndexModule.INDEX_STORE_TYPE_SETTING,
IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING,
@@ -33,6 +33,7 @@ import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -43,11 +44,14 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -55,13 +59,15 @@ import java.util.function.Function;
/**
* IndexModule represents the central extension point for index level custom implementations like:
* <ul>
* <li>{@link SimilarityProvider} - New {@link SimilarityProvider} implementations can be registered through {@link #addSimilarity(String, BiFunction)}
* while existing Providers can be referenced through Settings under the {@link IndexModule#SIMILARITY_SETTINGS_PREFIX} prefix
* along with the "type" value. For example, to reference the {@link BM25SimilarityProvider}, the configuration
* <tt>"index.similarity.my_similarity.type : "BM25"</tt> can be used.</li>
* <li>{@link SimilarityProvider} - New {@link SimilarityProvider} implementations can be registered through
* {@link #addSimilarity(String, BiFunction)}while existing Providers can be referenced through Settings under the
* {@link IndexModule#SIMILARITY_SETTINGS_PREFIX} prefix along with the "type" value. For example, to reference the
* {@link BM25SimilarityProvider}, the configuration <tt>"index.similarity.my_similarity.type : "BM25"</tt> can be used.</li>
* <li>{@link IndexStore} - Custom {@link IndexStore} instances can be registered via {@link #addIndexStore(String, BiFunction)}</li>
* <li>{@link IndexEventListener} - Custom {@link IndexEventListener} instances can be registered via {@link #addIndexEventListener(IndexEventListener)}</li>
* <li>Settings update listener - Custom settings update listener can be registered via {@link #addSettingsUpdateConsumer(Setting, Consumer)}</li>
* <li>{@link IndexEventListener} - Custom {@link IndexEventListener} instances can be registered via
* {@link #addIndexEventListener(IndexEventListener)}</li>
* <li>Settings update listener - Custom settings update listener can be registered via
* {@link #addSettingsUpdateConsumer(Setting, Consumer)}</li>
* </ul>
*/
public final class IndexModule {
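As the reworked javadoc describes, custom similarities are registered on the module with addSimilarity(String, BiFunction). A minimal sketch, assuming indexModule is the IndexModule handed to a plugin and MySimilarityProvider is a hypothetical SimilarityProvider implementation:

    // Sketch only: "my_similarity" and MySimilarityProvider are illustrative names.
    indexModule.addSimilarity("my_similarity",
            (name, settings) -> new MySimilarityProvider(name, settings));

Built-in providers are referenced through settings instead, e.g. the index.similarity.my_similarity.type : "BM25" example quoted in the javadoc above.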
@@ -84,11 +90,13 @@ public final class IndexModule {
final SetOnce<EngineFactory> engineFactory = new SetOnce<>();
private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
private final Set<IndexEventListener> indexEventListeners = new HashSet<>();
private IndexEventListener listener;
private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndicesQueryCache, QueryCache>> queryCaches = new HashMap<>();
private final SetOnce<String> forceQueryCacheType = new SetOnce<>();
private final List<SearchOperationListener> searchOperationListeners = new ArrayList<>();
private final List<IndexingOperationListener> indexOperationListeners = new ArrayList<>();
private final AtomicBoolean frozen = new AtomicBoolean(false);

public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, AnalysisRegistry analysisRegistry) {
this.indexStoreConfig = indexStoreConfig;
@@ -96,12 +104,15 @@ public final class IndexModule {
this.analysisRegistry = analysisRegistry;
registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache::new);
registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a));
this.searchOperationListeners.add(new SearchSlowLog(indexSettings));
this.indexOperationListeners.add(new IndexingSlowLog(indexSettings));
}

/**
* Adds a Setting and it's consumer for this index.
*/
public <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer) {
ensureNotFrozen();
if (setting == null) {
throw new IllegalArgumentException("setting must not be null");
}
@@ -134,9 +145,7 @@ public final class IndexModule {
* </p>
*/
public void addIndexEventListener(IndexEventListener listener) {
if (this.listener != null) {
throw new IllegalStateException("can't add listener after listeners are frozen");
}
ensureNotFrozen();
if (listener == null) {
throw new IllegalArgumentException("listener must not be null");
}
@@ -147,6 +156,52 @@ public final class IndexModule {
this.indexEventListeners.add(listener);
}

/**
* Adds an {@link SearchOperationListener} for this index. All listeners added here
* are maintained for the entire index lifecycle on this node. Once an index is closed or deleted these
* listeners go out of scope.
* <p>
* Note: an index might be created on a node multiple times. For instance if the last shard from an index is
* relocated to another node the internal representation will be destroyed which includes the registered listeners.
* Once the node holds at least one shard of an index all modules are reloaded and listeners are registered again.
* Listeners can't be unregistered they will stay alive for the entire time the index is allocated on a node.
* </p>
*/
public void addSearchOperationListener(SearchOperationListener listener) {
ensureNotFrozen();
if (listener == null) {
throw new IllegalArgumentException("listener must not be null");
}
if (searchOperationListeners.contains(listener)) {
throw new IllegalArgumentException("listener already added");
}

this.searchOperationListeners.add(listener);
}

/**
* Adds an {@link IndexingOperationListener} for this index. All listeners added here
* are maintained for the entire index lifecycle on this node. Once an index is closed or deleted these
* listeners go out of scope.
* <p>
* Note: an index might be created on a node multiple times. For instance if the last shard from an index is
* relocated to another node the internal representation will be destroyed which includes the registered listeners.
* Once the node holds at least one shard of an index all modules are reloaded and listeners are registered again.
* Listeners can't be unregistered they will stay alive for the entire time the index is allocated on a node.
* </p>
*/
public void addIndexOperationListener(IndexingOperationListener listener) {
ensureNotFrozen();
if (listener == null) {
throw new IllegalArgumentException("listener must not be null");
}
if (indexOperationListeners.contains(listener)) {
throw new IllegalArgumentException("listener already added");
}

this.indexOperationListeners.add(listener);
}

/**
* Adds an {@link IndexStore} type to this index module. Typically stores are registered with a reference to
* it's constructor:
@@ -158,6 +213,7 @@ public final class IndexModule {
* @param provider the instance provider / factory method
*/
public void addIndexStore(String type, BiFunction<IndexSettings, IndexStoreConfig, IndexStore> provider) {
ensureNotFrozen();
if (storeTypes.containsKey(type)) {
throw new IllegalArgumentException("key [" + type +"] already registered");
}
@@ -172,6 +228,7 @@ public final class IndexModule {
* @param similarity SimilarityProvider to register
*/
public void addSimilarity(String name, BiFunction<String, Settings, SimilarityProvider> similarity) {
ensureNotFrozen();
if (similarities.containsKey(name) || SimilarityService.BUILT_IN.containsKey(name)) {
throw new IllegalArgumentException("similarity for name: [" + name + " is already registered");
}
@@ -184,6 +241,7 @@ public final class IndexModule {
* @param provider the provider instance
*/
public void registerQueryCache(String name, BiFunction<IndexSettings, IndicesQueryCache, QueryCache> provider) {
ensureNotFrozen();
if (provider == null) {
throw new IllegalArgumentException("provider must not be null");
}
@@ -194,19 +252,21 @@ public final class IndexModule {
}

/**
* Sets a {@link org.elasticsearch.index.IndexModule.IndexSearcherWrapperFactory} that is called once the IndexService is fully constructed.
* Sets a {@link org.elasticsearch.index.IndexModule.IndexSearcherWrapperFactory} that is called once the IndexService
* is fully constructed.
* Note: this method can only be called once per index. Multiple wrappers are not supported.
*/
public void setSearcherWrapper(IndexSearcherWrapperFactory indexSearcherWrapperFactory) {
ensureNotFrozen();
this.indexSearcherWrapper.set(indexSearcherWrapperFactory);
}

public IndexEventListener freeze() {
// TODO somehow we need to make this pkg private...
if (listener == null) {
listener = new CompositeIndexEventListener(indexSettings, indexEventListeners);
IndexEventListener freeze() { // pkg private for testing
if (this.frozen.compareAndSet(false, true)) {
return new CompositeIndexEventListener(indexSettings, indexEventListeners);
} else {
throw new IllegalStateException("already frozen");
}
return listener;
}

private static boolean isBuiltinType(String storeType) {
@@ -246,10 +306,13 @@ public final class IndexModule {
IndexSearcherWrapper newWrapper(final IndexService indexService);
}

public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter, NodeServicesProvider servicesProvider, IndicesQueryCache indicesQueryCache, MapperRegistry mapperRegistry, IndicesFieldDataCache indicesFieldDataCache,
IndexingOperationListener... listeners) throws IOException {
IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get();
IndexEventListener eventListener = freeze();
public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter,
NodeServicesProvider servicesProvider, IndicesQueryCache indicesQueryCache,
MapperRegistry mapperRegistry, IndicesFieldDataCache indicesFieldDataCache) throws IOException {
final IndexEventListener eventListener = freeze();
IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null
? (shard) -> null : indexSearcherWrapper.get();
eventListener.beforeIndexCreated(indexSettings.getIndex(), indexSettings.getSettings());
final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING);
final IndexStore store;
if (Strings.isEmpty(storeType) || isBuiltinType(storeType)) {
@@ -265,12 +328,15 @@ public final class IndexModule {
}
}
indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, store::setType);
indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate);
final String queryCacheType = forceQueryCacheType.get() != null ? forceQueryCacheType.get() : indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING);
indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
store::setMaxRate);
final String queryCacheType = forceQueryCacheType.get() != null
? forceQueryCacheType.get() : indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING);
final BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
final QueryCache queryCache = queryCacheProvider.apply(indexSettings, indicesQueryCache);
return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(),
servicesProvider, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, indicesFieldDataCache, listeners);
return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter,
analysisRegistry, engineFactory.get(), servicesProvider, queryCache, store, eventListener, searcherWrapperFactory,
mapperRegistry, indicesFieldDataCache, searchOperationListeners, indexOperationListeners);
}

/**
@@ -282,7 +348,14 @@ public final class IndexModule {
* @see #INDEX_QUERY_CACHE_TYPE_SETTING
*/
public void forceQueryCacheType(String type) {
ensureNotFrozen();
this.forceQueryCacheType.set(type);
}

private void ensureNotFrozen() {
if (this.frozen.get()) {
throw new IllegalStateException("Can't modify IndexModule once the index service has been created");
}
}

}
@@ -56,6 +56,7 @@ import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.index.shard.ShadowIndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
@@ -73,8 +74,10 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@@ -109,11 +112,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);
private final IndexSettings indexSettings;
private final IndexingSlowLog slowLog;
private final IndexingOperationListener[] listeners;
private final List<IndexingOperationListener> indexingOperationListeners;
private final List<SearchOperationListener> searchOperationListeners;
private volatile AsyncRefreshTask refreshTask;
private volatile AsyncTranslogFSync fsyncTask;
private final SearchSlowLog searchSlowLog;
private final ThreadPool threadPool;
private final BigArrays bigArrays;

@@ -129,7 +131,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
IndexModule.IndexSearcherWrapperFactory wrapperFactory,
MapperRegistry mapperRegistry,
IndicesFieldDataCache indicesFieldDataCache,
IndexingOperationListener... listenersIn) throws IOException {
List<SearchOperationListener> searchOperationListeners,
List<IndexingOperationListener> indexingOperationListeners) throws IOException {
super(indexSettings);
this.indexSettings = indexSettings;
this.analysisService = registry.build(indexSettings);
@@ -155,15 +158,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
this.engineFactory = engineFactory;
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
this.searcherWrapper = wrapperFactory.newWrapper(this);
this.slowLog = new IndexingSlowLog(indexSettings);

// Add our slowLog to the incoming IndexingOperationListeners:
this.listeners = new IndexingOperationListener[1+listenersIn.length];
this.listeners[0] = slowLog;
System.arraycopy(listenersIn, 0, this.listeners, 1, listenersIn.length);
this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners);
this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners);
// kick off async ops for the first shard in this index
this.refreshTask = new AsyncRefreshTask(this);
searchSlowLog = new SearchSlowLog(indexSettings);
rescheduleFsyncTask(indexSettings.getTranslogDurability());
}

@@ -338,12 +336,13 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
new StoreCloseListener(shardId, canDeleteShardContent, () -> eventListener.onStoreClosed(shardId)));
if (useShadowEngine(primary, indexSettings)) {
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, searchSlowLog, engineWarmer);
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer,
searchOperationListeners);
// no indexing listeners - shadow engines don't index
} else {
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, searchSlowLog, engineWarmer,
listeners);
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer,
searchOperationListeners, indexingOperationListeners);
}
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
eventListener.afterIndexShardCreated(indexShard);
@@ -455,8 +454,12 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
return bigArrays;
}

public SearchSlowLog getSearchSlowLog() {
return searchSlowLog;
List<IndexingOperationListener> getIndexOperationListeners() { // pkg private for testing
return indexingOperationListeners;
}

List<SearchOperationListener> getSearchOperationListener() { // pkg private for testing
return searchOperationListeners;
}

private class StoreCloseListener implements Store.OnClose {
@@ -136,6 +136,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
}

@Override
public void postIndex(Engine.Index index, boolean created) {
final long took = index.endTime() - index.startTime();
postIndexing(index.parsedDoc(), took);
@@ -25,13 +25,14 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext;

import java.util.concurrent.TimeUnit;

/**
*/
public final class SearchSlowLog {
public final class SearchSlowLog implements SearchOperationListener {

private boolean reformat;

@@ -116,7 +117,7 @@ public final class SearchSlowLog {
this.queryLogger.setLevel(level.name());
this.fetchLogger.setLevel(level.name());
}

@Override
public void onQueryPhase(SearchContext context, long tookInNanos) {
if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) {
queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
@@ -129,6 +130,7 @@ public final class SearchSlowLog {
}
}

@Override
public void onFetchPhase(SearchContext context, long tookInNanos) {
if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) {
fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
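With SearchSlowLog reimplemented as a SearchOperationListener, any listener registered through IndexModule#addSearchOperationListener (earlier in this diff) receives the same per-phase callbacks. A sketch of a custom listener, assuming (as the SearchSlowLog change suggests) that the remaining callbacks have default no-op implementations; the class name and threshold are made up:

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;
    import org.elasticsearch.index.shard.SearchOperationListener;
    import org.elasticsearch.search.internal.SearchContext;

    // Hypothetical listener that counts slow query phases instead of logging them.
    final class SlowQueryCounter implements SearchOperationListener {
        private final AtomicLong slowQueries = new AtomicLong();

        @Override
        public void onQueryPhase(SearchContext context, long tookInNanos) {
            if (tookInNanos > TimeUnit.SECONDS.toNanos(1)) {
                slowQueries.incrementAndGet();   // query phase slower than one second
            }
        }
    }

    // Registered while the IndexModule is still mutable, e.g.:
    // indexModule.addSearchOperationListener(new SlowQueryCounter());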
@@ -86,6 +86,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, Property.Dynamic, Property.IndexScope);
public static final Setting<Long> INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING =
Setting.longSetting("index.mapping.total_fields.limit", 1000L, 0, Property.Dynamic, Property.IndexScope);
public static final Setting<Long> INDEX_MAPPING_DEPTH_LIMIT_SETTING =
Setting.longSetting("index.mapping.depth.limit", 20L, 1, Property.Dynamic, Property.IndexScope);
public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true;
public static final Setting<Boolean> INDEX_MAPPER_DYNAMIC_SETTING =
Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.IndexScope);
@@ -292,6 +294,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// this check will be skipped.
checkNestedFieldsLimit(fullPathObjectMappers);
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size());
checkDepthLimit(fullPathObjectMappers.keySet());
}

Set<String> parentTypes = this.parentTypes;
@@ -418,6 +421,27 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}

private void checkDepthLimit(Collection<String> objectPaths) {
final long maxDepth = indexSettings.getValue(INDEX_MAPPING_DEPTH_LIMIT_SETTING);
for (String objectPath : objectPaths) {
checkDepthLimit(objectPath, maxDepth);
}
}

private void checkDepthLimit(String objectPath, long maxDepth) {
int numDots = 0;
for (int i = 0; i < objectPath.length(); ++i) {
if (objectPath.charAt(i) == '.') {
numDots += 1;
}
}
final int depth = numDots + 2;
if (depth > maxDepth) {
throw new IllegalArgumentException("Limit of mapping depth [" + maxDepth + "] in index [" + index().getName()
+ "] has been exceeded due to object field [" + objectPath + "]");
}
}

public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
String defaultMappingSource;
if (PercolatorFieldMapper.TYPE_NAME.equals(mappingType)) {
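The depth computed above is the number of dots in the object path plus two: the root mapping plus one level per path segment. A self-contained sketch of the same arithmetic on a made-up path:

    // Same counting rule as checkDepthLimit, applied to a hypothetical object field.
    String objectPath = "user.address.geo";    // object "geo" nested in "address" nested in "user"
    int numDots = 0;
    for (int i = 0; i < objectPath.length(); ++i) {
        if (objectPath.charAt(i) == '.') {
            numDots += 1;
        }
    }
    int depth = numDots + 2;                   // 2 dots -> depth 4
    // With index.mapping.depth.limit set below 4, the mapping update above would be rejected.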
@@ -43,7 +43,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;

/** A {@link FieldMapper} for full-text fields. */
@@ -60,7 +59,6 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
public static final MappedFieldType FIELD_TYPE = new TextFieldType();

static {
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.freeze();
}

@@ -177,8 +175,8 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
private int fielddataMinSegmentSize;

public TextFieldType() {
// TODO: change the default to false
fielddata = true;
setTokenized(true);
fielddata = false;
fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY;
fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY;
fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE;
@@ -31,17 +31,13 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.index.query.support.InnerHitBuilder;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

/**
@@ -77,16 +73,20 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil

private int maxChildren = DEFAULT_MAX_CHILDREN;

private QueryInnerHits queryInnerHits;
private InnerHitBuilder innerHitBuilder;

static final HasChildQueryBuilder PROTOTYPE = new HasChildQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);

public HasChildQueryBuilder(String type, QueryBuilder query, int maxChildren, int minChildren, ScoreMode scoreMode, QueryInnerHits queryInnerHits) {
public HasChildQueryBuilder(String type, QueryBuilder query, int maxChildren, int minChildren, ScoreMode scoreMode, InnerHitBuilder innerHitBuilder) {
this(type, query);
scoreMode(scoreMode);
this.maxChildren = maxChildren;
this.minChildren = minChildren;
this.queryInnerHits = queryInnerHits;
this.innerHitBuilder = innerHitBuilder;
if (this.innerHitBuilder != null) {
this.innerHitBuilder.setParentChildType(type);
this.innerHitBuilder.setQuery(query);
}
}

public HasChildQueryBuilder(String type, QueryBuilder query) {
@@ -136,16 +136,18 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
*/
public HasChildQueryBuilder innerHit(QueryInnerHits queryInnerHits) {
this.queryInnerHits = queryInnerHits;
public HasChildQueryBuilder innerHit(InnerHitBuilder innerHitBuilder) {
this.innerHitBuilder = Objects.requireNonNull(innerHitBuilder);
this.innerHitBuilder.setParentChildType(type);
this.innerHitBuilder.setQuery(query);
return this;
}

/**
* Returns inner hit definition in the scope of this query and reusing the defined type and query.
*/
public QueryInnerHits innerHit() {
return queryInnerHits;
public InnerHitBuilder innerHit() {
return innerHitBuilder;
}

/**
@@ -193,8 +195,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
builder.field(HasChildQueryParser.MIN_CHILDREN_FIELD.getPreferredName(), minChildren);
builder.field(HasChildQueryParser.MAX_CHILDREN_FIELD.getPreferredName(), maxChildren);
printBoostAndQueryName(builder);
if (queryInnerHits != null) {
queryInnerHits.toXContent(builder, params);
if (innerHitBuilder != null) {
builder.field(HasChildQueryParser.INNER_HITS_FIELD.getPreferredName(), innerHitBuilder, params);
}
builder.endObject();
}
@@ -226,20 +228,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
if (parentFieldMapper.active() == false) {
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
}
if (queryInnerHits != null) {
try (XContentParser parser = queryInnerHits.getXcontentParser()) {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), childDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
}
if (innerHitBuilder != null) {
context.addInnerHit(innerHitBuilder);
}

String parentType = parentFieldMapper.type();
@@ -363,12 +353,12 @@
&& Objects.equals(scoreMode, that.scoreMode)
&& Objects.equals(minChildren, that.minChildren)
&& Objects.equals(maxChildren, that.maxChildren)
&& Objects.equals(queryInnerHits, that.queryInnerHits);
&& Objects.equals(innerHitBuilder, that.innerHitBuilder);
}

@Override
protected int doHashCode() {
return Objects.hash(query, type, scoreMode, minChildren, maxChildren, queryInnerHits);
return Objects.hash(query, type, scoreMode, minChildren, maxChildren, innerHitBuilder);
}

protected HasChildQueryBuilder(StreamInput in) throws IOException {
@@ -378,9 +368,7 @@
final int ordinal = in.readVInt();
scoreMode = ScoreMode.values()[ordinal];
query = in.readQuery();
if (in.readBoolean()) {
queryInnerHits = new QueryInnerHits(in);
}
innerHitBuilder = InnerHitBuilder.optionalReadFromStream(in);
}

@Override
@@ -395,9 +383,9 @@
out.writeInt(maxChildren());
out.writeVInt(scoreMode.ordinal());
out.writeQuery(query);
if (queryInnerHits != null) {
if (innerHitBuilder != null) {
out.writeBoolean(true);
queryInnerHits.writeTo(out);
innerHitBuilder.writeTo(out);
} else {
out.writeBoolean(false);
}
@@ -408,10 +396,10 @@
QueryBuilder rewrite = query.rewrite(queryRewriteContext);
if (rewrite != query) {
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(type, rewrite);
hasChildQueryBuilder.minChildren = minChildren;
hasChildQueryBuilder.maxChildren = maxChildren;
hasChildQueryBuilder.scoreMode = scoreMode;
hasChildQueryBuilder.queryInnerHits = queryInnerHits;
hasChildQueryBuilder.minChildren(minChildren);
hasChildQueryBuilder.maxChildren(maxChildren);
hasChildQueryBuilder.scoreMode(scoreMode);
hasChildQueryBuilder.innerHit(innerHitBuilder);
return hasChildQueryBuilder;
}
return this;
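Taken together, the HasChildQueryBuilder changes replace the opaque QueryInnerHits blob with a structured InnerHitBuilder that is bound to the query's type and child query when it is attached. A usage sketch; the no-argument InnerHitBuilder constructor and the field names are assumptions for illustration:

    // Sketch only: "comment" and the match query are hypothetical.
    HasChildQueryBuilder hasChild = new HasChildQueryBuilder("comment",
            QueryBuilders.matchQuery("message", "elasticsearch"))
            .innerHit(new InnerHitBuilder());
    // innerHit() itself calls setParentChildType("comment") and setQuery(...) on the builder.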
@@ -24,7 +24,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.query.support.InnerHitBuilder;

import java.io.IOException;
import java.util.Locale;
@@ -55,7 +55,7 @@ public class HasChildQueryParser implements QueryParser<HasChildQueryBuilder> {
int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
String queryName = null;
QueryInnerHits queryInnerHits = null;
InnerHitBuilder innerHitBuilder = null;
String currentFieldName = null;
XContentParser.Token token;
QueryBuilder iqb = null;
@@ -68,7 +68,7 @@ public class HasChildQueryParser implements QueryParser<HasChildQueryBuilder> {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
queryInnerHits = new QueryInnerHits(parser);
innerHitBuilder = InnerHitBuilder.fromXContent(parser, parseContext);
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
}
@@ -90,7 +90,7 @@ public class HasChildQueryParser implements QueryParser<HasChildQueryBuilder> {
}
}
}
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, scoreMode, queryInnerHits);
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, scoreMode, innerHitBuilder);
hasChildQueryBuilder.queryName(queryName);
hasChildQueryBuilder.boost(boost);
return hasChildQueryBuilder;
@@ -26,13 +26,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.index.query.support.InnerHitBuilder;

import java.io.IOException;
import java.util.HashSet;
@@ -49,7 +46,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
private final QueryBuilder query;
private final String type;
private boolean score = DEFAULT_SCORE;
private QueryInnerHits innerHit;
private InnerHitBuilder innerHit;

/**
* @param type The parent type
@@ -66,10 +63,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
this.query = query;
}

public HasParentQueryBuilder(String type, QueryBuilder query, boolean score, QueryInnerHits innerHits) {
public HasParentQueryBuilder(String type, QueryBuilder query, boolean score, InnerHitBuilder innerHit) {
this(type, query);
this.score = score;
this.innerHit = innerHits;
this.innerHit = innerHit;
if (this.innerHit != null) {
this.innerHit.setParentChildType(type);
this.innerHit.setQuery(query);
}
}

/**
@@ -83,8 +84,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
/**
* Sets inner hit definition in the scope of this query and reusing the defined type and query.
*/
public HasParentQueryBuilder innerHit(QueryInnerHits innerHit) {
this.innerHit = innerHit;
public HasParentQueryBuilder innerHit(InnerHitBuilder innerHit) {
this.innerHit = Objects.requireNonNull(innerHit);
this.innerHit.setParentChildType(type);
this.innerHit.setQuery(query);
return this;
}

@@ -112,7 +115,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
/**
* Returns inner hit definition in the scope of this query and reusing the defined type and query.
*/
public QueryInnerHits innerHit() {
public InnerHitBuilder innerHit() {
return innerHit;
}

@@ -137,19 +140,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
}

if (innerHit != null) {
try (XContentParser parser = innerHit.getXcontentParser()) {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), parentDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
}
context.addInnerHit(innerHit);
}

Set<String> childTypes = new HashSet<>();
@@ -200,7 +191,7 @@
builder.field(HasParentQueryParser.SCORE_FIELD.getPreferredName(), score);
printBoostAndQueryName(builder);
if (innerHit != null) {
innerHit.toXContent(builder, params);
builder.field(HasParentQueryParser.INNER_HITS_FIELD.getPreferredName(), innerHit, params);
}
builder.endObject();
}
@@ -214,9 +205,7 @@
type = in.readString();
score = in.readBoolean();
query = in.readQuery();
if (in.readBoolean()) {
innerHit = new QueryInnerHits(in);
}
innerHit = InnerHitBuilder.optionalReadFromStream(in);
}

@Override
@@ -255,8 +244,8 @@
QueryBuilder rewrite = query.rewrite(queryShardContext);
if (rewrite != query) {
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(type, rewrite);
hasParentQueryBuilder.score = score;
hasParentQueryBuilder.innerHit = innerHit;
hasParentQueryBuilder.score(score);
hasParentQueryBuilder.innerHit(innerHit);
return hasParentQueryBuilder;
}
return this;
@@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.query.support.InnerHitBuilder;

import java.io.IOException;

@@ -35,6 +35,7 @@ public class HasParentQueryParser implements QueryParser<HasParentQueryBuilder>
public static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode").withAllDeprecated("score");
public static final ParseField TYPE_FIELD = new ParseField("parent_type", "type");
public static final ParseField SCORE_FIELD = new ParseField("score");
public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");

@Override
public String[] names() {
@@ -48,7 +49,7 @@ public class HasParentQueryParser implements QueryParser<HasParentQueryBuilder>
String parentType = null;
boolean score = HasParentQueryBuilder.DEFAULT_SCORE;
String queryName = null;
QueryInnerHits innerHits = null;
InnerHitBuilder innerHits = null;

String currentFieldName = null;
XContentParser.Token token;
@@ -59,8 +60,8 @@ public class HasParentQueryParser implements QueryParser<HasParentQueryBuilder>
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder();
} else if ("inner_hits".equals(currentFieldName)) {
innerHits = new QueryInnerHits(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHits = InnerHitBuilder.fromXContent(parser, parseContext);
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");
}
@@ -27,14 +27,10 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.index.query.support.InnerHitBuilder;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder> {
@@ -55,7 +51,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>

private ScoreMode scoreMode = DEFAULT_SCORE_MODE;

private QueryInnerHits queryInnerHits;
private InnerHitBuilder innerHitBuilder;

public NestedQueryBuilder(String path, QueryBuilder query) {
if (path == null) {
@@ -68,10 +64,14 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
this.query = query;
}

public NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode, QueryInnerHits queryInnerHits) {
public NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode, InnerHitBuilder innerHitBuilder) {
this(path, query);
scoreMode(scoreMode);
this.queryInnerHits = queryInnerHits;
this.innerHitBuilder = innerHitBuilder;
if (this.innerHitBuilder != null) {
this.innerHitBuilder.setNestedPath(path);
this.innerHitBuilder.setQuery(query);
}
}

/**
@@ -88,8 +88,10 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
/**
* Sets inner hit definition in the scope of this nested query and reusing the defined path and query.
*/
public NestedQueryBuilder innerHit(QueryInnerHits innerHit) {
this.queryInnerHits = innerHit;
public NestedQueryBuilder innerHit(InnerHitBuilder innerHit) {
this.innerHitBuilder = Objects.requireNonNull(innerHit);
this.innerHitBuilder.setNestedPath(path);
this.innerHitBuilder.setQuery(query);
return this;
}
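NestedQueryBuilder gets the same treatment as the has_child and has_parent queries: the InnerHitBuilder handed to innerHit(...) is bound to the nested path and query up front. A usage sketch under the same assumption about the InnerHitBuilder constructor; the field names are illustrative:

    // Sketch only: "comments" is a hypothetical nested object field.
    NestedQueryBuilder nested = new NestedQueryBuilder("comments",
            QueryBuilders.matchQuery("comments.text", "great"))
            .innerHit(new InnerHitBuilder());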
@ -103,8 +105,8 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
|||
/**
|
||||
* Returns inner hit definition in the scope of this query and reusing the defined type and query.
|
||||
*/
|
||||
public QueryInnerHits innerHit() {
|
||||
return queryInnerHits;
|
||||
public InnerHitBuilder innerHit() {
|
||||
return innerHitBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -124,8 +126,8 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
|||
builder.field(NestedQueryParser.SCORE_MODE_FIELD.getPreferredName(), HasChildQueryParser.scoreModeAsString(scoreMode));
|
||||
}
|
||||
printBoostAndQueryName(builder);
|
||||
if (queryInnerHits != null) {
|
||||
queryInnerHits.toXContent(builder, params);
|
||||
if (innerHitBuilder != null) {
|
||||
builder.field(NestedQueryParser.INNER_HITS_FIELD.getPreferredName(), innerHitBuilder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
@@ -140,12 +142,12 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        return Objects.equals(query, that.query)
                && Objects.equals(path, that.path)
                && Objects.equals(scoreMode, that.scoreMode)
                && Objects.equals(queryInnerHits, that.queryInnerHits);
                && Objects.equals(innerHitBuilder, that.innerHitBuilder);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(query, path, scoreMode, queryInnerHits);
        return Objects.hash(query, path, scoreMode, innerHitBuilder);
    }

    private NestedQueryBuilder(StreamInput in) throws IOException {

@@ -153,9 +155,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        final int ordinal = in.readVInt();
        scoreMode = ScoreMode.values()[ordinal];
        query = in.readQuery();
        if (in.readBoolean()) {
            queryInnerHits = new QueryInnerHits(in);
        }
        innerHitBuilder = InnerHitBuilder.optionalReadFromStream(in);
    }

    @Override
@@ -163,9 +163,9 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        out.writeString(path);
        out.writeVInt(scoreMode.ordinal());
        out.writeQuery(query);
        if (queryInnerHits != null) {
        if (innerHitBuilder != null) {
            out.writeBoolean(true);
            queryInnerHits.writeTo(out);
            innerHitBuilder.writeTo(out);
        } else {
            out.writeBoolean(false);
        }
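The write path above pairs with InnerHitBuilder.optionalReadFromStream further down: both sides follow the same presence-flag convention. A minimal sketch of that convention as a hypothetical helper, not part of this change:

-------------------------------------------------
// Hypothetical helper illustrating the "optional writeable" convention used above:
// a boolean presence flag is written first, then the payload only when non-null.
static void writeOptionalInnerHit(StreamOutput out, InnerHitBuilder value) throws IOException {
    if (value == null) {
        out.writeBoolean(false);
    } else {
        out.writeBoolean(true);
        value.writeTo(out);
    }
}
-------------------------------------------------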
@@ -187,17 +187,19 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        }
        final BitSetProducer parentFilter;
        final Query childFilter;
        final ObjectMapper parentObjectMapper;
        final Query innerQuery;
        ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
        if (innerHitBuilder != null) {
            context.addInnerHit(innerHitBuilder);
        }
        if (objectMapper == null) {
            parentFilter = context.bitsetFilter(Queries.newNonNestedFilter());
        } else {
            parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter());
        }
        childFilter = nestedObjectMapper.nestedTypeFilter();
        try {
            if (objectMapper == null) {
                parentFilter = context.bitsetFilter(Queries.newNonNestedFilter());
            } else {
                parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter());
            }
            childFilter = nestedObjectMapper.nestedTypeFilter();
            parentObjectMapper = context.nestedScope().nextLevel(nestedObjectMapper);
            context.nestedScope().nextLevel(nestedObjectMapper);
            innerQuery = this.query.toQuery(context);
            if (innerQuery == null) {
                return null;
@@ -205,23 +207,6 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
        } finally {
            context.nestedScope().previousLevel();
        }

        if (queryInnerHits != null) {
            try (XContentParser parser = queryInnerHits.getXcontentParser()) {
                XContentParser.Token token = parser.nextToken();
                if (token != XContentParser.Token.START_OBJECT) {
                    throw new IllegalStateException("start object expected but was: [" + token + "]");
                }
                InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
                if (innerHits != null) {
                    ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());

                    InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parentObjectMapper, nestedObjectMapper);
                    String name = innerHits.getName() != null ? innerHits.getName() : path;
                    context.addInnerHits(name, nestedInnerHits);
                }
            }
        }
        return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode);
    }
@@ -229,7 +214,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
    protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
        QueryBuilder rewrite = query.rewrite(queryRewriteContext);
        if (rewrite != query) {
            return new NestedQueryBuilder(path, rewrite).scoreMode(scoreMode);
            return new NestedQueryBuilder(path, rewrite).scoreMode(scoreMode).innerHit(innerHitBuilder);
        }
        return this;
    }
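Taken together, the NestedQueryBuilder changes mean callers now attach inner hits as a typed builder instead of raw XContent. A rough usage sketch; the match query, field names and inner hit name are illustrative and not taken from this change:

-------------------------------------------------
// Illustrative only: building a nested query with the new InnerHitBuilder-based API.
NestedQueryBuilder nested = new NestedQueryBuilder(
        "comments",                                               // nested path (illustrative)
        QueryBuilders.matchQuery("comments.text", "smoke test"),  // inner query (illustrative)
        ScoreMode.Avg,
        new InnerHitBuilder().setName("matched_comments").setSize(3));
-------------------------------------------------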
@@ -25,7 +25,7 @@ import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.query.support.InnerHitBuilder;

public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {

@@ -49,7 +49,7 @@ public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
        QueryBuilder query = null;
        String path = null;
        String currentFieldName = null;
        QueryInnerHits queryInnerHits = null;
        InnerHitBuilder innerHitBuilder = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {

@@ -58,7 +58,7 @@ public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
                if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
                    query = parseContext.parseInnerQueryBuilder();
                } else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
                    queryInnerHits = new QueryInnerHits(parser);
                    innerHitBuilder = InnerHitBuilder.fromXContent(parser, parseContext);
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
                }

@@ -76,7 +76,7 @@ public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
            }
        }
        return new NestedQueryBuilder(path, query, scoreMode, queryInnerHits).queryName(queryName).boost(boost);
        return new NestedQueryBuilder(path, query, scoreMode, innerHitBuilder).queryName(queryName).boost(boost);
    }

    @Override
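For orientation, the request-body shape this parser now accepts can be written from Java with XContentBuilder; the path, query and inner_hits values below are illustrative only and the score_mode field is omitted:

-------------------------------------------------
// Illustrative request body for a nested query with inline inner_hits.
XContentBuilder body = XContentFactory.jsonBuilder()
        .startObject()
            .startObject("nested")
                .field("path", "comments")
                .startObject("query")
                    .startObject("match_all").endObject()
                .endObject()
                .startObject("inner_hits")
                    .field("name", "matched_comments")
                    .field("size", 3)
                .endObject()
            .endObject()
        .endObject();
-------------------------------------------------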
@@ -47,13 +47,13 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.InnerHitBuilder;
import org.elasticsearch.index.query.support.InnerHitsBuilder;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;

@@ -141,10 +141,6 @@ public class QueryShardContext extends QueryRewriteContext {
        this.parseContext.reset(jp);
    }

    public InnerHitsSubSearchContext getInnerHitsContext(XContentParser parser) throws IOException {
        return InnerHitsQueryParserHelper.parse(parser);
    }

    public AnalysisService getAnalysisService() {
        return mapperService.analysisService();
    }
@@ -208,14 +204,14 @@ public class QueryShardContext extends QueryRewriteContext {
        return isFilter;
    }

    public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) {
    public void addInnerHit(InnerHitBuilder innerHitBuilder) throws IOException {
        SearchContext sc = SearchContext.current();
        if (sc == null) {
            throw new QueryShardException(this, "inner_hits unsupported");
        }

        InnerHitsContext innerHitsContext = sc.innerHits();
        innerHitsContext.addInnerHitDefinition(name, context);
        innerHitsContext.addInnerHitDefinition(innerHitBuilder.buildInline(sc, this));
    }

    public Collection<String> simpleMatchToIndexNames(String pattern) {
@ -0,0 +1,597 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT;
|
||||
|
||||
public final class InnerHitBuilder extends ToXContentToBytes implements Writeable<InnerHitBuilder> {
|
||||
|
||||
public static final ParseField NAME_FIELD = new ParseField("name");
|
||||
public static final ParseField NESTED_PATH_FIELD = new ParseField("path");
|
||||
public static final ParseField PARENT_CHILD_TYPE_FIELD = new ParseField("type");
|
||||
|
||||
private final static ObjectParser<InnerHitBuilder, QueryParseContext> PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new);
|
||||
|
||||
static {
|
||||
PARSER.declareString(InnerHitBuilder::setName, NAME_FIELD);
|
||||
PARSER.declareString(InnerHitBuilder::setNestedPath, NESTED_PATH_FIELD);
|
||||
PARSER.declareString(InnerHitBuilder::setParentChildType, PARENT_CHILD_TYPE_FIELD);
|
||||
PARSER.declareInt(InnerHitBuilder::setFrom, SearchSourceBuilder.FROM_FIELD);
|
||||
PARSER.declareInt(InnerHitBuilder::setSize, SearchSourceBuilder.SIZE_FIELD);
|
||||
PARSER.declareBoolean(InnerHitBuilder::setExplain, SearchSourceBuilder.EXPLAIN_FIELD);
|
||||
PARSER.declareBoolean(InnerHitBuilder::setVersion, SearchSourceBuilder.VERSION_FIELD);
|
||||
PARSER.declareBoolean(InnerHitBuilder::setTrackScores, SearchSourceBuilder.TRACK_SCORES_FIELD);
|
||||
PARSER.declareStringArray(InnerHitBuilder::setFieldNames, SearchSourceBuilder.FIELDS_FIELD);
|
||||
PARSER.declareStringArray(InnerHitBuilder::setFieldDataFields, SearchSourceBuilder.FIELDDATA_FIELDS_FIELD);
|
||||
PARSER.declareField((p, i, c) -> {
|
||||
try {
|
||||
List<ScriptField> scriptFields = new ArrayList<>();
|
||||
for (XContentParser.Token token = p.nextToken(); token != END_OBJECT; token = p.nextToken()) {
|
||||
scriptFields.add(new ScriptField(p, c));
|
||||
}
|
||||
i.setScriptFields(scriptFields);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner script definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.SCRIPT_FIELDS_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
PARSER.declareField((p, i, c) -> i.setSorts(SortBuilder.fromXContent(c)), SearchSourceBuilder.SORT_FIELD,
|
||||
ObjectParser.ValueType.OBJECT_ARRAY);
|
||||
PARSER.declareField((p, i, c) -> {
|
||||
try {
|
||||
i.setFetchSourceContext(FetchSourceContext.parse(p, c));
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder._SOURCE_FIELD, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
|
||||
PARSER.declareObject(InnerHitBuilder::setHighlightBuilder, (p, c) -> {
|
||||
try {
|
||||
return HighlightBuilder.PROTOTYPE.fromXContent(c);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner highlight definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.HIGHLIGHT_FIELD);
|
||||
PARSER.declareObject(InnerHitBuilder::setQuery, (p, c) ->{
|
||||
try {
|
||||
return c.parseInnerQueryBuilder();
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.QUERY_FIELD);
|
||||
PARSER.declareObject(InnerHitBuilder::setInnerHitsBuilder, (p, c) -> {
|
||||
try {
|
||||
return InnerHitsBuilder.fromXContent(p, c);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.INNER_HITS_FIELD);
|
||||
}
|
||||
|
||||
public static InnerHitBuilder optionalReadFromStream(StreamInput in) throws IOException {
|
||||
if (in.readBoolean()) {
|
||||
return new InnerHitBuilder(in);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String name;
|
||||
private String nestedPath;
|
||||
private String parentChildType;
|
||||
|
||||
private int from;
|
||||
private int size = 3;
|
||||
private boolean explain;
|
||||
private boolean version;
|
||||
private boolean trackScores;
|
||||
|
||||
private List<String> fieldNames;
|
||||
private QueryBuilder<?> query = new MatchAllQueryBuilder();
|
||||
private List<SortBuilder<?>> sorts;
|
||||
private List<String> fieldDataFields;
|
||||
private List<ScriptField> scriptFields;
|
||||
private HighlightBuilder highlightBuilder;
|
||||
private InnerHitsBuilder innerHitsBuilder;
|
||||
private FetchSourceContext fetchSourceContext;
|
||||
|
||||
// pkg protected, because is used in InnerHitsBuilder
|
||||
InnerHitBuilder(StreamInput in) throws IOException {
|
||||
name = in.readOptionalString();
|
||||
nestedPath = in.readOptionalString();
|
||||
parentChildType = in.readOptionalString();
|
||||
from = in.readVInt();
|
||||
size = in.readVInt();
|
||||
explain = in.readBoolean();
|
||||
version = in.readBoolean();
|
||||
trackScores = in.readBoolean();
|
||||
fieldNames = (List<String>) in.readGenericValue();
|
||||
fieldDataFields = (List<String>) in.readGenericValue();
|
||||
if (in.readBoolean()) {
|
||||
scriptFields = in.readList(t -> ScriptField.PROTOTYPE.readFrom(in));
|
||||
}
|
||||
fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
sorts = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
sorts.add(in.readSortBuilder());
|
||||
}
|
||||
}
|
||||
highlightBuilder = in.readOptionalWriteable(HighlightBuilder.PROTOTYPE::readFrom);
|
||||
query = in.readQuery();
|
||||
innerHitsBuilder = in.readOptionalWriteable(InnerHitsBuilder.PROTO::readFrom);
|
||||
}
|
||||
|
||||
public InnerHitBuilder() {
|
||||
}
|
||||
|
||||
public InnerHitBuilder setParentChildType(String parentChildType) {
|
||||
this.parentChildType = parentChildType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setNestedPath(String nestedPath) {
|
||||
this.nestedPath = nestedPath;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setName(String name) {
|
||||
this.name = Objects.requireNonNull(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
    public int getFrom() {
        return from;
    }

    public InnerHitBuilder setFrom(int from) {
        if (from < 0) {
            throw new IllegalArgumentException("illegal from value, must be 0 or higher");
        }
        this.from = from;
        return this;
    }

    public int getSize() {
        return size;
    }

    public InnerHitBuilder setSize(int size) {
        if (size < 0) {
            throw new IllegalArgumentException("illegal size value, must be 0 or higher");
        }
        this.size = size;
        return this;
    }
|
||||
public boolean isExplain() {
|
||||
return explain;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setExplain(boolean explain) {
|
||||
this.explain = explain;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean isVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setVersion(boolean version) {
|
||||
this.version = version;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean isTrackScores() {
|
||||
return trackScores;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setTrackScores(boolean trackScores) {
|
||||
this.trackScores = trackScores;
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<String> getFieldNames() {
|
||||
return fieldNames;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setFieldNames(List<String> fieldNames) {
|
||||
this.fieldNames = fieldNames;
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<String> getFieldDataFields() {
|
||||
return fieldDataFields;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setFieldDataFields(List<String> fieldDataFields) {
|
||||
this.fieldDataFields = fieldDataFields;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder addFieldDataField(String field) {
|
||||
if (fieldDataFields == null) {
|
||||
fieldDataFields = new ArrayList<>();
|
||||
}
|
||||
fieldDataFields.add(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<ScriptField> getScriptFields() {
|
||||
return scriptFields;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setScriptFields(List<ScriptField> scriptFields) {
|
||||
this.scriptFields = scriptFields;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder addScriptField(String name, Script script) {
|
||||
if (scriptFields == null) {
|
||||
scriptFields = new ArrayList<>();
|
||||
}
|
||||
scriptFields.add(new ScriptField(name, script, false));
|
||||
return this;
|
||||
}
|
||||
|
||||
public FetchSourceContext getFetchSourceContext() {
|
||||
return fetchSourceContext;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setFetchSourceContext(FetchSourceContext fetchSourceContext) {
|
||||
this.fetchSourceContext = fetchSourceContext;
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<SortBuilder<?>> getSorts() {
|
||||
return sorts;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setSorts(List<SortBuilder<?>> sorts) {
|
||||
this.sorts = sorts;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder addSort(SortBuilder sort) {
|
||||
if (sorts == null) {
|
||||
sorts = new ArrayList<>();
|
||||
}
|
||||
sorts.add(sort);
|
||||
return this;
|
||||
}
|
||||
|
||||
public HighlightBuilder getHighlightBuilder() {
|
||||
return highlightBuilder;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
|
||||
this.highlightBuilder = highlightBuilder;
|
||||
return this;
|
||||
}
|
||||
|
||||
public QueryBuilder<?> getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setQuery(QueryBuilder<?> query) {
|
||||
this.query = Objects.requireNonNull(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setInnerHitsBuilder(InnerHitsBuilder innerHitsBuilder) {
|
||||
this.innerHitsBuilder = innerHitsBuilder;
|
||||
return this;
|
||||
}
|
||||
|
||||
    public InnerHitsContext.BaseInnerHits buildInline(SearchContext parentSearchContext, QueryShardContext context) throws IOException {
        InnerHitsContext.BaseInnerHits innerHitsContext;
        if (nestedPath != null) {
            ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
            ObjectMapper parentObjectMapper = context.nestedScope().getObjectMapper();
            innerHitsContext = new InnerHitsContext.NestedInnerHits(
                name, parentSearchContext, parentObjectMapper, nestedObjectMapper
            );
        } else if (parentChildType != null) {
            DocumentMapper documentMapper = context.getMapperService().documentMapper(parentChildType);
            innerHitsContext = new InnerHitsContext.ParentChildInnerHits(
                name, parentSearchContext, context.getMapperService(), documentMapper
            );
        } else {
            throw new IllegalStateException("Neither a nested nor a parent/child inner hit");
        }
        setupInnerHitsContext(context, innerHitsContext);
        return innerHitsContext;
    }

    /**
     * Top level inner hits differ from inline inner hits in two ways:
     * 1) Nesting. Top level inner hits can hold nested inner hits, which is why this method is recursive (via buildChildInnerHits).
     * 2) For top level inner hits the query is optional, whereas inline inner hits reuse the inner query of the nested,
     *    has_child or has_parent query they are defined on.
     *
     * Because of these differences there are separate methods for building inline inner hits (which is simpler) and top level
     * inner hits. Also, top level inner hits will soon be deprecated.
     */
    public InnerHitsContext.BaseInnerHits buildTopLevel(SearchContext parentSearchContext, QueryShardContext context,
                                                        InnerHitsContext innerHitsContext) throws IOException {
        if (nestedPath != null) {
            ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
            ObjectMapper parentObjectMapper = context.nestedScope().nextLevel(nestedObjectMapper);
            InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(
                name, parentSearchContext, parentObjectMapper, nestedObjectMapper
            );
            setupInnerHitsContext(context, nestedInnerHits);
            if (innerHitsBuilder != null) {
                buildChildInnerHits(parentSearchContext, context, nestedInnerHits);
            }
            context.nestedScope().previousLevel();
            innerHitsContext.addInnerHitDefinition(nestedInnerHits);
            return nestedInnerHits;
        } else if (parentChildType != null) {
            DocumentMapper documentMapper = context.getMapperService().documentMapper(parentChildType);
            InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(
                name, parentSearchContext, context.getMapperService(), documentMapper
            );
            setupInnerHitsContext(context, parentChildInnerHits);
            if (innerHitsBuilder != null) {
                buildChildInnerHits(parentSearchContext, context, parentChildInnerHits);
            }
            innerHitsContext.addInnerHitDefinition(parentChildInnerHits);
            return parentChildInnerHits;
        } else {
            throw new IllegalStateException("Neither a nested nor a parent/child inner hit");
        }
    }
private void buildChildInnerHits(SearchContext parentSearchContext, QueryShardContext context,
|
||||
InnerHitsContext.BaseInnerHits innerHits) throws IOException {
|
||||
Map<String, InnerHitsContext.BaseInnerHits> childInnerHits = new HashMap<>();
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilder.getInnerHitsBuilders().entrySet()) {
|
||||
InnerHitsContext.BaseInnerHits childInnerHit = entry.getValue().buildTopLevel(
|
||||
parentSearchContext, context, new InnerHitsContext()
|
||||
);
|
||||
childInnerHits.put(entry.getKey(), childInnerHit);
|
||||
}
|
||||
innerHits.setChildInnerHits(childInnerHits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InnerHitBuilder readFrom(StreamInput in) throws IOException {
|
||||
return new InnerHitBuilder(in);
|
||||
}
|
||||
|
||||
private void setupInnerHitsContext(QueryShardContext context, InnerHitsContext.BaseInnerHits innerHitsContext) throws IOException {
|
||||
innerHitsContext.from(from);
|
||||
innerHitsContext.size(size);
|
||||
innerHitsContext.explain(explain);
|
||||
innerHitsContext.version(version);
|
||||
innerHitsContext.trackScores(trackScores);
|
||||
if (fieldNames != null) {
|
||||
if (fieldNames.isEmpty()) {
|
||||
innerHitsContext.emptyFieldNames();
|
||||
} else {
|
||||
for (String fieldName : fieldNames) {
|
||||
innerHitsContext.fieldNames().add(fieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fieldDataFields != null) {
|
||||
FieldDataFieldsContext fieldDataFieldsContext = innerHitsContext
|
||||
.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY);
|
||||
for (String field : fieldDataFields) {
|
||||
fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(field));
|
||||
}
|
||||
fieldDataFieldsContext.setHitExecutionNeeded(true);
|
||||
}
|
||||
if (scriptFields != null) {
|
||||
for (ScriptField field : scriptFields) {
|
||||
SearchScript searchScript = innerHitsContext.scriptService().search(innerHitsContext.lookup(), field.script(),
|
||||
ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||
innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField(
|
||||
field.fieldName(), searchScript, field.ignoreFailure()));
|
||||
}
|
||||
}
|
||||
if (fetchSourceContext != null) {
|
||||
innerHitsContext.fetchSourceContext(fetchSourceContext);
|
||||
}
|
||||
if (sorts != null) {
|
||||
Optional<Sort> optionalSort = SortBuilder.buildSort(sorts, context);
|
||||
if (optionalSort.isPresent()) {
|
||||
innerHitsContext.sort(optionalSort.get());
|
||||
}
|
||||
}
|
||||
if (highlightBuilder != null) {
|
||||
innerHitsContext.highlight(highlightBuilder.build(context));
|
||||
}
|
||||
ParsedQuery parsedQuery = new ParsedQuery(query.toQuery(context), context.copyNamedQueries());
|
||||
innerHitsContext.parsedQuery(parsedQuery);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeOptionalString(name);
|
||||
out.writeOptionalString(nestedPath);
|
||||
out.writeOptionalString(parentChildType);
|
||||
out.writeVInt(from);
|
||||
out.writeVInt(size);
|
||||
out.writeBoolean(explain);
|
||||
out.writeBoolean(version);
|
||||
out.writeBoolean(trackScores);
|
||||
out.writeGenericValue(fieldNames);
|
||||
out.writeGenericValue(fieldDataFields);
|
||||
boolean hasScriptFields = scriptFields != null;
|
||||
out.writeBoolean(hasScriptFields);
|
||||
if (hasScriptFields) {
|
||||
out.writeList(scriptFields);
|
||||
}
|
||||
FetchSourceContext.optionalWriteToStream(fetchSourceContext, out);
|
||||
boolean hasSorts = sorts != null;
|
||||
out.writeBoolean(hasSorts);
|
||||
if (hasSorts) {
|
||||
out.writeVInt(sorts.size());
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
out.writeSortBuilder(sort);
|
||||
}
|
||||
}
|
||||
out.writeOptionalWriteable(highlightBuilder);
|
||||
out.writeQuery(query);
|
||||
out.writeOptionalWriteable(innerHitsBuilder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
|
||||
if (nestedPath != null) {
|
||||
builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
|
||||
}
|
||||
if (parentChildType != null) {
|
||||
builder.field(PARENT_CHILD_TYPE_FIELD.getPreferredName(), parentChildType);
|
||||
}
|
||||
if (name != null) {
|
||||
builder.field(NAME_FIELD.getPreferredName(), name);
|
||||
}
|
||||
builder.field(SearchSourceBuilder.FROM_FIELD.getPreferredName(), from);
|
||||
builder.field(SearchSourceBuilder.SIZE_FIELD.getPreferredName(), size);
|
||||
builder.field(SearchSourceBuilder.VERSION_FIELD.getPreferredName(), version);
|
||||
builder.field(SearchSourceBuilder.EXPLAIN_FIELD.getPreferredName(), explain);
|
||||
builder.field(SearchSourceBuilder.TRACK_SCORES_FIELD.getPreferredName(), trackScores);
|
||||
if (fetchSourceContext != null) {
|
||||
builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext, params);
|
||||
}
|
||||
if (fieldNames != null) {
|
||||
if (fieldNames.size() == 1) {
|
||||
builder.field(SearchSourceBuilder.FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
|
||||
} else {
|
||||
builder.startArray(SearchSourceBuilder.FIELDS_FIELD.getPreferredName());
|
||||
for (String fieldName : fieldNames) {
|
||||
builder.value(fieldName);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
}
|
||||
if (fieldDataFields != null) {
|
||||
builder.startArray(SearchSourceBuilder.FIELDDATA_FIELDS_FIELD.getPreferredName());
|
||||
for (String fieldDataField : fieldDataFields) {
|
||||
builder.value(fieldDataField);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (scriptFields != null) {
|
||||
builder.startObject(SearchSourceBuilder.SCRIPT_FIELDS_FIELD.getPreferredName());
|
||||
for (ScriptField scriptField : scriptFields) {
|
||||
scriptField.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
if (sorts != null) {
|
||||
builder.startArray(SearchSourceBuilder.SORT_FIELD.getPreferredName());
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
sort.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (highlightBuilder != null) {
|
||||
builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder, params);
|
||||
}
|
||||
builder.field(SearchSourceBuilder.QUERY_FIELD.getPreferredName(), query, params);
|
||||
if (innerHitsBuilder != null) {
|
||||
builder.field(SearchSourceBuilder.INNER_HITS_FIELD.getPreferredName(), innerHitsBuilder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
InnerHitBuilder that = (InnerHitBuilder) o;
|
||||
return Objects.equals(name, that.name) &&
|
||||
Objects.equals(nestedPath, that.nestedPath) &&
|
||||
Objects.equals(parentChildType, that.parentChildType) &&
|
||||
Objects.equals(from, that.from) &&
|
||||
Objects.equals(size, that.size) &&
|
||||
Objects.equals(explain, that.explain) &&
|
||||
Objects.equals(version, that.version) &&
|
||||
Objects.equals(trackScores, that.trackScores) &&
|
||||
Objects.equals(fieldNames, that.fieldNames) &&
|
||||
Objects.equals(fieldDataFields, that.fieldDataFields) &&
|
||||
Objects.equals(scriptFields, that.scriptFields) &&
|
||||
Objects.equals(fetchSourceContext, that.fetchSourceContext) &&
|
||||
Objects.equals(sorts, that.sorts) &&
|
||||
Objects.equals(highlightBuilder, that.highlightBuilder) &&
|
||||
Objects.equals(query, that.query) &&
|
||||
Objects.equals(innerHitsBuilder, that.innerHitsBuilder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, fieldNames,
|
||||
fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, innerHitsBuilder);
|
||||
}
|
||||
|
||||
public static InnerHitBuilder fromXContent(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(parser, new InnerHitBuilder(), context);
|
||||
}
|
||||
|
||||
}
|
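As a quick orientation for the new builder, a hedged usage sketch follows; every field name, sort and highlight value is invented for illustration and not part of this change:

-------------------------------------------------
// Illustrative only: configuring an inner hit definition with the new fluent API.
InnerHitBuilder recentComments = new InnerHitBuilder()
        .setName("recent_comments")
        .setNestedPath("comments")
        .setSize(5)
        .setTrackScores(true)
        .addSort(SortBuilders.fieldSort("comments.date").order(SortOrder.DESC))
        .setHighlightBuilder(new HighlightBuilder().field("comments.text"));
-------------------------------------------------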
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class InnerHitsBuilder extends ToXContentToBytes implements Writeable<InnerHitsBuilder> {

    public final static InnerHitsBuilder PROTO = new InnerHitsBuilder(Collections.emptyMap());

    private final Map<String, InnerHitBuilder> innerHitsBuilders;

    public InnerHitsBuilder() {
        this.innerHitsBuilders = new HashMap<>();
    }

    public InnerHitsBuilder(Map<String, InnerHitBuilder> innerHitsBuilders) {
        this.innerHitsBuilders = Objects.requireNonNull(innerHitsBuilders);
    }

    public InnerHitsBuilder addInnerHit(String name, InnerHitBuilder builder) {
        Objects.requireNonNull(name);
        Objects.requireNonNull(builder);
        this.innerHitsBuilders.put(name, builder.setName(name));
        return this;
    }

    public Map<String, InnerHitBuilder> getInnerHitsBuilders() {
        return innerHitsBuilders;
    }
|
||||
@Override
|
||||
public InnerHitsBuilder readFrom(StreamInput in) throws IOException {
|
||||
int size = in.readVInt();
|
||||
Map<String, InnerHitBuilder> innerHitsBuilders = new HashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
innerHitsBuilders.put(in.readString(), new InnerHitBuilder(in));
|
||||
}
|
||||
return new InnerHitsBuilder(innerHitsBuilders);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilders.entrySet()) {
|
||||
builder.field(entry.getKey(), entry.getValue(), params);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(innerHitsBuilders.size());
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilders.entrySet()) {
|
||||
out.writeString(entry.getKey());
|
||||
entry.getValue().writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
InnerHitsBuilder that = (InnerHitsBuilder) o;
|
||||
return innerHitsBuilders.equals(that.innerHitsBuilders);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return innerHitsBuilders.hashCode();
|
||||
}
|
||||
|
||||
    public static InnerHitsBuilder fromXContent(XContentParser parser, QueryParseContext context) throws IOException {
        Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
        String innerHitName = null;
        for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
            switch (token) {
                case START_OBJECT:
                    InnerHitBuilder innerHitBuilder = InnerHitBuilder.fromXContent(parser, context);
                    innerHitBuilder.setName(innerHitName);
                    innerHitBuilders.put(innerHitName, innerHitBuilder);
                    break;
                case FIELD_NAME:
                    innerHitName = parser.currentName();
                    break;
                default:
                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
                            + parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
            }
        }
        return new InnerHitsBuilder(innerHitBuilders);
    }

}
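A hedged sketch of how top-level inner hits with one child level could be assembled with these two builders, mirroring the recursive buildTopLevel/buildChildInnerHits flow above; the names and paths are invented:

-------------------------------------------------
// Illustrative only: a top-level inner hits definition with one nested child definition.
InnerHitsBuilder topLevel = new InnerHitsBuilder()
        .addInnerHit("comments", new InnerHitBuilder()
                .setNestedPath("comments")
                .setInnerHitsBuilder(new InnerHitsBuilder()
                        .addInnerHit("remarks", new InnerHitBuilder().setNestedPath("comments.remarks"))));
-------------------------------------------------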
|
@ -1,133 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
|
||||
import org.elasticsearch.search.highlight.HighlighterParseElement;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class InnerHitsQueryParserHelper {
|
||||
|
||||
public static final InnerHitsQueryParserHelper INSTANCE = new InnerHitsQueryParserHelper();
|
||||
|
||||
private static final FetchSourceParseElement sourceParseElement = new FetchSourceParseElement();
|
||||
private static final HighlighterParseElement highlighterParseElement = new HighlighterParseElement();
|
||||
private static final ScriptFieldsParseElement scriptFieldsParseElement = new ScriptFieldsParseElement();
|
||||
private static final FieldDataFieldsParseElement fieldDataFieldsParseElement = new FieldDataFieldsParseElement();
|
||||
|
||||
public static InnerHitsSubSearchContext parse(XContentParser parser) throws IOException {
|
||||
String fieldName = null;
|
||||
XContentParser.Token token;
|
||||
String innerHitName = null;
|
||||
SubSearchContext subSearchContext = new SubSearchContext(SearchContext.current());
|
||||
try {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if ("name".equals(fieldName)) {
|
||||
innerHitName = parser.textOrNull();
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new IOException("Failed to parse [_inner_hits]", e);
|
||||
}
|
||||
return new InnerHitsSubSearchContext(innerHitName, subSearchContext);
|
||||
}
|
||||
|
||||
public static void parseCommonInnerHitOptions(XContentParser parser, XContentParser.Token token, String fieldName, SubSearchContext subSearchContext,
|
||||
FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement,
|
||||
ScriptFieldsParseElement scriptFieldsParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement) throws Exception {
|
||||
if ("sort".equals(fieldName)) {
|
||||
SortBuilder.parseSort(parser, subSearchContext);
|
||||
} else if ("_source".equals(fieldName)) {
|
||||
sourceParseElement.parse(parser, subSearchContext);
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
switch (fieldName) {
|
||||
case "highlight":
|
||||
highlighterParseElement.parse(parser, subSearchContext);
|
||||
break;
|
||||
case "scriptFields":
|
||||
case "script_fields":
|
||||
scriptFieldsParseElement.parse(parser, subSearchContext);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown key for a " + token + " for nested query: [" + fieldName + "].");
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
switch (fieldName) {
|
||||
case "fielddataFields":
|
||||
case "fielddata_fields":
|
||||
fieldDataFieldsParseElement.parse(parser, subSearchContext);
|
||||
break;
|
||||
case "fields":
|
||||
boolean added = false;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
String name = parser.text();
|
||||
added = true;
|
||||
subSearchContext.fieldNames().add(name);
|
||||
}
|
||||
if (!added) {
|
||||
subSearchContext.emptyFieldNames();
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown key for a " + token + " for nested query: [" + fieldName + "].");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
switch (fieldName) {
|
||||
case "from":
|
||||
subSearchContext.from(parser.intValue());
|
||||
break;
|
||||
case "size":
|
||||
subSearchContext.size(parser.intValue());
|
||||
break;
|
||||
case "track_scores":
|
||||
case "trackScores":
|
||||
subSearchContext.trackScores(parser.booleanValue());
|
||||
break;
|
||||
case "version":
|
||||
subSearchContext.version(parser.booleanValue());
|
||||
break;
|
||||
case "explain":
|
||||
subSearchContext.explain(parser.booleanValue());
|
||||
break;
|
||||
case "fields":
|
||||
subSearchContext.fieldNames().add(parser.text());
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown key for a " + token + " for nested query: [" + fieldName + "].");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,113 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class QueryInnerHits extends ToXContentToBytes implements Writeable<QueryInnerHits> {
|
||||
private final BytesReference queryInnerHitsSearchSource;
|
||||
|
||||
public QueryInnerHits(StreamInput input) throws IOException {
|
||||
queryInnerHitsSearchSource = input.readBytesReference();
|
||||
}
|
||||
|
||||
public QueryInnerHits(XContentParser parser) throws IOException {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
try (XContentBuilder builder = XContentFactory.cborBuilder(out)) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
queryInnerHitsSearchSource = builder.bytes();
|
||||
}
|
||||
}
|
||||
|
||||
public QueryInnerHits() {
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
public QueryInnerHits(String name, InnerHitsBuilder.InnerHit innerHit) {
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
try (XContentBuilder builder = XContentFactory.cborBuilder(out)) {
|
||||
builder.startObject();
|
||||
if (name != null) {
|
||||
builder.field("name", name);
|
||||
}
|
||||
if (innerHit != null) {
|
||||
innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
}
|
||||
builder.endObject();
|
||||
this.queryInnerHitsSearchSource = builder.bytes();
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchException("failed to build xcontent", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryInnerHits readFrom(StreamInput in) throws IOException {
|
||||
return new QueryInnerHits(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field("inner_hits");
|
||||
try (XContentParser parser = XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource)) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeBytesReference(queryInnerHitsSearchSource);
|
||||
}
|
||||
|
||||
public XContentParser getXcontentParser() throws IOException {
|
||||
return XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
QueryInnerHits that = (QueryInnerHits) o;
|
||||
|
||||
return queryInnerHitsSearchSource.equals(that.queryInnerHitsSearchSource);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return queryInnerHitsSearchSource.hashCode();
|
||||
}
|
||||
}
|
|
@@ -23,7 +23,7 @@ import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.SearchSlowLog;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext;

import java.util.HashMap;

@@ -35,17 +35,12 @@ import static java.util.Collections.emptyMap;

/**
 */
public final class ShardSearchStats {
public final class ShardSearchStats implements SearchOperationListener {

    private final SearchSlowLog slowLogSearchService;
    private final StatsHolder totalStats = new StatsHolder();
    private final CounterMetric openContexts = new CounterMetric();
    private volatile Map<String, StatsHolder> groupsStats = emptyMap();

    public ShardSearchStats(SearchSlowLog searchSlowLog) {
        this.slowLogSearchService = searchSlowLog;
    }

    /**
     * Returns the stats, including group specific stats. If the groups are null/0 length, then nothing
     * is returned for them. If they are set, then only groups provided will be returned, or
@ -71,6 +66,7 @@ public final class ShardSearchStats {
|
|||
return new SearchStats(total, openContexts.count(), groupsSt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPreQueryPhase(SearchContext searchContext) {
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
|
@ -81,6 +77,7 @@ public final class ShardSearchStats {
|
|||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailedQueryPhase(SearchContext searchContext) {
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
|
@ -91,6 +88,7 @@ public final class ShardSearchStats {
|
|||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
|
@@ -101,23 +99,24 @@ public final class ShardSearchStats {
                statsHolder.queryCurrent.dec();
            }
        });
        slowLogSearchService.onQueryPhase(searchContext, tookInNanos);
    }

    @Override
    public void onPreFetchPhase(SearchContext searchContext) {
        computeStats(searchContext, statsHolder -> statsHolder.fetchCurrent.inc());
    }

    @Override
    public void onFailedFetchPhase(SearchContext searchContext) {
        computeStats(searchContext, statsHolder -> statsHolder.fetchCurrent.dec());
    }

    @Override
    public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
        computeStats(searchContext, statsHolder -> {
            statsHolder.fetchMetric.inc(tookInNanos);
            statsHolder.fetchCurrent.dec();
        });
        slowLogSearchService.onFetchPhase(searchContext, tookInNanos);
    }

    public void clear() {
@@ -159,18 +158,22 @@ public final class ShardSearchStats {
        return stats;
    }

    @Override
    public void onNewContext(SearchContext context) {
        openContexts.inc();
    }

    @Override
    public void onFreeContext(SearchContext context) {
        openContexts.dec();
    }

    @Override
    public void onNewScrollContext(SearchContext context) {
        totalStats.scrollCurrent.inc();
    }

    @Override
    public void onFreeScrollContext(SearchContext context) {
        totalStats.scrollCurrent.dec();
        totalStats.scrollMetric.inc(System.nanoTime() - context.getOriginNanoTime());
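With ShardSearchStats now just one SearchOperationListener among potentially many, additional listeners can be composed next to it. A hedged sketch follows; it assumes SearchOperationListener provides no-op defaults for the callbacks that are not overridden, otherwise the remaining methods of the interface would need empty implementations:

-------------------------------------------------
// Illustrative only: an extra listener that accumulates query-phase latency per shard.
class QueryLatencyListener implements SearchOperationListener {

    private final MeanMetric queryLatency = new MeanMetric();

    @Override
    public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
        queryLatency.inc(tookInNanos); // accumulate query-phase time in nanoseconds
    }
}
-------------------------------------------------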
@@ -128,7 +128,7 @@ public class IndexShard extends AbstractIndexShardComponent {
    private final IndexCache indexCache;
    private final Store store;
    private final InternalIndexingStats internalIndexingStats;
    private final ShardSearchStats searchService;
    private final ShardSearchStats searchStats = new ShardSearchStats();
    private final ShardGetService getService;
    private final ShardIndexWarmerService shardWarmerService;
    private final ShardRequestCache shardQueryCache;
@@ -151,6 +151,7 @@ public class IndexShard extends AbstractIndexShardComponent {
     * being indexed/deleted.
     */
    private final AtomicLong writingBytes = new AtomicLong();
    private final SearchOperationListener searchOperationListener;

    protected volatile ShardRouting shardRouting;
    protected volatile IndexShardState state;
@@ -195,7 +196,7 @@ public class IndexShard extends AbstractIndexShardComponent {
                      MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService,
                      @Nullable EngineFactory engineFactory,
                      IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, ThreadPool threadPool, BigArrays bigArrays,
                      SearchSlowLog slowLog, Engine.Warmer warmer, IndexingOperationListener... listeners) {
                      Engine.Warmer warmer, List<SearchOperationListener> searchOperationListener, List<IndexingOperationListener> listeners) {
        super(shardId, indexSettings);
        final Settings settings = indexSettings.getSettings();
        this.codecService = new CodecService(mapperService, logger);
@@ -210,11 +211,13 @@ public class IndexShard extends AbstractIndexShardComponent {
        this.mapperService = mapperService;
        this.indexCache = indexCache;
        this.internalIndexingStats = new InternalIndexingStats();
        final List<IndexingOperationListener> listenersList = new ArrayList<>(Arrays.asList(listeners));
        final List<IndexingOperationListener> listenersList = new ArrayList<>(listeners);
        listenersList.add(internalIndexingStats);
        this.indexingOperationListeners = new IndexingOperationListener.CompositeListener(listenersList, logger);
        final List<SearchOperationListener> searchListenersList = new ArrayList<>(searchOperationListener);
        searchListenersList.add(searchStats);
        this.searchOperationListener = new SearchOperationListener.CompositeListener(searchListenersList, logger);
        this.getService = new ShardGetService(indexSettings, this, mapperService);
        this.searchService = new ShardSearchStats(slowLog);
        this.shardWarmerService = new ShardIndexWarmerService(shardId, indexSettings);
        this.shardQueryCache = new ShardRequestCache();
        this.shardFieldData = new ShardFieldData();
@@ -270,8 +273,8 @@ public class IndexShard extends AbstractIndexShardComponent {
        return mapperService;
    }

    public ShardSearchStats searchService() {
        return this.searchService;
    public SearchOperationListener getSearchOperationListener() {
        return this.searchOperationListener;
    }

    public ShardIndexWarmerService warmerService() {
@@ -483,6 +486,11 @@ public class IndexShard extends AbstractIndexShardComponent {
     */
    public boolean index(Engine.Index index) {
        ensureWriteAllowed(index);
        Engine engine = getEngine();
        return index(engine, index);
    }

    private boolean index(Engine engine, Engine.Index index) {
        active.set(true);
        index = indexingOperationListeners.preIndex(index);
        final boolean created;
@ -490,7 +498,6 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("index [{}][{}]{}", index.type(), index.id(), index.docs());
|
||||
}
|
||||
Engine engine = getEngine();
|
||||
created = engine.index(index);
|
||||
index.endTime(System.nanoTime());
|
||||
} catch (Throwable ex) {
|
||||
|
@ -521,13 +528,17 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
|
||||
public void delete(Engine.Delete delete) {
|
||||
ensureWriteAllowed(delete);
|
||||
Engine engine = getEngine();
|
||||
delete(engine, delete);
|
||||
}
|
||||
|
||||
private void delete(Engine engine, Engine.Delete delete) {
|
||||
active.set(true);
|
||||
delete = indexingOperationListeners.preDelete(delete);
|
||||
try {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("delete [{}]", delete.uid().text());
|
||||
}
|
||||
Engine engine = getEngine();
|
||||
engine.delete(delete);
|
||||
delete.endTime(System.nanoTime());
|
||||
} catch (Throwable ex) {
|
||||
|
@ -613,7 +624,7 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
}
|
||||
|
||||
public SearchStats searchStats(String... groups) {
|
||||
return searchService.stats(groups);
|
||||
return searchStats.stats(groups);
|
||||
}
|
||||
|
||||
public GetStats getStats() {
|
||||
|
@ -1379,6 +1390,16 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
translogStats.totalOperationsOnStart(snapshot.totalOperations());
|
||||
return super.recoveryFromSnapshot(engine, snapshot);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void index(Engine engine, Engine.Index engineIndex) {
|
||||
IndexShard.this.index(engine, engineIndex);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void delete(Engine engine, Engine.Delete engineDelete) {
|
||||
IndexShard.this.delete(engine, engineDelete);
|
||||
}
|
||||
};
|
||||
return new EngineConfig(shardId,
|
||||
threadPool, indexSettings, warmer, store, deletionPolicy, indexSettings.getMergePolicy(),
|
||||
|
|
|
@ -66,50 +66,63 @@ final class InternalIndexingStats implements IndexingOperationListener {
|
|||
|
||||
@Override
|
||||
public Engine.Index preIndex(Engine.Index operation) {
|
||||
totalStats.indexCurrent.inc();
|
||||
typeStats(operation.type()).indexCurrent.inc();
|
||||
if (operation.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
totalStats.indexCurrent.inc();
|
||||
typeStats(operation.type()).indexCurrent.inc();
|
||||
}
|
||||
return operation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postIndex(Engine.Index index, boolean created) {
|
||||
long took = index.endTime() - index.startTime();
|
||||
totalStats.indexMetric.inc(took);
|
||||
totalStats.indexCurrent.dec();
|
||||
StatsHolder typeStats = typeStats(index.type());
|
||||
typeStats.indexMetric.inc(took);
|
||||
typeStats.indexCurrent.dec();
|
||||
if (index.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
long took = index.endTime() - index.startTime();
|
||||
totalStats.indexMetric.inc(took);
|
||||
totalStats.indexCurrent.dec();
|
||||
StatsHolder typeStats = typeStats(index.type());
|
||||
typeStats.indexMetric.inc(took);
|
||||
typeStats.indexCurrent.dec();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postIndex(Engine.Index index, Throwable ex) {
|
||||
totalStats.indexCurrent.dec();
|
||||
typeStats(index.type()).indexCurrent.dec();
|
||||
totalStats.indexFailed.inc();
|
||||
typeStats(index.type()).indexFailed.inc();
|
||||
if (index.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
totalStats.indexCurrent.dec();
|
||||
typeStats(index.type()).indexCurrent.dec();
|
||||
totalStats.indexFailed.inc();
|
||||
typeStats(index.type()).indexFailed.inc();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Engine.Delete preDelete(Engine.Delete delete) {
|
||||
totalStats.deleteCurrent.inc();
|
||||
typeStats(delete.type()).deleteCurrent.inc();
|
||||
if (delete.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
totalStats.deleteCurrent.inc();
|
||||
typeStats(delete.type()).deleteCurrent.inc();
|
||||
}
|
||||
return delete;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postDelete(Engine.Delete delete) {
|
||||
long took = delete.endTime() - delete.startTime();
|
||||
totalStats.deleteMetric.inc(took);
|
||||
totalStats.deleteCurrent.dec();
|
||||
StatsHolder typeStats = typeStats(delete.type());
|
||||
typeStats.deleteMetric.inc(took);
|
||||
typeStats.deleteCurrent.dec();
|
||||
if (delete.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
long took = delete.endTime() - delete.startTime();
|
||||
totalStats.deleteMetric.inc(took);
|
||||
totalStats.deleteCurrent.dec();
|
||||
StatsHolder typeStats = typeStats(delete.type());
|
||||
typeStats.deleteMetric.inc(took);
|
||||
typeStats.deleteCurrent.dec();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postDelete(Engine.Delete delete, Throwable ex) {
|
||||
totalStats.deleteCurrent.dec();
|
||||
typeStats(delete.type()).deleteCurrent.dec();
|
||||
if (delete.origin() != Engine.Operation.Origin.RECOVERY) {
|
||||
totalStats.deleteCurrent.dec();
|
||||
typeStats(delete.type()).deleteCurrent.dec();
|
||||
}
|
||||
}
|
||||
|
||||
public void noopUpdate(String type) {
|
||||
|
|
|
@ -0,0 +1,227 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.shard;
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * A listener for search, fetch and context events.
 */
public interface SearchOperationListener {
|
||||
|
||||
/**
|
||||
* Executed before the query phase is executed
|
||||
* @param searchContext the current search context
|
||||
*/
|
||||
default void onPreQueryPhase(SearchContext searchContext) {};
|
||||
|
||||
/**
|
||||
     * Executed if the query phase failed.
|
||||
* @param searchContext the current search context
|
||||
*/
|
||||
default void onFailedQueryPhase(SearchContext searchContext) {};
|
||||
|
||||
/**
|
||||
* Executed after the query phase successfully finished.
|
||||
* Note: this is not invoked if the query phase execution failed.
|
||||
* @param searchContext the current search context
|
||||
* @param tookInNanos the number of nanoseconds the query execution took
|
||||
*
|
||||
* @see #onFailedQueryPhase(SearchContext)
|
||||
*/
|
||||
default void onQueryPhase(SearchContext searchContext, long tookInNanos) {};
|
||||
|
||||
/**
|
||||
* Executed before the fetch phase is executed
|
||||
* @param searchContext the current search context
|
||||
*/
|
||||
default void onPreFetchPhase(SearchContext searchContext) {};
|
||||
|
||||
/**
|
||||
     * Executed if the fetch phase failed.
|
||||
* @param searchContext the current search context
|
||||
*/
|
||||
default void onFailedFetchPhase(SearchContext searchContext) {};
|
||||
|
||||
/**
|
||||
* Executed after the fetch phase successfully finished.
|
||||
* Note: this is not invoked if the fetch phase execution failed.
|
||||
* @param searchContext the current search context
|
||||
* @param tookInNanos the number of nanoseconds the fetch execution took
|
||||
*
|
||||
* @see #onFailedFetchPhase(SearchContext)
|
||||
*/
|
||||
default void onFetchPhase(SearchContext searchContext, long tookInNanos) {};
|
||||
|
||||
/**
|
||||
* Executed when a new search context was created
|
||||
* @param context the created context
|
||||
*/
|
||||
default void onNewContext(SearchContext context) {};
|
||||
|
||||
/**
|
||||
* Executed when a previously created search context is freed.
|
||||
* This happens either when the search execution finishes, if the
|
||||
     * execution failed, or if the search context was idle and needs to be
|
||||
* cleaned up.
|
||||
* @param context the freed search context
|
||||
*/
|
||||
default void onFreeContext(SearchContext context) {};
|
||||
|
||||
/**
|
||||
* Executed when a new scroll search {@link SearchContext} was created
|
||||
* @param context the created search context
|
||||
*/
|
||||
default void onNewScrollContext(SearchContext context) {};
|
||||
|
||||
/**
|
||||
* Executed when a scroll search {@link SearchContext} is freed.
|
||||
* This happens either when the scroll search execution finishes, if the
|
||||
     * execution failed, or if the search context was idle and needs to be
|
||||
* cleaned up.
|
||||
* @param context the freed search context
|
||||
*/
|
||||
default void onFreeScrollContext(SearchContext context) {};
|
||||
|
||||
/**
|
||||
     * A composite listener that multiplexes calls to each of the listeners' methods.
|
||||
*/
|
||||
final class CompositeListener implements SearchOperationListener {
|
||||
private final List<SearchOperationListener> listeners;
|
||||
private final ESLogger logger;
|
||||
|
||||
public CompositeListener(List<SearchOperationListener> listeners, ESLogger logger) {
|
||||
this.listeners = listeners;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPreQueryPhase(SearchContext searchContext) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onPreQueryPhase(searchContext);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onPreQueryPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailedQueryPhase(SearchContext searchContext) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onFailedQueryPhase(searchContext);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onFailedQueryPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onQueryPhase(searchContext, tookInNanos);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onQueryPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPreFetchPhase(SearchContext searchContext) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onPreFetchPhase(searchContext);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onPreFetchPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailedFetchPhase(SearchContext searchContext) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onFailedFetchPhase(searchContext);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onFailedFetchPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onFetchPhase(searchContext, tookInNanos);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onFetchPhase listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNewContext(SearchContext context) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onNewContext(context);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onNewContext listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFreeContext(SearchContext context) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onFreeContext(context);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onFreeContext listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNewScrollContext(SearchContext context) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onNewScrollContext(context);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onNewScrollContext listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFreeScrollContext(SearchContext context) {
|
||||
for (SearchOperationListener listener : listeners) {
|
||||
try {
|
||||
listener.onFreeScrollContext(context);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("onFreeScrollContext listener [{}] failed", t, listener);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
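The interface above only defines callback hooks with no-op defaults, so a custom per-shard listener needs to override only the events it cares about. A minimal, hypothetical sketch follows (the class name, threshold and logger wiring are assumptions for illustration, not part of this change):

-------------------------------------------------
package org.elasticsearch.index.shard;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.search.internal.SearchContext;

import java.util.concurrent.TimeUnit;

// Hypothetical listener: warn whenever the query phase of a search exceeds a fixed threshold.
public class SlowQueryLoggingListener implements SearchOperationListener {

    private static final long THRESHOLD_NANOS = TimeUnit.MILLISECONDS.toNanos(500);

    private final ESLogger logger;

    public SlowQueryLoggingListener(ESLogger logger) {
        this.logger = logger;
    }

    @Override
    public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
        // every other hook keeps its default no-op implementation
        if (tookInNanos > THRESHOLD_NANOS) {
            logger.warn("query phase took [{}ms] on shard [{}]",
                    TimeUnit.NANOSECONDS.toMillis(tookInNanos), searchContext.indexShard().shardId());
        }
    }
}
-------------------------------------------------

A list of such listeners can then be handed to the IndexShard constructor shown earlier in this commit, which combines them with its internal stats listener in the CompositeListener defined above.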
@ -36,6 +36,8 @@ import org.elasticsearch.index.translog.TranslogStats;
|
|||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* ShadowIndexShard extends {@link IndexShard} to add file synchronization
|
||||
|
@ -48,10 +50,10 @@ public final class ShadowIndexShard extends IndexShard {
|
|||
public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache,
|
||||
MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService,
|
||||
@Nullable EngineFactory engineFactory, IndexEventListener indexEventListener, IndexSearcherWrapper wrapper,
|
||||
ThreadPool threadPool, BigArrays bigArrays, SearchSlowLog searchSlowLog, Engine.Warmer engineWarmer)
|
||||
throws IOException {
|
||||
ThreadPool threadPool, BigArrays bigArrays, Engine.Warmer engineWarmer,
|
||||
List<SearchOperationListener> searchOperationListeners) throws IOException {
|
||||
super(shardId, indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldDataService, engineFactory,
|
||||
indexEventListener, wrapper, threadPool, bigArrays, searchSlowLog, engineWarmer);
|
||||
indexEventListener, wrapper, threadPool, bigArrays, engineWarmer, searchOperationListeners, Collections.emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -158,7 +158,7 @@ public class TranslogRecoveryPerformer {
|
|||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[translog] recover [index] op of [{}][{}]", index.type(), index.id());
|
||||
}
|
||||
engine.index(engineIndex);
|
||||
index(engine, engineIndex);
|
||||
break;
|
||||
case DELETE:
|
||||
Translog.Delete delete = (Translog.Delete) operation;
|
||||
|
@ -166,8 +166,9 @@ public class TranslogRecoveryPerformer {
|
|||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("[translog] recover [delete] op of [{}][{}]", uid.type(), uid.id());
|
||||
}
|
||||
engine.delete(new Engine.Delete(uid.type(), uid.id(), delete.uid(), delete.version(),
|
||||
delete.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, System.nanoTime(), false));
|
||||
final Engine.Delete engineDelete = new Engine.Delete(uid.type(), uid.id(), delete.uid(), delete.version(),
|
||||
delete.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, System.nanoTime(), false);
|
||||
delete(engine, engineDelete);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("No operation defined for [" + operation + "]");
|
||||
|
@ -193,6 +194,14 @@ public class TranslogRecoveryPerformer {
|
|||
operationProcessed();
|
||||
}
|
||||
|
||||
protected void index(Engine engine, Engine.Index engineIndex) {
|
||||
engine.index(engineIndex);
|
||||
}
|
||||
|
||||
protected void delete(Engine engine, Engine.Delete engineDelete) {
|
||||
engine.delete(engineDelete);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Called once for every processed operation by this recovery performer.
|
||||
|
|
|
@@ -377,13 +377,14 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
idxSettings.isShadowReplicaIndex() ? "s" : "", reason);

final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, analysisRegistry);
for (IndexingOperationListener operationListener : indexingOperationListeners) {
indexModule.addIndexOperationListener(operationListener);
}
pluginsService.onIndexModule(indexModule);
for (IndexEventListener listener : builtInListeners) {
indexModule.addIndexEventListener(listener);
}
final IndexEventListener listener = indexModule.freeze();
listener.beforeIndexCreated(index, idxSettings.getSettings());
return indexModule.newIndexService(nodeEnv, this, nodeServicesProvider, indicesQueryCache, mapperRegistry, indicesFieldDataCache, indexingOperationListeners);
return indexModule.newIndexService(nodeEnv, this, nodeServicesProvider, indicesQueryCache, mapperRegistry, indicesFieldDataCache);
}
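Indexing listeners are now attached through the IndexModule rather than threaded through newIndexService. A plugin could hook in via the pluginsService.onIndexModule callback invoked in the hunk above; the sketch below is illustrative only (the plugin class and listener body are made up, and it assumes the standard Plugin#onIndexModule extension point and default no-op implementations for the remaining IndexingOperationListener methods):

-------------------------------------------------
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.plugins.Plugin;

// Hypothetical plugin registering a per-index indexing listener.
public class IndexingAuditPlugin extends Plugin {

    @Override
    public void onIndexModule(IndexModule indexModule) {
        indexModule.addIndexOperationListener(new IndexingOperationListener() {
            @Override
            public void postIndex(Engine.Index index, boolean created) {
                // per-index bookkeeping would go here
            }
        });
    }
}
-------------------------------------------------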
|
||||
|
||||
/**
@@ -299,7 +299,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLServ
if (logger.isTraceEnabled()) {
logger.trace("failed to execute bulk", e);
} else {
logger.warn("failed to execute bulk: [{}]", e.getMessage());
logger.warn("failed to execute bulk: ", e);
}
}
});
@ -131,8 +131,6 @@ public class Node implements Closeable {
|
|||
|
||||
public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING =
|
||||
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
|
||||
public static final Setting<Boolean> NODE_CLIENT_SETTING =
|
||||
Setting.boolSetting("node.client", false, Property.NodeScope);
|
||||
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
|
||||
public static final Setting<Boolean> NODE_MASTER_SETTING =
|
||||
Setting.boolSetting("node.master", true, Property.NodeScope);
|
||||
|
@@ -143,9 +141,7 @@ public class Node implements Closeable {
public static final Setting<Boolean> NODE_INGEST_SETTING =
Setting.boolSetting("node.ingest", true, Property.NodeScope);
public static final Setting<String> NODE_NAME_SETTING = Setting.simpleString("node.name", Property.NodeScope);
// this sucks that folks can mistype data, master or ingest and get away with it.
// TODO: we should move this to node.attribute.${name} = ${value} instead.
public static final Setting<Settings> NODE_ATTRIBUTES = Setting.groupSetting("node.", Property.NodeScope);
public static final Setting<Settings> NODE_ATTRIBUTES = Setting.groupSetting("node.attr.", Property.NodeScope);

private static final String CLIENT_TYPE = "node";
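With the group setting narrowed to the node.attr. prefix, arbitrary attributes can no longer collide with or silently shadow the dedicated role settings declared above. A hedged sketch of what node settings look like after this change (the rack attribute is an example, not something defined elsewhere in this commit):

-------------------------------------------------
// Custom attributes now live under "node.attr.*" and are collected by NODE_ATTRIBUTES;
// role flags keep their own dedicated settings.
Settings nodeSettings = Settings.builder()
        .put("node.attr.rack", "rack-one")
        .put("node.data", true)
        .build();
-------------------------------------------------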
@ -51,10 +51,11 @@ import org.elasticsearch.index.engine.Engine;
|
|||
import org.elasticsearch.index.fieldstats.FieldStatsProvider;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.search.stats.ShardSearchStats;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.SearchOperationListener;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
|
@ -76,6 +77,7 @@ import org.elasticsearch.search.fetch.ShardFetchRequest;
|
|||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.DefaultSearchContext;
|
||||
|
@ -274,9 +276,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
|
||||
public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) throws IOException {
|
||||
final SearchContext context = createAndPutContext(request);
|
||||
final ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
try {
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
contextProcessing(context);
|
||||
|
||||
|
@ -287,7 +289,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
} else {
|
||||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
shardSearchStats.onQueryPhase(context, System.nanoTime() - time);
|
||||
operationListener.onQueryPhase(context, System.nanoTime() - time);
|
||||
|
||||
return context.queryResult();
|
||||
} catch (Throwable e) {
|
||||
|
@ -295,7 +297,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
if (e instanceof ExecutionException) {
|
||||
e = e.getCause();
|
||||
}
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
logger.trace("Query phase failed", e);
|
||||
processFailure(context, e);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
|
@ -306,18 +308,18 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
|
||||
public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request) {
|
||||
final SearchContext context = findContext(request.id());
|
||||
ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
try {
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
contextProcessing(context);
|
||||
processScroll(request, context);
|
||||
queryPhase.execute(context);
|
||||
contextProcessedSuccessfully(context);
|
||||
shardSearchStats.onQueryPhase(context, System.nanoTime() - time);
|
||||
operationListener.onQueryPhase(context, System.nanoTime() - time);
|
||||
return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget());
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
logger.trace("Query phase failed", e);
|
||||
processFailure(context, e);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
|
@ -331,9 +333,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
contextProcessing(context);
|
||||
context.searcher().setAggregatedDfs(request.dfs());
|
||||
IndexShard indexShard = context.indexShard();
|
||||
ShardSearchStats shardSearchStats = indexShard.searchService();
|
||||
SearchOperationListener operationListener = indexShard.getSearchOperationListener();
|
||||
try {
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
queryPhase.execute(context);
|
||||
if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
|
||||
|
@ -342,10 +344,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
} else {
|
||||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
shardSearchStats.onQueryPhase(context, System.nanoTime() - time);
|
||||
operationListener.onQueryPhase(context, System.nanoTime() - time);
|
||||
return context.queryResult();
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
logger.trace("Query phase failed", e);
|
||||
processFailure(context, e);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
|
@ -368,18 +370,18 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
final SearchContext context = createAndPutContext(request);
|
||||
contextProcessing(context);
|
||||
try {
|
||||
ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
try {
|
||||
loadOrExecuteQueryPhase(request, context, queryPhase);
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
long time2 = System.nanoTime();
|
||||
shardSearchStats.onQueryPhase(context, time2 - time);
|
||||
shardSearchStats.onPreFetchPhase(context);
|
||||
operationListener.onQueryPhase(context, time2 - time);
|
||||
operationListener.onPreFetchPhase(context);
|
||||
try {
|
||||
shortcutDocIdsToLoad(context);
|
||||
fetchPhase.execute(context);
|
||||
|
@ -389,10 +391,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedFetchPhase(context);
|
||||
operationListener.onFailedFetchPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
shardSearchStats.onFetchPhase(context, System.nanoTime() - time2);
|
||||
operationListener.onFetchPhase(context, System.nanoTime() - time2);
|
||||
return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
|
||||
} catch (Throwable e) {
|
||||
logger.trace("Fetch phase failed", e);
|
||||
|
@ -408,18 +410,18 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
contextProcessing(context);
|
||||
context.searcher().setAggregatedDfs(request.dfs());
|
||||
try {
|
||||
ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
try {
|
||||
queryPhase.execute(context);
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
long time2 = System.nanoTime();
|
||||
shardSearchStats.onQueryPhase(context, time2 - time);
|
||||
shardSearchStats.onPreFetchPhase(context);
|
||||
operationListener.onQueryPhase(context, time2 - time);
|
||||
operationListener.onPreFetchPhase(context);
|
||||
try {
|
||||
shortcutDocIdsToLoad(context);
|
||||
fetchPhase.execute(context);
|
||||
|
@ -429,10 +431,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedFetchPhase(context);
|
||||
operationListener.onFailedFetchPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
shardSearchStats.onFetchPhase(context, System.nanoTime() - time2);
|
||||
operationListener.onFetchPhase(context, System.nanoTime() - time2);
|
||||
return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
|
||||
} catch (Throwable e) {
|
||||
logger.trace("Fetch phase failed", e);
|
||||
|
@ -447,19 +449,19 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
final SearchContext context = findContext(request.id());
|
||||
contextProcessing(context);
|
||||
try {
|
||||
ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
processScroll(request, context);
|
||||
shardSearchStats.onPreQueryPhase(context);
|
||||
operationListener.onPreQueryPhase(context);
|
||||
long time = System.nanoTime();
|
||||
try {
|
||||
queryPhase.execute(context);
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedQueryPhase(context);
|
||||
operationListener.onFailedQueryPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
long time2 = System.nanoTime();
|
||||
shardSearchStats.onQueryPhase(context, time2 - time);
|
||||
shardSearchStats.onPreFetchPhase(context);
|
||||
operationListener.onQueryPhase(context, time2 - time);
|
||||
operationListener.onPreFetchPhase(context);
|
||||
try {
|
||||
shortcutDocIdsToLoad(context);
|
||||
fetchPhase.execute(context);
|
||||
|
@ -469,10 +471,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedFetchPhase(context);
|
||||
operationListener.onFailedFetchPhase(context);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
}
|
||||
shardSearchStats.onFetchPhase(context, System.nanoTime() - time2);
|
||||
operationListener.onFetchPhase(context, System.nanoTime() - time2);
|
||||
return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
|
||||
} catch (Throwable e) {
|
||||
logger.trace("Fetch phase failed", e);
|
||||
|
@ -486,13 +488,13 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
public FetchSearchResult executeFetchPhase(ShardFetchRequest request) {
|
||||
final SearchContext context = findContext(request.id());
|
||||
contextProcessing(context);
|
||||
final ShardSearchStats shardSearchStats = context.indexShard().searchService();
|
||||
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
|
||||
try {
|
||||
if (request.lastEmittedDoc() != null) {
|
||||
context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
|
||||
}
|
||||
context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
|
||||
shardSearchStats.onPreFetchPhase(context);
|
||||
operationListener.onPreFetchPhase(context);
|
||||
long time = System.nanoTime();
|
||||
fetchPhase.execute(context);
|
||||
if (fetchPhaseShouldFreeContext(context)) {
|
||||
|
@ -500,10 +502,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
} else {
|
||||
contextProcessedSuccessfully(context);
|
||||
}
|
||||
shardSearchStats.onFetchPhase(context, System.nanoTime() - time);
|
||||
operationListener.onFetchPhase(context, System.nanoTime() - time);
|
||||
return context.fetchResult();
|
||||
} catch (Throwable e) {
|
||||
shardSearchStats.onFailedFetchPhase(context);
|
||||
operationListener.onFailedFetchPhase(context);
|
||||
logger.trace("Fetch phase failed", e);
|
||||
processFailure(context, e);
|
||||
throw ExceptionsHelper.convertToRuntime(e);
|
||||
|
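All of the SearchService call sites in this commit follow the same shape: look up the shard's composite listener, notify it before the phase, time the phase, and then report either the elapsed nanoseconds or the failure. Condensed from the hunks above (error handling and context bookkeeping trimmed):

-------------------------------------------------
SearchOperationListener listener = context.indexShard().getSearchOperationListener();
listener.onPreQueryPhase(context);
long start = System.nanoTime();
try {
    queryPhase.execute(context);
} catch (Throwable t) {
    listener.onFailedQueryPhase(context);
    throw ExceptionsHelper.convertToRuntime(t);
}
listener.onQueryPhase(context, System.nanoTime() - start);
-------------------------------------------------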
@ -527,9 +529,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
try {
|
||||
putContext(context);
|
||||
if (request.scroll() != null) {
|
||||
context.indexShard().searchService().onNewScrollContext(context);
|
||||
context.indexShard().getSearchOperationListener().onNewScrollContext(context);
|
||||
}
|
||||
context.indexShard().searchService().onNewContext(context);
|
||||
context.indexShard().getSearchOperationListener().onNewContext(context);
|
||||
success = true;
|
||||
return context;
|
||||
} finally {
|
||||
|
@ -617,9 +619,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
final SearchContext context = removeContext(id);
|
||||
if (context != null) {
|
||||
try {
|
||||
context.indexShard().searchService().onFreeContext(context);
|
||||
context.indexShard().getSearchOperationListener().onFreeContext(context);
|
||||
if (context.scrollContext() != null) {
|
||||
context.indexShard().searchService().onFreeScrollContext(context);
|
||||
context.indexShard().getSearchOperationListener().onFreeScrollContext(context);
|
||||
}
|
||||
} finally {
|
||||
context.close();
|
||||
|
@ -754,20 +756,22 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
}
|
||||
}
|
||||
if (source.innerHits() != null) {
|
||||
XContentParser innerHitsParser = null;
|
||||
try {
|
||||
innerHitsParser = XContentFactory.xContent(source.innerHits()).createParser(source.innerHits());
|
||||
innerHitsParser.nextToken();
|
||||
this.elementParsers.get("inner_hits").parse(innerHitsParser, context);
|
||||
} catch (Exception e) {
|
||||
String sSource = "_na_";
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : source.innerHits().getInnerHitsBuilders().entrySet()) {
|
||||
try {
|
||||
sSource = source.toString();
|
||||
} catch (Throwable e1) {
|
||||
// ignore
|
||||
 // This is the same logic as in QueryShardContext#toQuery(), where we also reset twice.
|
||||
// Personally I think a reset at the end is sufficient, but I kept the logic consistent with this method.
|
||||
|
||||
 // The reason we need to invoke reset at all here is that inner hits may modify the QueryShardContext#nestedScope,
|
||||
// so we need to reset at the end.
|
||||
queryShardContext.reset();
|
||||
InnerHitBuilder innerHitBuilder = entry.getValue();
|
||||
InnerHitsContext innerHitsContext = context.innerHits();
|
||||
innerHitBuilder.buildTopLevel(context, queryShardContext, innerHitsContext);
|
||||
} catch (IOException e) {
|
||||
throw new SearchContextException(context, "failed to create InnerHitsContext", e);
|
||||
} finally {
|
||||
queryShardContext.reset();
|
||||
}
|
||||
XContentLocation location = innerHitsParser != null ? innerHitsParser.getTokenLocation() : null;
|
||||
throw new SearchParseException(context, "failed to parse suggest source [" + sSource + "]", location, e);
|
||||
}
|
||||
}
|
||||
if (source.scriptFields() != null) {
|
||||
|
|
|
@ -79,6 +79,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
SubSearchContext subSearchContext = new SubSearchContext(context.searchContext());
|
||||
subSearchContext.parsedQuery(context.searchContext().parsedQuery());
|
||||
subSearchContext.explain(explain);
|
||||
subSearchContext.version(version);
|
||||
subSearchContext.trackScores(trackScores);
|
||||
|
|
|
@ -46,7 +46,7 @@ import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
|||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
import org.elasticsearch.index.query.support.InnerHitsBuilder;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
@ -162,7 +162,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
private SuggestBuilder suggestBuilder;
|
||||
|
||||
private BytesReference innerHitsBuilder;
|
||||
private InnerHitsBuilder innerHitsBuilder;
|
||||
|
||||
private List<RescoreBuilder<?>> rescoreBuilders;
|
||||
|
||||
|
@ -457,22 +457,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
}
|
||||
|
||||
public SearchSourceBuilder innerHits(InnerHitsBuilder innerHitsBuilder) {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
innerHitsBuilder.innerXContent(builder, EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
this.innerHitsBuilder = builder.bytes();
|
||||
return this;
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
this.innerHitsBuilder = innerHitsBuilder;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the bytes representing the inner hits builder for this request.
|
||||
*/
|
||||
public BytesReference innerHits() {
|
||||
public InnerHitsBuilder innerHits() {
|
||||
return innerHitsBuilder;
|
||||
}
|
||||
|
||||
|
@ -857,40 +846,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
} else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
|
||||
scriptFields = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
String scriptFieldName = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
Script script = null;
|
||||
boolean ignoreFailure = false;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
|
||||
script = Script.parse(parser, context.parseFieldMatcher());
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, IGNORE_FAILURE_FIELD)) {
|
||||
ignoreFailure = parser.booleanValue();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
|
||||
script = Script.parse(parser, context.parseFieldMatcher());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
scriptFields.add(new ScriptField(scriptFieldName, script, ignoreFailure));
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
|
||||
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
scriptFields.add(new ScriptField(parser, context));
|
||||
}
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
|
||||
indexBoost = new ObjectFloatHashMap<String>();
|
||||
|
@ -909,8 +865,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
} else if (context.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) {
|
||||
highlightBuilder = HighlightBuilder.PROTOTYPE.fromXContent(context);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
innerHitsBuilder = xContentBuilder.bytes();
|
||||
innerHitsBuilder = InnerHitsBuilder.fromXContent(parser, context);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
|
||||
suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
|
@ -1094,10 +1049,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
}
|
||||
|
||||
if (innerHitsBuilder != null) {
|
||||
builder.field(INNER_HITS_FIELD.getPreferredName());
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(innerHitsBuilder);
|
||||
parser.nextToken();
|
||||
builder.copyCurrentStructure(parser);
|
||||
builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitsBuilder, params);
|
||||
}
|
||||
|
||||
if (suggestBuilder != null) {
|
||||
|
@ -1126,7 +1078,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
public static class ScriptField implements Writeable<ScriptField>, ToXContent {
|
||||
|
||||
public static final ScriptField PROTOTYPE = new ScriptField(null, null);
|
||||
public static final ScriptField PROTOTYPE = new ScriptField((String) null, (Script) null);
|
||||
|
||||
private final boolean ignoreFailure;
|
||||
private final String fieldName;
|
||||
|
@ -1142,6 +1094,48 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
this.ignoreFailure = ignoreFailure;
|
||||
}
|
||||
|
||||
public ScriptField(XContentParser parser, QueryParseContext context) throws IOException {
|
||||
boolean ignoreFailure = false;
|
||||
String scriptFieldName = parser.currentName();
|
||||
Script script = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
|
||||
script = Script.parse(parser, context.parseFieldMatcher());
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, IGNORE_FAILURE_FIELD)) {
|
||||
ignoreFailure = parser.booleanValue();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELD)) {
|
||||
script = Script.parse(parser, context.parseFieldMatcher());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
|
||||
+ "].", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
this.ignoreFailure = ignoreFailure;
|
||||
this.fieldName = scriptFieldName;
|
||||
this.script = script;
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
|
||||
+ parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
|
||||
public String fieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
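For reference, the constructor above parses a single entry of the script_fields object in the request body. The field name and script below are made up for illustration; only the script and ignore_failure keys correspond to the ParseFields used in the parsing code:

-------------------------------------------------
"script_fields": {
    "discounted_price": {
        "script": {
            "inline": "doc['price'].value * 0.9"
        },
        "ignore_failure": true
    }
}
-------------------------------------------------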
@ -1235,7 +1229,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
builder.indexBoost = indexBoost;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
builder.innerHitsBuilder = in.readBytesReference();
|
||||
builder.innerHitsBuilder = InnerHitsBuilder.PROTO.readFrom(in);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
builder.minScore = in.readFloat();
|
||||
|
@ -1343,7 +1337,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
boolean hasInnerHitsBuilder = innerHitsBuilder != null;
|
||||
out.writeBoolean(hasInnerHitsBuilder);
|
||||
if (hasInnerHitsBuilder) {
|
||||
out.writeBytesReference(innerHitsBuilder);
|
||||
innerHitsBuilder.writeTo(out);
|
||||
}
|
||||
boolean hasMinScore = minScore != null;
|
||||
out.writeBoolean(hasMinScore);
|
||||
|
|
|
@ -1,305 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.fetch.innerhits;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class InnerHitsBuilder implements ToXContent {
|
||||
|
||||
private final Map<String, InnerHitsHolder> innerHits = new HashMap<>();
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("inner_hits");
|
||||
innerXContent(builder, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public void innerXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
for (Map.Entry<String, InnerHitsHolder> entry : innerHits.entrySet()) {
|
||||
builder.startObject(entry.getKey());
|
||||
entry.getValue().toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* For nested inner hits the path to collect child nested docs for.
|
||||
* @param name the name / key of the inner hits in the response
|
||||
* @param path the path into the nested to collect inner hits for
|
||||
* @param innerHit the inner hits definition
|
||||
*/
|
||||
public void addNestedInnerHits(String name, String path, InnerHit innerHit) {
|
||||
if (innerHits.containsKey(name)) {
|
||||
throw new IllegalArgumentException("inner hits for name: [" + name +"] is already registered");
|
||||
}
|
||||
innerHits.put(name, new NestedInnerHitsHolder(path, innerHit));
|
||||
}
|
||||
|
||||
/**
|
||||
* For parent/child inner hits the type to collect inner hits for.
|
||||
* @param name the name / key of the inner hits in the response
|
||||
* @param type the document type to collect inner hits for
|
||||
* @param innerHit the inner hits definition
|
||||
*/
|
||||
public void addParentChildInnerHits(String name, String type, InnerHit innerHit) {
|
||||
innerHits.put(name, new ParentChildInnerHitsHolder(type, innerHit));
|
||||
}
|
||||
|
||||
private static class InnerHitsHolder implements ToXContent{
|
||||
private final InnerHit hits;
|
||||
|
||||
private InnerHitsHolder(InnerHit hits) {
|
||||
this.hits = hits;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return hits.toXContent(builder, params);
|
||||
}
|
||||
}
|
||||
|
||||
private static class ParentChildInnerHitsHolder extends InnerHitsHolder {
|
||||
|
||||
private final String type;
|
||||
|
||||
private ParentChildInnerHitsHolder(String type, InnerHit hits) {
|
||||
super(hits);
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("type").startObject(type);
|
||||
super.toXContent(builder, params);
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
}
|
||||
|
||||
private static class NestedInnerHitsHolder extends InnerHitsHolder {
|
||||
|
||||
private final String path;
|
||||
|
||||
private NestedInnerHitsHolder(String path, InnerHit hits) {
|
||||
super(hits);
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("path").startObject(path);
|
||||
super.toXContent(builder, params);
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
}
|
||||
|
||||
public static class InnerHit implements ToXContent {
|
||||
|
||||
private SearchSourceBuilder sourceBuilder;
|
||||
private String path;
|
||||
private String type;
|
||||
|
||||
/**
|
||||
* The index to start to return hits from. Defaults to <tt>0</tt>.
|
||||
*/
|
||||
public InnerHit setFrom(int from) {
|
||||
sourceBuilder().from(from);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of search hits to return. Defaults to <tt>10</tt>.
|
||||
*/
|
||||
public InnerHit setSize(int size) {
|
||||
sourceBuilder().size(size);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies when sorting, and controls if scores will be tracked as well. Defaults to
|
||||
* <tt>false</tt>.
|
||||
*/
|
||||
public InnerHit setTrackScores(boolean trackScores) {
|
||||
sourceBuilder().trackScores(trackScores);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should each {@link org.elasticsearch.search.SearchHit} be returned with an
|
||||
* explanation of the hit (ranking).
|
||||
*/
|
||||
public InnerHit setExplain(boolean explain) {
|
||||
sourceBuilder().explain(explain);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should each {@link org.elasticsearch.search.SearchHit} be returned with its
|
||||
* version.
|
||||
*/
|
||||
public InnerHit setVersion(boolean version) {
|
||||
sourceBuilder().version(version);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a stored field to be loaded and returned with the inner hit.
|
||||
*/
|
||||
public InnerHit field(String name) {
|
||||
sourceBuilder().field(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets no fields to be loaded, resulting in only id and type to be returned per field.
|
||||
*/
|
||||
public InnerHit setNoFields() {
|
||||
sourceBuilder().noFields();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates whether the response should contain the stored _source for every hit
|
||||
*/
|
||||
public InnerHit setFetchSource(boolean fetch) {
|
||||
sourceBuilder().fetchSource(fetch);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
|
||||
* elements.
|
||||
*
|
||||
* @param include An optional include (optionally wildcarded) pattern to filter the returned _source
|
||||
* @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source
|
||||
*/
|
||||
public InnerHit setFetchSource(@Nullable String include, @Nullable String exclude) {
|
||||
sourceBuilder().fetchSource(include, exclude);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard
|
||||
* elements.
|
||||
*
|
||||
* @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source
|
||||
* @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source
|
||||
*/
|
||||
public InnerHit setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
|
||||
sourceBuilder().fetchSource(includes, excludes);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field data based field to load and return. The field does not have to be stored,
|
||||
         * but it is recommended to use non-analyzed or numeric fields.
|
||||
*
|
||||
* @param name The field to get from the field data cache
|
||||
*/
|
||||
public InnerHit addFieldDataField(String name) {
|
||||
sourceBuilder().fieldDataField(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a script based field to load and return. The field does not have to be stored,
|
||||
         * but it is recommended to use non-analyzed or numeric fields.
|
||||
*
|
||||
* @param name The name that will represent this value in the return hit
|
||||
* @param script The script to use
|
||||
*/
|
||||
public InnerHit addScriptField(String name, Script script) {
|
||||
sourceBuilder().scriptField(name, script);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a sort against the given field name and the sort ordering.
|
||||
*
|
||||
* @param field The name of the field
|
||||
* @param order The sort ordering
|
||||
*/
|
||||
public InnerHit addSort(String field, SortOrder order) {
|
||||
sourceBuilder().sort(field, order);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a generic sort builder.
|
||||
*
|
||||
* @see org.elasticsearch.search.sort.SortBuilders
|
||||
*/
|
||||
public InnerHit addSort(SortBuilder sort) {
|
||||
sourceBuilder().sort(sort);
|
||||
return this;
|
||||
}
|
||||
|
||||
public HighlightBuilder highlighter() {
|
||||
return sourceBuilder().highlighter();
|
||||
}
|
||||
|
||||
public InnerHit highlighter(HighlightBuilder highlightBuilder) {
|
||||
sourceBuilder().highlighter(highlightBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
protected SearchSourceBuilder sourceBuilder() {
|
||||
if (sourceBuilder == null) {
|
||||
sourceBuilder = new SearchSourceBuilder();
|
||||
}
|
||||
return sourceBuilder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the query to run for collecting the inner hits.
|
||||
*/
|
||||
public InnerHit setQuery(QueryBuilder query) {
|
||||
sourceBuilder().query(query);
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHit innerHits(InnerHitsBuilder innerHitsBuilder) {
|
||||
sourceBuilder().innerHits(innerHitsBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (sourceBuilder != null) {
|
||||
sourceBuilder.innerToXContent(builder, params);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -48,16 +48,16 @@ import org.elasticsearch.index.mapper.Uid;
|
|||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.internal.FilteredSearchContext;
|
||||
import org.elasticsearch.search.internal.InternalSearchHit;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -70,59 +70,46 @@ public final class InnerHitsContext {
    }

    public InnerHitsContext(Map<String, BaseInnerHits> innerHits) {
        this.innerHits = innerHits;
        this.innerHits = Objects.requireNonNull(innerHits);
    }

    public Map<String, BaseInnerHits> getInnerHits() {
        return innerHits;
    }

    public void addInnerHitDefinition(String name, BaseInnerHits innerHit) {
        if (innerHits.containsKey(name)) {
            throw new IllegalArgumentException("inner_hit definition with the name [" + name + "] already exists. Use a different inner_hit name");
    public void addInnerHitDefinition(BaseInnerHits innerHit) {
        if (innerHits.containsKey(innerHit.getName())) {
            throw new IllegalArgumentException("inner_hit definition with the name [" + innerHit.getName() +
                "] already exists. Use a different inner_hit name");
        }

        innerHits.put(name, innerHit);
        innerHits.put(innerHit.getName(), innerHit);
    }

    public void addInnerHitDefinitions(Map<String, BaseInnerHits> innerHits) {
        for (Map.Entry<String, BaseInnerHits> entry : innerHits.entrySet()) {
            addInnerHitDefinition(entry.getKey(), entry.getValue());
        }
    }
    public static abstract class BaseInnerHits extends SubSearchContext {

    public static abstract class BaseInnerHits extends FilteredSearchContext {
        private final String name;
        private InnerHitsContext childInnerHits;

        protected final ParsedQuery query;
        private final InnerHitsContext childInnerHits;

        protected BaseInnerHits(SearchContext context, ParsedQuery query, Map<String, BaseInnerHits> childInnerHits) {
        protected BaseInnerHits(String name, SearchContext context) {
            super(context);
            this.query = query;
            if (childInnerHits != null && !childInnerHits.isEmpty()) {
                this.childInnerHits = new InnerHitsContext(childInnerHits);
            } else {
                this.childInnerHits = null;
            }
        }

        @Override
        public Query query() {
            return query.query();
        }

        @Override
        public ParsedQuery parsedQuery() {
            return query;
            this.name = name;
        }

        public abstract TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException;

        public String getName() {
            return name;
        }

        @Override
        public InnerHitsContext innerHits() {
            return childInnerHits;
        }

        public void setChildInnerHits(Map<String, InnerHitsContext.BaseInnerHits> childInnerHits) {
            this.childInnerHits = new InnerHitsContext(childInnerHits);
        }
    }

    public static final class NestedInnerHits extends BaseInnerHits {
@ -130,8 +117,8 @@ public final class InnerHitsContext {
        private final ObjectMapper parentObjectMapper;
        private final ObjectMapper childObjectMapper;

        public NestedInnerHits(SearchContext context, ParsedQuery query, Map<String, BaseInnerHits> childInnerHits, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper) {
            super(context, query, childInnerHits);
        public NestedInnerHits(String name, SearchContext context, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper) {
            super(name != null ? name : childObjectMapper.fullPath(), context);
            this.parentObjectMapper = parentObjectMapper;
            this.childObjectMapper = childObjectMapper;
        }

@ -146,7 +133,7 @@ public final class InnerHitsContext {
            }
            BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
            Query childFilter = childObjectMapper.nestedTypeFilter();
            Query q = Queries.filtered(query.query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));
            Query q = Queries.filtered(query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));

            if (size() == 0) {
                return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);

@ -292,8 +279,8 @@ public final class InnerHitsContext {
        private final MapperService mapperService;
        private final DocumentMapper documentMapper;

        public ParentChildInnerHits(SearchContext context, ParsedQuery query, Map<String, BaseInnerHits> childInnerHits, MapperService mapperService, DocumentMapper documentMapper) {
            super(context, query, childInnerHits);
        public ParentChildInnerHits(String name, SearchContext context, MapperService mapperService, DocumentMapper documentMapper) {
            super(name != null ? name : documentMapper.type(), context);
            this.mapperService = mapperService;
            this.documentMapper = documentMapper;
        }

@ -317,7 +304,7 @@ public final class InnerHitsContext {
            }

            BooleanQuery q = new BooleanQuery.Builder()
                .add(query.query(), Occur.MUST)
                .add(query(), Occur.MUST)
                // Only include docs that have the current hit as parent
                .add(hitQuery, Occur.FILTER)
                // Only include docs that have this inner hits type
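The hunks above move the inner hit name into the hit itself, falling back to the nested path or the parent/child type when no explicit name is given. A rough sketch of registering a definition through the new API; searchContext and the object mappers are placeholders, not taken from this change:

----------------------------------------------------
// hedged sketch around the refactored constructors and addInnerHitDefinition above
InnerHitsContext innerHitsContext = new InnerHitsContext(new HashMap<>());
InnerHitsContext.NestedInnerHits commentHits =
        new InnerHitsContext.NestedInnerHits(null, searchContext, parentObjectMapper, commentsObjectMapper);
// no explicit name was passed, so the definition is registered under commentsObjectMapper.fullPath()
innerHitsContext.addInnerHitDefinition(commentHits);
----------------------------------------------------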
@ -31,33 +31,30 @@ import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
import org.elasticsearch.search.highlight.HighlighterParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.singletonMap;

/**
 */
public class InnerHitsFetchSubPhase implements FetchSubPhase {
    private final Map<String, ? extends SearchParseElement> parseElements;

    private FetchPhase fetchPhase;

    @Inject
    public InnerHitsFetchSubPhase(FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
        parseElements = singletonMap("inner_hits", new InnerHitsParseElement(sourceParseElement, highlighterParseElement,
            fieldDataFieldsParseElement, scriptFieldsParseElement));
    public InnerHitsFetchSubPhase() {
    }

    @Override
    public Map<String, ? extends SearchParseElement> parseElements() {
        return parseElements;
        // SearchParse elements needed because everything is parsed by InnerHitBuilder and eventually put
        // into the search context.
        return Collections.emptyMap();
    }

    @Override
@ -1,221 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.fetch.innerhits;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.SearchParseElement;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
|
||||
import org.elasticsearch.search.highlight.HighlighterParseElement;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.query.support.InnerHitsQueryParserHelper.parseCommonInnerHitOptions;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class InnerHitsParseElement implements SearchParseElement {
|
||||
|
||||
private final FetchSourceParseElement sourceParseElement;
|
||||
private final HighlighterParseElement highlighterParseElement;
|
||||
private final FieldDataFieldsParseElement fieldDataFieldsParseElement;
|
||||
private final ScriptFieldsParseElement scriptFieldsParseElement;
|
||||
|
||||
public InnerHitsParseElement(FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
|
||||
this.sourceParseElement = sourceParseElement;
|
||||
this.highlighterParseElement = highlighterParseElement;
|
||||
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
|
||||
this.scriptFieldsParseElement = scriptFieldsParseElement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
|
||||
QueryShardContext context = searchContext.getQueryShardContext();
|
||||
context.reset(parser);
|
||||
Map<String, InnerHitsContext.BaseInnerHits> topLevelInnerHits = parseInnerHits(parser, context, searchContext);
|
||||
if (topLevelInnerHits != null) {
|
||||
InnerHitsContext innerHitsContext = searchContext.innerHits();
|
||||
innerHitsContext.addInnerHitDefinitions(topLevelInnerHits);
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, InnerHitsContext.BaseInnerHits> parseInnerHits(XContentParser parser, QueryShardContext context, SearchContext searchContext) throws Exception {
|
||||
XContentParser.Token token;
|
||||
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new IllegalArgumentException("Unexpected token " + token + " in [inner_hits]: inner_hit definitions must start with the name of the inner_hit.");
|
||||
}
|
||||
final String innerHitName = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalArgumentException("Inner hit definition for [" + innerHitName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
|
||||
}
|
||||
InnerHitsContext.BaseInnerHits innerHits = parseInnerHit(parser, context, searchContext, innerHitName);
|
||||
if (innerHitsMap == null) {
|
||||
innerHitsMap = new HashMap<>();
|
||||
}
|
||||
innerHitsMap.put(innerHitName, innerHits);
|
||||
}
|
||||
return innerHitsMap;
|
||||
}
|
||||
|
||||
private InnerHitsContext.BaseInnerHits parseInnerHit(XContentParser parser, QueryShardContext context, SearchContext searchContext, String innerHitName) throws Exception {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new IllegalArgumentException("Unexpected token " + token + " inside inner hit definition. Either specify [path] or [type] object");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalArgumentException("Inner hit definition for [" + innerHitName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
|
||||
}
|
||||
|
||||
String nestedPath = null;
|
||||
String type = null;
|
||||
switch (fieldName) {
|
||||
case "path":
|
||||
nestedPath = parser.currentName();
|
||||
break;
|
||||
case "type":
|
||||
type = parser.currentName();
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Either path or type object must be defined");
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new IllegalArgumentException("Unexpected token " + token + " inside inner hit definition. Either specify [path] or [type] object");
|
||||
}
|
||||
fieldName = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalArgumentException("Inner hit definition for [" + innerHitName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
|
||||
}
|
||||
|
||||
final InnerHitsContext.BaseInnerHits innerHits;
|
||||
if (nestedPath != null) {
|
||||
innerHits = parseNested(parser, context, searchContext, fieldName);
|
||||
} else if (type != null) {
|
||||
innerHits = parseParentChild(parser, context, searchContext, fieldName);
|
||||
} else {
|
||||
throw new IllegalArgumentException("Either [path] or [type] must be defined");
|
||||
}
|
||||
|
||||
// Completely consume all json objects:
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new IllegalArgumentException("Expected [" + XContentParser.Token.END_OBJECT + "] token, but got a [" + token + "] token.");
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new IllegalArgumentException("Expected [" + XContentParser.Token.END_OBJECT + "] token, but got a [" + token + "] token.");
|
||||
}
|
||||
|
||||
return innerHits;
|
||||
}
|
||||
|
||||
private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryShardContext context, SearchContext searchContext, String type) throws Exception {
|
||||
ParseResult parseResult = parseSubSearchContext(searchContext, context, parser);
|
||||
DocumentMapper documentMapper = searchContext.mapperService().documentMapper(type);
|
||||
if (documentMapper == null) {
|
||||
throw new IllegalArgumentException("type [" + type + "] doesn't exist");
|
||||
}
|
||||
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.getMapperService(), documentMapper);
|
||||
}
|
||||
|
||||
private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception {
|
||||
ObjectMapper objectMapper = searchContext.getObjectMapper(nestedPath);
|
||||
if (objectMapper == null) {
|
||||
throw new IllegalArgumentException("path [" + nestedPath +"] doesn't exist");
|
||||
}
|
||||
if (objectMapper.nested().isNested() == false) {
|
||||
throw new IllegalArgumentException("path [" + nestedPath +"] isn't nested");
|
||||
}
|
||||
ObjectMapper parentObjectMapper = context.nestedScope().nextLevel(objectMapper);
|
||||
ParseResult parseResult = parseSubSearchContext(searchContext, context, parser);
|
||||
context.nestedScope().previousLevel();
|
||||
|
||||
return new InnerHitsContext.NestedInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parentObjectMapper, objectMapper);
|
||||
}
|
||||
|
||||
private ParseResult parseSubSearchContext(SearchContext searchContext, QueryShardContext context, XContentParser parser) throws Exception {
|
||||
ParsedQuery query = null;
|
||||
Map<String, InnerHitsContext.BaseInnerHits> childInnerHits = null;
|
||||
SubSearchContext subSearchContext = new SubSearchContext(searchContext);
|
||||
String fieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("query".equals(fieldName)) {
|
||||
Query q = context.parseInnerQuery();
|
||||
query = new ParsedQuery(q, context.copyNamedQueries());
|
||||
} else if ("inner_hits".equals(fieldName)) {
|
||||
childInnerHits = parseInnerHits(parser, context, searchContext);
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
}
|
||||
|
||||
if (query == null) {
|
||||
query = ParsedQuery.parsedMatchAllQuery();
|
||||
}
|
||||
return new ParseResult(subSearchContext, query, childInnerHits);
|
||||
}
|
||||
|
||||
private static final class ParseResult {
|
||||
|
||||
private final SubSearchContext context;
|
||||
private final ParsedQuery query;
|
||||
private final Map<String, InnerHitsContext.BaseInnerHits> childInnerHits;
|
||||
|
||||
private ParseResult(SubSearchContext context, ParsedQuery query, Map<String, InnerHitsContext.BaseInnerHits> childInnerHits) {
|
||||
this.context = context;
|
||||
this.query = query;
|
||||
this.childInnerHits = childInnerHits;
|
||||
}
|
||||
|
||||
public SubSearchContext context() {
|
||||
return context;
|
||||
}
|
||||
|
||||
public ParsedQuery query() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public Map<String, InnerHitsContext.BaseInnerHits> childInnerHits() {
|
||||
return childInnerHits;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,40 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.fetch.innerhits;
|
||||
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
|
||||
public class InnerHitsSubSearchContext {
|
||||
private final String name;
|
||||
private final SubSearchContext subSearchContext;
|
||||
|
||||
public InnerHitsSubSearchContext(String name, SubSearchContext subSearchContext) {
|
||||
this.name = name;
|
||||
this.subSearchContext = subSearchContext;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public SubSearchContext getSubSearchContext() {
|
||||
return subSearchContext;
|
||||
}
|
||||
}
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight;

import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;

@ -42,7 +43,7 @@ import java.util.Objects;
 * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field}
 * and provides the common setters, equality, hashCode calculation and common serialization
 */
public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> {
public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> extends ToXContentToBytes {

    public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags");
    public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags");

@ -363,7 +364,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
     * @return the value set by {@link #phraseLimit(Integer)}
     */
    public Integer phraseLimit() {
        return this.noMatchSize;
        return this.phraseLimit;
    }

    /**

@ -382,6 +383,16 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
        return this.forceSource;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        innerXContent(builder);
        builder.endObject();
        return builder;
    }

    protected abstract void innerXContent(XContentBuilder builder) throws IOException;

    void commonOptionsToXContent(XContentBuilder builder) throws IOException {
        if (preTags != null) {
            builder.array(PRE_TAGS_FIELD.getPreferredName(), preTags);
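Serialization is now split between the base class and its subclasses: toXContent() above wraps the object, and each concrete builder fills in the body through innerXContent(). A sketch of what such an override looks like; the subclass-specific part is left as a comment because it is not shown in this hunk:

----------------------------------------------------
@Override
public void innerXContent(XContentBuilder builder) throws IOException {
    // shared options (pre_tags, post_tags, ...) via the package-private helper above
    commonOptionsToXContent(builder);
    // builder-specific fields would follow here
}
----------------------------------------------------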
@ -21,21 +21,17 @@ package org.elasticsearch.search.highlight;
|
|||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions;
|
||||
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions.Builder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -54,7 +50,7 @@ import java.util.Set;
|
|||
*
|
||||
* @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight()
|
||||
*/
|
||||
public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder> implements Writeable<HighlightBuilder>, ToXContent {
|
||||
public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder> implements Writeable<HighlightBuilder> {
|
||||
|
||||
public static final HighlightBuilder PROTOTYPE = new HighlightBuilder();
|
||||
|
||||
|
@ -92,16 +88,15 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
public static final String[] DEFAULT_STYLED_POST_TAGS = {"</em>"};
|
||||
|
||||
/**
|
||||
* a {@link FieldOptions.Builder} with default settings
|
||||
* a {@link FieldOptions} with default settings
|
||||
*/
|
||||
public final static Builder defaultFieldOptions() {
|
||||
return new SearchContextHighlight.FieldOptions.Builder()
|
||||
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER)
|
||||
.requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS)
|
||||
.encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN)
|
||||
.boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS)
|
||||
.noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT);
|
||||
}
|
||||
final static FieldOptions defaultOptions = new SearchContextHighlight.FieldOptions.Builder()
|
||||
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED)
|
||||
.highlightFilter(DEFAULT_HIGHLIGHT_FILTER).requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH)
|
||||
.forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE)
|
||||
.numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS).encoder(DEFAULT_ENCODER)
|
||||
.boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN).boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS)
|
||||
.noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT).build();
|
||||
|
||||
private final List<Field> fields = new ArrayList<>();
|
||||
|
||||
|
@ -222,14 +217,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
return this.useExplicitFieldOrder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
innerXContent(builder);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* parse options only present in top level highlight builder (`tags_schema`, `encoder` and nested `fields`)
|
||||
*/
|
||||
|
@ -279,7 +266,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
transferOptions(this, globalOptionsBuilder, context);
|
||||
|
||||
// overwrite unset global options by default values
|
||||
globalOptionsBuilder.merge(defaultFieldOptions().build());
|
||||
globalOptionsBuilder.merge(defaultOptions);
|
||||
|
||||
// create field options
|
||||
Collection<org.elasticsearch.search.highlight.SearchContextHighlight.Field> fieldOptions = new ArrayList<>();
|
||||
|
@ -292,7 +279,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
fieldOptionsBuilder.matchedFields(matchedFields);
|
||||
}
|
||||
transferOptions(field, fieldOptionsBuilder, context);
|
||||
fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder.merge(globalOptionsBuilder.build()).build()));
|
||||
fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder
|
||||
.merge(globalOptionsBuilder.build()).build()));
|
||||
}
|
||||
return new SearchContextHighlight(fieldOptions);
|
||||
}
|
||||
|
@ -306,7 +294,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
* @throws IOException on errors parsing any optional nested highlight query
|
||||
*/
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder, SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException {
|
||||
private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder,
|
||||
SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException {
|
||||
if (highlighterBuilder.preTags != null) {
|
||||
targetOptionsBuilder.preTags(highlighterBuilder.preTags);
|
||||
}
|
||||
|
@ -357,7 +346,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
}
|
||||
}
|
||||
|
||||
private static Character[] convertCharArray(char[] array) {
|
||||
static Character[] convertCharArray(char[] array) {
|
||||
if (array == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -368,6 +357,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
return charArray;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void innerXContent(XContentBuilder builder) throws IOException {
|
||||
// first write common options
|
||||
commonOptionsToXContent(builder);
|
||||
|
@ -398,18 +388,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.prettyPrint();
|
||||
toXContent(builder, EMPTY_PARAMS);
|
||||
return builder.string();
|
||||
} catch (Exception e) {
|
||||
return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected HighlightBuilder createInstance(XContentParser parser) {
|
||||
return new HighlightBuilder();
|
||||
|
@ -483,6 +461,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void innerXContent(XContentBuilder builder) throws IOException {
|
||||
builder.startObject(name);
|
||||
// write common options
|
||||
|
@ -525,7 +504,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
|
|||
String fieldname = parser.currentName();
|
||||
return new Field(fieldname);
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unknown token type [{}], expected field name", parser.currentToken());
|
||||
throw new ParsingException(parser.getTokenLocation(), "unknown token type [{}], expected field name",
|
||||
parser.currentToken());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -39,15 +39,11 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.singletonMap;

/**
 *
 */
public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
    private static final List<String> STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain");
    private static final Map<String, ? extends SearchParseElement> PARSE_ELEMENTS = singletonMap("highlight",
        new HighlighterParseElement());

    private final Highlighters highlighters;

@ -57,9 +53,13 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
        this.highlighters = highlighters;
    }

    /**
     * highlighters do not have a parse element, they use
     * {@link HighlightBuilder#fromXContent(org.elasticsearch.index.query.QueryParseContext)} for parsing instead.
     */
    @Override
    public Map<String, ? extends SearchParseElement> parseElements() {
        return PARSE_ELEMENTS;
        return Collections.emptyMap();
    }

    @Override
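With the highlight parse element removed, the highlight section of a request is meant to be parsed into a HighlightBuilder and converted to a SearchContextHighlight before it reaches this phase. A loose sketch of that flow; fromXContent and build are assumed from the javadoc reference above and from the builder hunks earlier in this commit, and the context variables are placeholders:

----------------------------------------------------
// hedged sketch of the builder-based path that replaces HighlighterParseElement
HighlightBuilder highlight = HighlightBuilder.PROTOTYPE.fromXContent(queryParseContext);
SearchContextHighlight contextHighlight = highlight.build(queryShardContext);
searchContext.highlight(contextHighlight);
----------------------------------------------------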
@ -1,246 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.highlight;
|
||||
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.SearchParseElement;
|
||||
import org.elasticsearch.search.SearchParseException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* <pre>
|
||||
* highlight : {
|
||||
* tags_schema : "styled",
|
||||
* pre_tags : ["tag1", "tag2"],
|
||||
* post_tags : ["tag1", "tag2"],
|
||||
* order : "score",
|
||||
* highlight_filter : true,
|
||||
* fields : {
|
||||
* field1 : { },
|
||||
* field2 : { fragment_size : 100, number_of_fragments : 2 },
|
||||
* field3 : { number_of_fragments : 5, order : "simple", tags_schema : "styled" },
|
||||
* field4 : { number_of_fragments: 0, pre_tags : ["openingTagA", "openingTagB"], post_tags : ["closingTag"] }
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public class HighlighterParseElement implements SearchParseElement {
|
||||
|
||||
@Override
|
||||
public void parse(XContentParser parser, SearchContext context) throws Exception {
|
||||
try {
|
||||
context.highlight(parse(parser, context.getQueryShardContext()));
|
||||
} catch (IllegalArgumentException ex) {
|
||||
throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
|
||||
public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
|
||||
XContentParser.Token token;
|
||||
String topLevelFieldName = null;
|
||||
final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>();
|
||||
|
||||
final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = HighlightBuilder.defaultFieldOptions();
|
||||
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
topLevelFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if ("pre_tags".equals(topLevelFieldName) || "preTags".equals(topLevelFieldName)) {
|
||||
List<String> preTagsList = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
preTagsList.add(parser.text());
|
||||
}
|
||||
globalOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()]));
|
||||
} else if ("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) {
|
||||
List<String> postTagsList = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
postTagsList.add(parser.text());
|
||||
}
|
||||
globalOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()]));
|
||||
} else if ("fields".equals(topLevelFieldName)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
String highlightFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
if (highlightFieldName != null) {
|
||||
throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field");
|
||||
}
|
||||
highlightFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("order".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.scoreOrdered("score".equals(parser.text()));
|
||||
} else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) {
|
||||
String schema = parser.text();
|
||||
if ("styled".equals(schema)) {
|
||||
globalOptionsBuilder.preTags(HighlightBuilder.DEFAULT_STYLED_PRE_TAG);
|
||||
globalOptionsBuilder.postTags(HighlightBuilder.DEFAULT_STYLED_POST_TAGS);
|
||||
}
|
||||
} else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.highlightFilter(parser.booleanValue());
|
||||
} else if ("fragment_size".equals(topLevelFieldName) || "fragmentSize".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.fragmentCharSize(parser.intValue());
|
||||
} else if ("number_of_fragments".equals(topLevelFieldName) || "numberOfFragments".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.numberOfFragments(parser.intValue());
|
||||
} else if ("encoder".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.encoder(parser.text());
|
||||
} else if ("require_field_match".equals(topLevelFieldName) || "requireFieldMatch".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.requireFieldMatch(parser.booleanValue());
|
||||
} else if ("boundary_max_scan".equals(topLevelFieldName) || "boundaryMaxScan".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.boundaryMaxScan(parser.intValue());
|
||||
} else if ("boundary_chars".equals(topLevelFieldName) || "boundaryChars".equals(topLevelFieldName)) {
|
||||
char[] charsArr = parser.text().toCharArray();
|
||||
Character[] globalBoundaryChars = new Character[charsArr.length];
|
||||
for (int i = 0; i < charsArr.length; i++) {
|
||||
globalBoundaryChars[i] = charsArr[i];
|
||||
}
|
||||
globalOptionsBuilder.boundaryChars(globalBoundaryChars);
|
||||
} else if ("type".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.highlighterType(parser.text());
|
||||
} else if ("fragmenter".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.fragmenter(parser.text());
|
||||
} else if ("no_match_size".equals(topLevelFieldName) || "noMatchSize".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.noMatchSize(parser.intValue());
|
||||
} else if ("force_source".equals(topLevelFieldName) || "forceSource".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.forceSource(parser.booleanValue());
|
||||
} else if ("phrase_limit".equals(topLevelFieldName) || "phraseLimit".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.phraseLimit(parser.intValue());
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT && "options".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.options(parser.map());
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("fields".equals(topLevelFieldName)) {
|
||||
String highlightFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
highlightFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
|
||||
}
|
||||
}
|
||||
} else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) {
|
||||
globalOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final SearchContextHighlight.FieldOptions globalOptions = globalOptionsBuilder.build();
|
||||
if (globalOptions.preTags() != null && globalOptions.postTags() == null) {
|
||||
throw new IllegalArgumentException("Highlighter global preTags are set, but global postTags are not set");
|
||||
}
|
||||
|
||||
final List<SearchContextHighlight.Field> fields = new ArrayList<>();
|
||||
// now, go over and fill all fieldsOptions with default values from the global state
|
||||
for (final Tuple<String, SearchContextHighlight.FieldOptions.Builder> tuple : fieldsOptions) {
|
||||
fields.add(new SearchContextHighlight.Field(tuple.v1(), tuple.v2().merge(globalOptions).build()));
|
||||
}
|
||||
return new SearchContextHighlight(fields);
|
||||
}
|
||||
|
||||
private static SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
|
||||
XContentParser.Token token;
|
||||
|
||||
final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
|
||||
String fieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if ("pre_tags".equals(fieldName) || "preTags".equals(fieldName)) {
|
||||
List<String> preTagsList = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
preTagsList.add(parser.text());
|
||||
}
|
||||
fieldOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()]));
|
||||
} else if ("post_tags".equals(fieldName) || "postTags".equals(fieldName)) {
|
||||
List<String> postTagsList = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
postTagsList.add(parser.text());
|
||||
}
|
||||
fieldOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()]));
|
||||
} else if ("matched_fields".equals(fieldName) || "matchedFields".equals(fieldName)) {
|
||||
Set<String> matchedFields = new HashSet<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
matchedFields.add(parser.text());
|
||||
}
|
||||
fieldOptionsBuilder.matchedFields(matchedFields);
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("fragment_size".equals(fieldName) || "fragmentSize".equals(fieldName)) {
|
||||
fieldOptionsBuilder.fragmentCharSize(parser.intValue());
|
||||
} else if ("number_of_fragments".equals(fieldName) || "numberOfFragments".equals(fieldName)) {
|
||||
fieldOptionsBuilder.numberOfFragments(parser.intValue());
|
||||
} else if ("fragment_offset".equals(fieldName) || "fragmentOffset".equals(fieldName)) {
|
||||
fieldOptionsBuilder.fragmentOffset(parser.intValue());
|
||||
} else if ("highlight_filter".equals(fieldName) || "highlightFilter".equals(fieldName)) {
|
||||
fieldOptionsBuilder.highlightFilter(parser.booleanValue());
|
||||
} else if ("order".equals(fieldName)) {
|
||||
fieldOptionsBuilder.scoreOrdered("score".equals(parser.text()));
|
||||
} else if ("require_field_match".equals(fieldName) || "requireFieldMatch".equals(fieldName)) {
|
||||
fieldOptionsBuilder.requireFieldMatch(parser.booleanValue());
|
||||
} else if ("boundary_max_scan".equals(fieldName) || "boundaryMaxScan".equals(fieldName)) {
|
||||
fieldOptionsBuilder.boundaryMaxScan(parser.intValue());
|
||||
} else if ("boundary_chars".equals(fieldName) || "boundaryChars".equals(fieldName)) {
|
||||
char[] charsArr = parser.text().toCharArray();
|
||||
Character[] boundaryChars = new Character[charsArr.length];
|
||||
for (int i = 0; i < charsArr.length; i++) {
|
||||
boundaryChars[i] = charsArr[i];
|
||||
}
|
||||
fieldOptionsBuilder.boundaryChars(boundaryChars);
|
||||
} else if ("type".equals(fieldName)) {
|
||||
fieldOptionsBuilder.highlighterType(parser.text());
|
||||
} else if ("fragmenter".equals(fieldName)) {
|
||||
fieldOptionsBuilder.fragmenter(parser.text());
|
||||
} else if ("no_match_size".equals(fieldName) || "noMatchSize".equals(fieldName)) {
|
||||
fieldOptionsBuilder.noMatchSize(parser.intValue());
|
||||
} else if ("force_source".equals(fieldName) || "forceSource".equals(fieldName)) {
|
||||
fieldOptionsBuilder.forceSource(parser.booleanValue());
|
||||
} else if ("phrase_limit".equals(fieldName) || "phraseLimit".equals(fieldName)) {
|
||||
fieldOptionsBuilder.phraseLimit(parser.intValue());
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) {
|
||||
fieldOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
|
||||
} else if ("options".equals(fieldName)) {
|
||||
fieldOptionsBuilder.options(parser.map());
|
||||
}
|
||||
}
|
||||
}
|
||||
return fieldOptionsBuilder;
|
||||
}
|
||||
}
|
|
@ -354,7 +354,6 @@ public class SearchContextHighlight {
|
|||
if (fieldOptions.phraseLimit == -1) {
|
||||
fieldOptions.phraseLimit = globalOptions.phraseLimit;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
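The merge step above fills unset per-field options (marked with -1 or null) from the global options. A small sketch; globalOptions stands in for the built global FieldOptions:

----------------------------------------------------
// hedged sketch: explicit values win, everything left unset falls back to globalOptions
SearchContextHighlight.FieldOptions.Builder fieldBuilder = new SearchContextHighlight.FieldOptions.Builder();
fieldBuilder.numberOfFragments(3);                 // explicitly set, kept after the merge
SearchContextHighlight.FieldOptions options = fieldBuilder.merge(globalOptions).build();
// phraseLimit was never set on the field, so it now carries globalOptions.phraseLimit
----------------------------------------------------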
|
|
@ -49,6 +49,8 @@ public class SubSearchContext extends FilteredSearchContext {
    private int from;
    private int size = DEFAULT_SIZE;
    private Sort sort;
    private ParsedQuery parsedQuery;
    private Query query;

    private final FetchSearchResult fetchSearchResult;
    private final QuerySearchResult querySearchResult;

@ -185,6 +187,25 @@ public class SubSearchContext extends FilteredSearchContext {
        return sort;
    }

    @Override
    public SearchContext parsedQuery(ParsedQuery parsedQuery) {
        this.parsedQuery = parsedQuery;
        if (parsedQuery != null) {
            this.query = parsedQuery.query();
        }
        return this;
    }

    @Override
    public ParsedQuery parsedQuery() {
        return parsedQuery;
    }

    @Override
    public Query query() {
        return query;
    }

    @Override
    public SearchContext trackScores(boolean trackScores) {
        this.trackScores = trackScores;
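With these overrides a SubSearchContext carries its own parsed query instead of exposing the wrapped context's query. A tiny sketch; the parent context is a placeholder and parsedMatchAllQuery() is reused from the existing ParsedQuery helpers:

----------------------------------------------------
// hedged sketch of the new setter/getter pair above
SubSearchContext sub = new SubSearchContext(parentSearchContext);   // placeholder parent context
sub.parsedQuery(ParsedQuery.parsedMatchAllQuery());
Query query = sub.query();   // the match_all query, no longer the parent context's query
----------------------------------------------------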
@ -44,7 +44,7 @@ public class MinDocQueryTests extends ESTestCase {
|
|||
final int numDocs = randomIntBetween(10, 200);
|
||||
final Document doc = new Document();
|
||||
final Directory dir = newDirectory();
|
||||
final RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
|
||||
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
|
||||
for (int i = 0; i < numDocs; ++i) {
|
||||
w.addDocument(doc);
|
||||
}
|
||||
|
|
|
@ -339,9 +339,9 @@ public class ESExceptionTests extends ESTestCase {
|
|||
}
|
||||
assertArrayEquals(e.getStackTrace(), ex.getStackTrace());
|
||||
assertTrue(e.getStackTrace().length > 1);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), t);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), ex);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), e);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), t);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), ex);
|
||||
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -203,7 +203,7 @@ public class BulkProcessorIT extends ESIntegTestCase {
|
|||
//let's make sure that the bulk action limit trips, one single execution will index all the documents
|
||||
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
|
||||
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(randomIntBetween(1, 10),
|
||||
RandomPicks.randomFrom(getRandom(), ByteSizeUnit.values())))
|
||||
RandomPicks.randomFrom(random(), ByteSizeUnit.values())))
|
||||
.build();
|
||||
|
||||
MultiGetRequestBuilder multiGetRequestBuilder = indexDocs(client(), processor, numDocs);
|
||||
|
|
|
@ -26,7 +26,7 @@ import static org.apache.lucene.util.TestUtil.randomSimpleString;
|
|||
|
||||
public class BulkShardRequestTests extends ESTestCase {
|
||||
public void testToString() {
|
||||
String index = randomSimpleString(getRandom(), 10);
|
||||
String index = randomSimpleString(random(), 10);
|
||||
int count = between(1, 100);
|
||||
BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), false, new BulkItemRequest[count]);
|
||||
assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests", r.toString());
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.search;
|
||||
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class TransportSearchIT extends ESIntegTestCase {
|
||||
|
||||
public void testShardCountLimit() throws Exception {
|
||||
try {
|
||||
final int numPrimaries1 = randomIntBetween(2, 10);
|
||||
final int numPrimaries2 = randomIntBetween(1, 10);
|
||||
assertAcked(prepareCreate("test1")
|
||||
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries1));
|
||||
assertAcked(prepareCreate("test2")
|
||||
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries2));
|
||||
ensureYellow("test1", "test2");
|
||||
|
||||
// no exception
|
||||
client().prepareSearch("test1").get();
|
||||
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings()
|
||||
.setTransientSettings(Collections.singletonMap(
|
||||
TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1)));
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> client().prepareSearch("test1").get());
|
||||
assertThat(e.getMessage(), containsString("Trying to query " + numPrimaries1
|
||||
+ " shards, which is over the limit of " + (numPrimaries1 - 1)));
|
||||
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings()
|
||||
.setTransientSettings(Collections.singletonMap(
|
||||
TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1)));
|
||||
|
||||
// no exception
|
||||
client().prepareSearch("test1").get();
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> client().prepareSearch("test1", "test2").get());
|
||||
assertThat(e.getMessage(), containsString("Trying to query " + (numPrimaries1 + numPrimaries2)
|
||||
+ " shards, which is over the limit of " + numPrimaries1));
|
||||
|
||||
} finally {
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings()
|
||||
.setTransientSettings(Collections.singletonMap(
|
||||
TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), null)));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -163,7 +163,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
|
|||
|
||||
public void testSearchingFilteringAliasesSingleIndex() throws Exception {
|
||||
logger.info("--> creating index [test]");
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text,fielddata=true"));
|
||||
|
||||
ensureGreen();
|
||||
|
||||
|
|
|
@ -70,7 +70,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
|
|||
// cause differences when the random string generated contains these complex characters. To mitigate
|
||||
// the problem, we skip any strings containing these characters.
|
||||
// TODO: only skip strings containing complex chars when comparing against ES <= 1.3.x
|
||||
input = TestUtil.randomAnalysisString(getRandom(), 100, false);
|
||||
input = TestUtil.randomAnalysisString(random(), 100, false);
|
||||
matcher = complexUnicodeChars.matcher(input);
|
||||
} while (matcher.find());
|
||||
|
||||
|
@ -104,7 +104,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
|
|||
}
|
||||
|
||||
private String randomAnalyzer() {
|
||||
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
|
||||
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
|
||||
return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
|
||||
|
|
|
@ -318,7 +318,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
|
|||
IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
|
||||
String[] indexForDoc = new String[docs.length];
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
docs[i] = client().prepareIndex(indexForDoc[i] = RandomPicks.randomFrom(getRandom(), indices), "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble());
|
||||
docs[i] = client().prepareIndex(indexForDoc[i] = RandomPicks.randomFrom(random(), indices), "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble());
|
||||
}
|
||||
indexRandom(true, docs);
|
||||
for (String index : indices) {
|
||||
|
|
|
@ -63,7 +63,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {
|
|||
|
||||
TestIteration() {
|
||||
threadPool = new ThreadPool("transport-client-nodes-service-tests");
|
||||
transport = new FailAndRetryMockTransport<TestResponse>(getRandom()) {
|
||||
transport = new FailAndRetryMockTransport<TestResponse>(random()) {
|
||||
@Override
|
||||
public List<String> getLocalAddresses() {
|
||||
return Collections.emptyList();
|
||||
|
|
|
@ -492,7 +492,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
|
|||
public IndexMetaData randomCreate(String name) {
|
||||
IndexMetaData.Builder builder = IndexMetaData.builder(name);
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
setRandomIndexSettings(getRandom(), settingsBuilder);
|
||||
setRandomIndexSettings(random(), settingsBuilder);
|
||||
settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion(random()));
|
||||
builder.settings(settingsBuilder);
|
||||
builder.numberOfShards(randomIntBetween(1, 10)).numberOfReplicas(randomInt(10));
|
||||
|
@ -672,6 +672,6 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
|
|||
* Generates a random name that starts with the given prefix
|
||||
*/
|
||||
private String randomName(String prefix) {
|
||||
return prefix + Strings.randomBase64UUID(getRandom());
|
||||
return prefix + Strings.randomBase64UUID(random());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -61,7 +61,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase {


logger.info("--> starting 2 nodes on the same rack");
internalCluster().startNodesAsync(2, Settings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_1").build()).get();
internalCluster().startNodesAsync(2, Settings.settingsBuilder().put(commonSettings).put("node.attr.rack_id", "rack_1").build()).get();

createIndex("test1");
createIndex("test2");

@@ -74,7 +74,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
ensureGreen();

logger.info("--> starting 1 node on a different rack");
final String node3 = internalCluster().startNode(Settings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_2").build());
final String node3 = internalCluster().startNode(Settings.settingsBuilder().put(commonSettings).put("node.attr.rack_id", "rack_2").build());

// On slow machines the initial relocation might be delayed
assertThat(awaitBusy(

@@ -113,10 +113,10 @@ public class AwarenessAllocationIT extends ESIntegTestCase {

logger.info("--> starting 4 nodes on different zones");
List<String> nodes = internalCluster().startNodesAsync(
Settings.settingsBuilder().put(commonSettings).put("node.zone", "a").build(),
Settings.settingsBuilder().put(commonSettings).put("node.zone", "b").build(),
Settings.settingsBuilder().put(commonSettings).put("node.zone", "b").build(),
Settings.settingsBuilder().put(commonSettings).put("node.zone", "a").build()
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "a").build(),
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "b").build(),
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "b").build(),
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "a").build()
).get();
String A_0 = nodes.get(0);
String B_0 = nodes.get(1);

@@ -159,8 +159,8 @@ public class AwarenessAllocationIT extends ESIntegTestCase {

logger.info("--> starting 2 nodes on zones 'a' & 'b'");
List<String> nodes = internalCluster().startNodesAsync(
Settings.settingsBuilder().put(commonSettings).put("node.zone", "a").build(),
Settings.settingsBuilder().put(commonSettings).put("node.zone", "b").build()
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "a").build(),
Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "b").build()
).get();
String A_0 = nodes.get(0);
String B_0 = nodes.get(1);

@@ -183,7 +183,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
assertThat(counts.get(B_0), equalTo(5));
logger.info("--> starting another node in zone 'b'");

String B_1 = internalCluster().startNode(Settings.settingsBuilder().put(commonSettings).put("node.zone", "b").build());
String B_1 = internalCluster().startNode(Settings.settingsBuilder().put(commonSettings).put("node.attr.zone", "b").build());
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("3").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
client().admin().cluster().prepareReroute().get();

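The awareness hunks above all move custom node attributes under the node.attr. prefix. A minimal sketch of the new form, reusing the commonSettings builder pattern from these hunks (the variable names come from the test, nothing new is introduced):

Settings rackSettings = Settings.settingsBuilder()
    .put(commonSettings)
    .put("node.attr.rack_id", "rack_1") // previously "node.rack_id"
    .build();
internalCluster().startNodesAsync(2, rackSettings).get();
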
@@ -33,23 +33,12 @@ import static org.hamcrest.CoreMatchers.equalTo;

public class DiscoveryNodeServiceTests extends ESTestCase {

public void testClientNodeSettingIsProhibited() {
Settings settings = Settings.builder().put("node.client", randomBoolean()).build();
try {
new DiscoveryNodeService(settings, Version.CURRENT).buildLocalNode(DummyTransportAddress.INSTANCE);
fail("build attributes should have failed");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("node.client setting is no longer supported, use node.master, " +
"node.data and node.ingest explicitly instead"));
}
}

public void testBuildLocalNode() {
Map<String, String> expectedAttributes = new HashMap<>();
int numCustomSettings = randomIntBetween(0, 5);
Settings.Builder builder = Settings.builder();
for (int i = 0; i < numCustomSettings; i++) {
builder.put("node.attr" + i, "value" + i);
builder.put("node.attr.attr" + i, "value" + i);
expectedAttributes.put("attr" + i, "value" + i);
}
Set<DiscoveryNode.Role> selectedRoles = new HashSet<>();

@@ -76,7 +65,7 @@ public class DiscoveryNodeServiceTests extends ESTestCase {
int numCustomSettings = randomIntBetween(0, 5);
Settings.Builder builder = Settings.builder();
for (int i = 0; i < numCustomSettings; i++) {
builder.put("node.attr" + i, "value" + i);
builder.put("node.attr.attr" + i, "value" + i);
expectedAttributes.put("attr" + i, "value" + i);
}
DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(builder.build(), Version.CURRENT);

@@ -72,7 +72,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
}

public void testSerialization() throws Exception {
UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null);
UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null);
BytesStreamOutput out = new BytesStreamOutput();
meta.writeTo(out);
out.close();

@@ -273,7 +273,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
public void testUnassignedDelayOnlyNodeLeftNonNodeLeftReason() throws Exception {
EnumSet<UnassignedInfo.Reason> reasons = EnumSet.allOf(UnassignedInfo.Reason.class);
reasons.remove(UnassignedInfo.Reason.NODE_LEFT);
UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(random(), reasons), null);
long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY);
assertThat(delay, equalTo(0L));

@@ -313,7 +313,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase {
public void testNoRebalanceOnPrimaryOverload() {
Settings.Builder settings = settingsBuilder();
AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(),
new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()),
NoopGatewayAllocator.INSTANCE, new ShardsAllocator() {

public Map<DiscoveryNode, Float> weighShard(RoutingAllocation allocation, ShardRouting shard) {

@@ -56,7 +56,7 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase {
* already allocated on a node and balances the cluster to gain optimal
* balance.*/
public void testRandomDecisions() {
RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(getRandom());
RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(random());
AllocationService strategy = new AllocationService(settingsBuilder().build(), new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY),
randomAllocationDecider))), NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE);

@@ -155,14 +155,14 @@ public class EnableAllocationTests extends ESAllocationTestCase {

public void testEnableClusterBalance() {
final boolean useClusterSetting = randomBoolean();
final Rebalance allowedOnes = RandomPicks.randomFrom(getRandom(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL));
final Rebalance allowedOnes = RandomPicks.randomFrom(random(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL));
Settings build = settingsBuilder()
.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings
.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(random(), Rebalance.values())) // index settings override cluster settings
.put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3)
.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10)
.build();
ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
AllocationService strategy = createAllocationService(build, clusterSettings, getRandom());
AllocationService strategy = createAllocationService(build, clusterSettings, random());
Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build();

logger.info("Building initial routing table");

@@ -260,11 +260,11 @@ public class EnableAllocationTests extends ESAllocationTestCase {
public void testEnableClusterBalanceNoReplicas() {
final boolean useClusterSetting = randomBoolean();
Settings build = settingsBuilder()
.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings
.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(random(), Rebalance.values())) // index settings override cluster settings
.put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3)
.build();
ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
AllocationService strategy = createAllocationService(build, clusterSettings, getRandom());
AllocationService strategy = createAllocationService(build, clusterSettings, random());
Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build();

logger.info("Building initial routing table");

@@ -46,9 +46,9 @@ public class ClusterSearchShardsIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
switch(nodeOrdinal) {
case 1:
return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("node.tag", "B").build();
return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("node.attr.tag", "B").build();
case 0:
return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("node.tag", "A").build();
return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("node.attr.tag", "A").build();
}
return super.nodeSettings(nodeOrdinal);
}

@@ -30,10 +30,10 @@ public class BytesReferenceTests extends ESTestCase {
final int len = randomIntBetween(0, randomBoolean() ? 10: 100000);
final int offset1 = randomInt(5);
final byte[] array1 = new byte[offset1 + len + randomInt(5)];
getRandom().nextBytes(array1);
random().nextBytes(array1);
final int offset2 = randomInt(offset1);
final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length);

final BytesArray b1 = new BytesArray(array1, offset1, len);
final BytesArray b2 = new BytesArray(array2, offset2, len);
assertTrue(BytesReference.Helper.bytesEqual(b1, b2));

@@ -115,7 +115,7 @@ public class PagedBytesReferenceTests extends ESTestCase {

// buffer for bulk reads
byte[] origBuf = new byte[length];
getRandom().nextBytes(origBuf);
random().nextBytes(origBuf);
byte[] targetBuf = Arrays.copyOf(origBuf, origBuf.length);

// bulk-read 0 bytes: must not modify buffer

@@ -172,7 +172,7 @@ public class PagedBytesReferenceTests extends ESTestCase {
byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length);
// randomized target buffer to ensure no stale slots
byte[] targetBytes = new byte[pbrBytesWithOffset.length];
getRandom().nextBytes(targetBytes);
random().nextBytes(targetBytes);

// bulk-read all
si.readFully(targetBytes);

@@ -574,7 +574,7 @@ public class PagedBytesReferenceTests extends ESTestCase {
ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
try {
for (int i = 0; i < length; i++) {
out.writeByte((byte) getRandom().nextInt(1 << 8));
out.writeByte((byte) random().nextInt(1 << 8));
}
} catch (IOException e) {
fail("should not happen " + e.getMessage());

@@ -46,7 +46,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRandom() throws IOException {
Random r = getRandom();
Random r = random();
for (int i = 0; i < 10; i++) {
byte bytes[] = new byte[TestUtil.nextInt(r, 1, 100000)];
r.nextBytes(bytes);

@@ -55,7 +55,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRandomThreads() throws Exception {
final Random r = getRandom();
final Random r = random();
int threadCount = TestUtil.nextInt(r, 2, 6);
Thread[] threads = new Thread[threadCount];
final CountDownLatch startingGun = new CountDownLatch(1);

@@ -86,7 +86,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testLineDocs() throws IOException {
Random r = getRandom();
Random r = random();
LineFileDocs lineFileDocs = new LineFileDocs(r);
for (int i = 0; i < 10; i++) {
int numDocs = TestUtil.nextInt(r, 1, 200);

@@ -101,7 +101,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testLineDocsThreads() throws Exception {
final Random r = getRandom();
final Random r = random();
int threadCount = TestUtil.nextInt(r, 2, 6);
Thread[] threads = new Thread[threadCount];
final CountDownLatch startingGun = new CountDownLatch(1);

@@ -138,7 +138,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsL() throws IOException {
Random r = getRandom();
Random r = random();
for (int i = 0; i < 10; i++) {
int numLongs = TestUtil.nextInt(r, 1, 10000);
ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -161,7 +161,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsLThreads() throws Exception {
final Random r = getRandom();
final Random r = random();
int threadCount = TestUtil.nextInt(r, 2, 6);
Thread[] threads = new Thread[threadCount];
final CountDownLatch startingGun = new CountDownLatch(1);

@@ -206,7 +206,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsI() throws IOException {
Random r = getRandom();
Random r = random();
for (int i = 0; i < 10; i++) {
int numInts = TestUtil.nextInt(r, 1, 20000);
ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -225,7 +225,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsIThreads() throws Exception {
final Random r = getRandom();
final Random r = random();
int threadCount = TestUtil.nextInt(r, 2, 6);
Thread[] threads = new Thread[threadCount];
final CountDownLatch startingGun = new CountDownLatch(1);

@@ -266,7 +266,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsS() throws IOException {
Random r = getRandom();
Random r = random();
for (int i = 0; i < 10; i++) {
int numShorts = TestUtil.nextInt(r, 1, 40000);
ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -283,7 +283,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testMixed() throws IOException {
Random r = getRandom();
Random r = random();
LineFileDocs lineFileDocs = new LineFileDocs(r);
for (int i = 0; i < 2; ++i) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -349,7 +349,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
}

public void testRepetitionsSThreads() throws Exception {
final Random r = getRandom();
final Random r = random();
int threadCount = TestUtil.nextInt(r, 2, 6);
Thread[] threads = new Thread[threadCount];
final CountDownLatch startingGun = new CountDownLatch(1);

@@ -396,8 +396,8 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
OutputStreamStreamOutput rawOs = new OutputStreamStreamOutput(bos);
StreamOutput os = c.streamOutput(rawOs);

Random r = getRandom();
int bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(getRandom(), 1, 70000);
Random r = random();
int bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(random(), 1, 70000);
int prepadding = r.nextInt(70000);
int postpadding = r.nextInt(70000);
byte buffer[] = new byte[prepadding + bufferSize + postpadding];

@@ -417,7 +417,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
StreamInput in = c.streamInput(compressedIn);

// randomize constants again
bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(getRandom(), 1, 70000);
bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(random(), 1, 70000);
prepadding = r.nextInt(70000);
postpadding = r.nextInt(70000);
buffer = new byte[prepadding + bufferSize + postpadding];

@@ -72,7 +72,7 @@ public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
Compressor defaultCompressor = CompressorFactory.defaultCompressor();
try {
CompressorFactory.setDefaultCompressor(compressor);
Random r = getRandom();
Random r = random();
for (int i = 0; i < 1000; i++) {
String string = TestUtil.randomUnicodeString(r, 10000);
// hack to make it detected as YAML

@@ -72,7 +72,7 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase<EnvelopeB
}

static EnvelopeBuilder createRandomShape() {
Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom()));
Rectangle box = RandomShapeGenerator.xRandomRectangle(random(), RandomShapeGenerator.xRandomPoint(random()));
EnvelopeBuilder envelope = new EnvelopeBuilder(new Coordinate(box.getMinX(), box.getMaxY()),
new Coordinate(box.getMaxX(), box.getMinY()));
return envelope;

@@ -100,7 +100,7 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
}
mutation.shapes.set(shapePosition, shapeToChange);
} else {
mutation.shape(RandomShapeGenerator.createShape(getRandom()));
mutation.shape(RandomShapeGenerator.createShape(random()));
}
return mutation;
}

@@ -69,7 +69,7 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStr
}

static LineStringBuilder createRandomShape() {
LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING);
LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(random(), ShapeType.LINESTRING);
if (randomBoolean()) {
lsb.close();
}

@@ -62,7 +62,7 @@ public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase<Mu
}
}
} else {
mutation.linestring((LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING));
mutation.linestring((LineStringBuilder) RandomShapeGenerator.createShape(random(), ShapeType.LINESTRING));
}
return mutation;
}

@@ -74,6 +74,6 @@ public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase<MultiPo
}

static MultiPointBuilder createRandomShape() {
return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT);
return (MultiPointBuilder) RandomShapeGenerator.createShape(random(), ShapeType.MULTIPOINT);
}
}

@@ -50,7 +50,7 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase<Multi
int polyToChange = randomInt(mutation.polygons().size() - 1);
mutation.polygons().set(polyToChange, PolygonBuilderTests.mutatePolygonBuilder(mutation.polygons().get(polyToChange)));
} else {
mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON));
mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON));
}
}
return mutation;

@@ -60,7 +60,7 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase<Multi
MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values()));
int polys = randomIntBetween(0, 10);
for (int i = 0; i < polys; i++) {
PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON);
PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON);
mpb.polygon(pgb);
}
return mpb;

@@ -42,7 +42,7 @@ public class PointBuilderTests extends AbstractShapeBuilderTestCase<PointBuilder
}

static PointBuilder createRandomShape() {
return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT);
return (PointBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POINT);
}

@@ -89,7 +89,7 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase<PolygonBui
}

static PolygonBuilder createRandomShape() {
PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON);
PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON);
if (randomBoolean()) {
pgb = polyWithOposingOrientation(pgb);
}

@@ -495,7 +495,7 @@ public class BytesStreamsTests extends ESTestCase {
// create & fill byte[] with randomized data
protected byte[] randomizedByteArrayWithSize(int size) {
byte[] data = new byte[size];
getRandom().nextBytes(data);
random().nextBytes(data);
return data;
}

@@ -324,7 +324,7 @@ public class LuceneTests extends ESTestCase {

public void testCount() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
RandomIndexWriter w = new RandomIndexWriter(random(), dir);

try (DirectoryReader reader = w.getReader()) {
// match_all does not match anything on an empty index

@@ -106,7 +106,7 @@ public abstract class AbstractRecyclerTestCase extends ESTestCase {
Recycler<byte[]> r = newRecycler(limit);
Recycler.V<byte[]> o = r.obtain();
assertFresh(o.v());
getRandom().nextBytes(o.v());
random().nextBytes(o.v());
o.close();
o = r.obtain();
assertRecycled(o.v());

@@ -166,7 +166,7 @@ public abstract class AbstractRecyclerTestCase extends ESTestCase {
assertFresh(data);

// randomize & return to pool
getRandom().nextBytes(data);
random().nextBytes(data);
o.close();

// verify that recycle() ran

@@ -31,7 +31,7 @@ public class RegexTests extends ESTestCase {
"LITERAL", "COMMENTS", "UNICODE_CHAR_CLASS", "UNICODE_CHARACTER_CLASS"};
int[] flags = new int[]{Pattern.CASE_INSENSITIVE, Pattern.MULTILINE, Pattern.DOTALL, Pattern.UNICODE_CASE, Pattern.CANON_EQ,
Pattern.UNIX_LINES, Pattern.LITERAL, Pattern.COMMENTS, Regex.UNICODE_CHARACTER_CLASS};
Random random = getRandom();
Random random = random();
int num = 10 + random.nextInt(100);
for (int i = 0; i < num; i++) {
int numFlags = random.nextInt(flags.length + 1);

@@ -63,4 +63,4 @@ public class RegexTests extends ESTestCase {
assertTrue(Regex.simpleMatch("fff*******ddd", "fffabcddd"));
assertFalse(Regex.simpleMatch("fff******ddd", "fffabcdd"));
}
}
}

@@ -235,7 +235,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

public void testByteArrayBulkGet() {
final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
getRandom().nextBytes(array1);
random().nextBytes(array1);
final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
for (int i = 0; i < array1.length; ++i) {
array2.set(i, array1[i]);

@@ -252,7 +252,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

public void testByteArrayBulkSet() {
final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
getRandom().nextBytes(array1);
random().nextBytes(array1);
final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
for (int i = 0; i < array1.length; ) {
final int len = Math.min(array1.length - i, randomBoolean() ? randomInt(10) : randomInt(3 * BigArrays.BYTE_PAGE_SIZE));

@@ -315,7 +315,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

// large arrays should be different
final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
getRandom().nextBytes(array1);
random().nextBytes(array1);
final int array1Hash = Arrays.hashCode(array1);
final ByteArray array2 = byteArrayWithBytes(array1);
final int array2Hash = bigArrays.hashCode(array2);

@@ -110,7 +110,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
for (int i = 0; i < 797; i++) {
String str;
do {
str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
str = TestUtil.randomRealisticUnicodeString(random(), 1000);
} while (str.length() == 0);
ref.copyChars(str);
long count = hash.size();

@@ -142,7 +142,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
for (int i = 0; i < 797; i++) {
String str;
do {
str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
str = TestUtil.randomRealisticUnicodeString(random(), 1000);
} while (str.length() == 0);
ref.copyChars(str);
long count = hash.size();

@@ -181,7 +181,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
for (int i = 0; i < 797; i++) {
String str;
do {
str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
str = TestUtil.randomRealisticUnicodeString(random(), 1000);
} while (str.length() == 0);
ref.copyChars(str);
long count = hash.size();

@@ -216,7 +216,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
for (int i = 0; i < 797; i++) {
String str;
do {
str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
str = TestUtil.randomRealisticUnicodeString(random(), 1000);
} while (str.length() == 0);
ref.copyChars(str);
long count = hash.size();

@@ -218,7 +218,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

// Simulate a network issue between the unlucky node and elected master node in both directions.

NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, unluckyNode, getRandom());
NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, unluckyNode, random());
setDisruptionScheme(networkDisconnect);
networkDisconnect.startDisrupting();

@@ -562,7 +562,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

String oldMasterNode = internalCluster().getMasterName();
// a very long GC, but it's OK as we remove the disruption when it has had an effect
SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(oldMasterNode, getRandom(), 100, 200, 30000, 60000);
SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(oldMasterNode, random(), 100, 200, 30000, 60000);
internalCluster().setDisruptionScheme(masterNodeDisruption);
masterNodeDisruption.startDisrupting();

@@ -609,7 +609,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
assertMaster(oldMasterNode, nodes);

// Simulating a painful gc by suspending all threads for a long time on the current elected master node.
SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(getRandom(), oldMasterNode);
SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(random(), oldMasterNode);

// Save the majority side
final List<String> majoritySide = new ArrayList<>(nodes);

@@ -779,7 +779,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
}

// Simulate a network issue between the unlucky node and elected master node in both directions.
NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, isolatedNode, getRandom());
NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, isolatedNode, random());
setDisruptionScheme(networkDisconnect);
networkDisconnect.startDisrupting();
// Wait until elected master has removed that the unlucky node...

@@ -816,7 +816,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
}

// Simulate a network issue between the unicast target node and the rest of the cluster
NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(unicastTargetSide, restOfClusterSide, getRandom());
NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(unicastTargetSide, restOfClusterSide, random());
setDisruptionScheme(networkDisconnect);
networkDisconnect.startDisrupting();
// Wait until elected master has removed that the unlucky node...

@@ -955,7 +955,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
public void testClusterFormingWithASlowNode() throws Exception {
configureUnicastCluster(3, null, 2);

SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(getRandom(), 0, 0, 1000, 2000);
SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(random(), 0, 0, 1000, 2000);

// don't wait for initial state, wat want to add the disruption while the cluster is forming..
internalCluster().startNodesAsync(3,

@@ -1035,7 +1035,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
indexRequestBuilderList.add(client().prepareIndex().setIndex("test").setType("doc").setSource("{\"int_field\":1}"));
}
indexRandom(true, indexRequestBuilderList);
SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_2, getRandom());
SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_2, random());

internalCluster().setDisruptionScheme(disruption);
MockTransportService transportServiceNode2 = (MockTransportService) internalCluster().getInstance(TransportService.class, node_2);

@@ -1095,7 +1095,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
ensureYellow();

final String masterNode1 = internalCluster().getMasterName();
NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), getRandom());
NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), random());
internalCluster().setDisruptionScheme(networkPartition);
networkPartition.startDisrupting();
// We know this will time out due to the partition, we check manually below to not proceed until

@@ -1117,9 +1117,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
protected NetworkPartition addRandomPartition() {
NetworkPartition partition;
if (randomBoolean()) {
partition = new NetworkUnresponsivePartition(getRandom());
partition = new NetworkUnresponsivePartition(random());
} else {
partition = new NetworkDisconnectPartition(getRandom());
partition = new NetworkDisconnectPartition(random());
}

setDisruptionScheme(partition);

@@ -1135,9 +1135,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

NetworkPartition partition;
if (randomBoolean()) {
partition = new NetworkUnresponsivePartition(side1, side2, getRandom());
partition = new NetworkUnresponsivePartition(side1, side2, random());
} else {
partition = new NetworkDisconnectPartition(side1, side2, getRandom());
partition = new NetworkDisconnectPartition(side1, side2, random());
}

internalCluster().setDisruptionScheme(partition);

@@ -1148,10 +1148,10 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
private ServiceDisruptionScheme addRandomDisruptionScheme() {
// TODO: add partial partitions
List<ServiceDisruptionScheme> list = Arrays.asList(
new NetworkUnresponsivePartition(getRandom()),
new NetworkDelaysPartition(getRandom()),
new NetworkDisconnectPartition(getRandom()),
new SlowClusterStateProcessing(getRandom())
new NetworkUnresponsivePartition(random()),
new NetworkDelaysPartition(random()),
new NetworkDisconnectPartition(random()),
new SlowClusterStateProcessing(random())
);
Collections.shuffle(list, random());
setDisruptionScheme(list.get(0));

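The disruption tests above share one pattern: build a scheme seeded from the test's Random, install it on the cluster, then start it. A minimal sketch of that pattern, using only the helpers that appear in these hunks (masterNode and unluckyNode are placeholders for node names from the test):

NetworkDisconnectPartition disconnect = new NetworkDisconnectPartition(masterNode, unluckyNode, random());
setDisruptionScheme(disconnect);          // register the scheme with the test cluster
disconnect.startDisrupting();             // then run assertions while the partition is active
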
@@ -349,7 +349,7 @@ public class MetaDataStateFormatTests extends ESTestCase {

@Override
protected Directory newDirectory(Path dir) throws IOException {
MockDirectoryWrapper mock = new MockDirectoryWrapper(getRandom(), super.newDirectory(dir));
MockDirectoryWrapper mock = new MockDirectoryWrapper(random(), super.newDirectory(dir));
closeAfterSuite(mock);
return mock;
}

@@ -105,7 +105,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
* and find a better copy for the shard.
*/
public void testAsyncFetchOnAnythingButIndexCreation() {
UnassignedInfo.Reason reason = RandomPicks.randomFrom(getRandom(), EnumSet.complementOf(EnumSet.of(UnassignedInfo.Reason.INDEX_CREATED)));
UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), EnumSet.complementOf(EnumSet.of(UnassignedInfo.Reason.INDEX_CREATED)));
RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.EMPTY, reason);
testAllocator.clean();
testAllocator.allocateUnassigned(allocation);

@@ -22,6 +22,7 @@ import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryCachingPolicy;

@@ -44,10 +45,13 @@ import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -65,8 +69,10 @@ import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.TestSearchContext;
import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;

@@ -160,14 +166,15 @@ public class IndexModuleTests extends ESTestCase {
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
module.addIndexStore("foo_store", FooStore::new);
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener));
assertTrue(indexService.getIndexStore() instanceof FooStore);
try {
module.addIndexStore("foo_store", FooStore::new);
fail("already registered");
} catch (IllegalArgumentException ex) {
// fine
}
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener));
assertTrue(indexService.getIndexStore() instanceof FooStore);

indexService.close("simon says", false);
}

@@ -215,6 +222,65 @@ public class IndexModuleTests extends ESTestCase {
indexService.close("simon says", false);
}

public void testAddIndexOperationListener() throws IOException {
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
AtomicBoolean executed = new AtomicBoolean(false);
IndexingOperationListener listener = new IndexingOperationListener() {
@Override
public Engine.Index preIndex(Engine.Index operation) {
executed.set(true);
return operation;
}
};
module.addIndexOperationListener(listener);

expectThrows(IllegalArgumentException.class, () -> module.addIndexOperationListener(listener));
expectThrows(IllegalArgumentException.class, () -> module.addIndexOperationListener(null));


IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
new IndicesFieldDataCache(settings, this.listener));
assertEquals(2, indexService.getIndexOperationListeners().size());
assertEquals(IndexingSlowLog.class, indexService.getIndexOperationListeners().get(0).getClass());
assertSame(listener, indexService.getIndexOperationListeners().get(1));

Engine.Index index = new Engine.Index(new Term("_uid", "1"), null);
for (IndexingOperationListener l : indexService.getIndexOperationListeners()) {
l.preIndex(index);
}
assertTrue(executed.get());
indexService.close("simon says", false);
}

public void testAddSearchOperationListener() throws IOException {
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
AtomicBoolean executed = new AtomicBoolean(false);
SearchOperationListener listener = new SearchOperationListener() {

@Override
public void onNewContext(SearchContext context) {
executed.set(true);
}
};
module.addSearchOperationListener(listener);

expectThrows(IllegalArgumentException.class, () -> module.addSearchOperationListener(listener));
expectThrows(IllegalArgumentException.class, () -> module.addSearchOperationListener(null));


IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
new IndicesFieldDataCache(settings, this.listener));
assertEquals(2, indexService.getSearchOperationListener().size());
assertEquals(SearchSlowLog.class, indexService.getSearchOperationListener().get(0).getClass());
assertSame(listener, indexService.getSearchOperationListener().get(1));

for (SearchOperationListener l : indexService.getSearchOperationListener()) {
l.onNewContext(new TestSearchContext(null));
}
assertTrue(executed.get());
indexService.close("simon says", false);
}

public void testAddSimilarity() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)

@@ -245,6 +311,20 @@ public class IndexModuleTests extends ESTestCase {
indexService.close("simon says", false);
}

public void testFrozen() {
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
module.freeze();
String msg = "Can't modify IndexModule once the index service has been created";
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexEventListener(null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexOperationListener(null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSimilarity(null, null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.setSearcherWrapper(null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.forceQueryCacheType("foo")).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addIndexStore("foo", null)).getMessage());
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.registerQueryCache("foo", null)).getMessage());
}

public void testSetupUnknownSimilarity() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put("index.similarity.my_similarity.type", "test_similarity")

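The new IndexModule tests above illustrate the listener registration contract: listeners are added before the index service is built, duplicate or null registrations are rejected, and once newIndexService(...) freezes the module nothing further can be registered. A minimal sketch of that usage, limited to calls that appear in the hunks above:

IndexingOperationListener listener = new IndexingOperationListener() {
    @Override
    public Engine.Index preIndex(Engine.Index operation) {
        return operation; // observe or wrap the operation before it reaches the engine
    }
};
module.addIndexOperationListener(listener); // must happen before module.newIndexService(...) freezes the module
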
@@ -703,8 +703,8 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
public void testIndexOnSharedFSRecoversToAnyNode() throws Exception {
Path dataPath = createTempDir();
Settings nodeSettings = nodeSettings(dataPath);
Settings fooSettings = Settings.builder().put(nodeSettings).put("node.affinity", "foo").build();
Settings barSettings = Settings.builder().put(nodeSettings).put("node.affinity", "bar").build();
Settings fooSettings = Settings.builder().put(nodeSettings).put("node.attr.affinity", "foo").build();
Settings barSettings = Settings.builder().put(nodeSettings).put("node.attr.affinity", "bar").build();

final InternalTestCluster.Async<List<String>> fooNodes = internalCluster().startNodesAsync(2, fooSettings);
final InternalTestCluster.Async<List<String>> barNodes = internalCluster().startNodesAsync(2, barSettings);

@@ -51,7 +51,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testDefaultAnalyzers() throws IOException {
Version version = VersionUtils.randomVersion(getRandom());
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings
.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version)

@@ -65,7 +65,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testOverrideDefaultAnalyzer() throws IOException {
Version version = VersionUtils.randomVersion(getRandom());
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default", analyzerProvider("default")),

@@ -76,7 +76,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testOverrideDefaultIndexAnalyzer() {
Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_5_0_0_alpha1, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
try {
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),

@@ -90,7 +90,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testBackCompatOverrideDefaultIndexAnalyzer() {
Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_index", analyzerProvider("default_index")),

@@ -101,7 +101,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testOverrideDefaultSearchAnalyzer() {
Version version = VersionUtils.randomVersion(getRandom());
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_search", analyzerProvider("default_search")),

@@ -112,7 +112,7 @@ public class AnalysisServiceTests extends ESTestCase {
}

public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
Map<String, AnalyzerProvider> analyzers = new HashMap<>();
analyzers.put("default_index", analyzerProvider("default_index"));

@@ -246,11 +246,11 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
// missing value is set to an actual value
final String[] values = new String[randomIntBetween(2, 30)];
for (int i = 1; i < values.length; ++i) {
values[i] = TestUtil.randomUnicodeString(getRandom());
values[i] = TestUtil.randomUnicodeString(random());
}
final int numDocs = scaledRandomIntBetween(10, 3072);
for (int i = 0; i < numDocs; ++i) {
final String value = RandomPicks.randomFrom(getRandom(), values);
final String value = RandomPicks.randomFrom(random(), values);
if (value == null) {
writer.addDocument(new Document());
} else {

@@ -302,11 +302,11 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
public void testSortMissing(boolean first, boolean reverse) throws IOException {
final String[] values = new String[randomIntBetween(2, 10)];
for (int i = 1; i < values.length; ++i) {
values[i] = TestUtil.randomUnicodeString(getRandom());
values[i] = TestUtil.randomUnicodeString(random());
}
final int numDocs = scaledRandomIntBetween(10, 3072);
for (int i = 0; i < numDocs; ++i) {
final String value = RandomPicks.randomFrom(getRandom(), values);
final String value = RandomPicks.randomFrom(random(), values);
if (value == null) {
writer.addDocument(new Document());
} else {

@@ -355,7 +355,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
public void testNestedSorting(MultiValueMode sortMode) throws IOException {
final String[] values = new String[randomIntBetween(2, 20)];
for (int i = 0; i < values.length; ++i) {
values[i] = TestUtil.randomSimpleString(getRandom());
values[i] = TestUtil.randomSimpleString(random());
}
final int numParents = scaledRandomIntBetween(10, 3072);
List<Document> docs = new ArrayList<>();

@@ -367,14 +367,14 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
final Document child = new Document();
final int numValues = randomInt(3);
for (int k = 0; k < numValues; ++k) {
final String value = RandomPicks.randomFrom(getRandom(), values);
final String value = RandomPicks.randomFrom(random(), values);
addField(child, "text", value);
}
docs.add(child);
}
final Document parent = new Document();
parent.add(new StringField("type", "parent", Store.YES));
final String value = RandomPicks.randomFrom(getRandom(), values);
final String value = RandomPicks.randomFrom(random(), values);
if (value != null) {
addField(parent, "text", value);
}

@@ -400,10 +400,10 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
missingValue = "_last";
break;
case 2:
missingValue = new BytesRef(RandomPicks.randomFrom(getRandom(), values));
missingValue = new BytesRef(RandomPicks.randomFrom(random(), values));
break;
default:
missingValue = new BytesRef(TestUtil.randomSimpleString(getRandom()));
missingValue = new BytesRef(TestUtil.randomSimpleString(random()));
break;
}
Query parentFilter = new TermQuery(new Term("type", "parent"));

@@ -107,7 +107,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
private byte[] randomBytes() {
int size = randomIntBetween(10, 1000);
byte[] bytes = new byte[size];
getRandom().nextBytes(bytes);
random().nextBytes(bytes);
return bytes;
}

@@ -35,6 +35,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("name")
.field("type", "text")
.field("fielddata", true)
.field("eager_global_ordinals", true)
.endObject()
.endObject().endObject().endObject()));

@@ -39,7 +39,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
}

public void testFilterByFrequency() throws Exception {
Random random = getRandom();
Random random = random();
for (int i = 0; i < 1000; i++) {
Document d = new Document();
d.add(new StringField("id", "" + i, Field.Store.NO));

@@ -64,6 +64,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("high_freq")
.fielddata(true)
.fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);

@@ -76,6 +77,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("high_freq")
.fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, 201, 100)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);

@@ -88,6 +90,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
{
ifdService.clear(); // test # docs with value
MappedFieldType ft = new TextFieldMapper.Builder("med_freq")
.fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, Integer.MAX_VALUE, 101)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);

@@ -101,6 +104,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
{
ifdService.clear();
MappedFieldType ft = new TextFieldMapper.Builder("med_freq")
.fielddata(true)
.fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d/200.0d, Integer.MAX_VALUE, 101)
.build(builderCtx).fieldType();
IndexOrdinalsFieldData fieldData = ifdService.getForField(ft);

@@ -45,7 +45,7 @@ public class MultiOrdinalsTests extends ESTestCase {
}

public void testRandomValues() throws IOException {
Random random = getRandom();
Random random = random();
int numDocs = 100 + random.nextInt(1000);
int numOrdinals = 1 + random.nextInt(200);
int numValues = 100 + random.nextInt(100000);

@@ -161,4 +161,31 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("Limit of total fields [1] in index [test2] has been exceeded"));
}
}

public void testMappingDepthExceedsLimit() throws Throwable {
CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("field")
.field("type", "text")
.endObject()
.endObject().endObject().bytes());
IndexService indexService1 = createIndex("test1", Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build());
// no exception
indexService1.mapperService().merge("type", simpleMapping, MergeReason.MAPPING_UPDATE, false);

CompressedXContent objectMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("object1")
.field("type", "object")
.endObject()
.endObject().endObject().bytes());

IndexService indexService2 = createIndex("test2");
// no exception
indexService2.mapperService().merge("type", objectMapping, MergeReason.MAPPING_UPDATE, false);

IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService1.mapperService().merge("type2", objectMapping, MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of mapping depth [1] in index [test1] has been exceeded"));
}
}

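The new MapperService test above treats the mapping depth limit as an index-scoped setting. A minimal sketch of creating such an index, using only the setting key and helpers referenced in the hunk:

IndexService depthLimited = createIndex("test1",
    Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build());
// flat mappings merge fine; nesting an object type then fails with
// "Limit of mapping depth [1] in index [test1] has been exceeded"
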
@@ -102,7 +102,8 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
.startObject().startObject("template_all")
.field("match", "*")
.field("match_mapping_type", "string")
.startObject("mapping").field("type", "text").field("copy_to", "{name}_raw").endObject()
.startObject("mapping").field("type", "text").field("fielddata", true)
.field("copy_to", "{name}_raw").endObject()
.endObject().endObject()

.endArray();

@@ -83,7 +83,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.bytes());

try (Directory dir = new RAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(getRandom())))) {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) {
w.addDocuments(doc.docs());
try (DirectoryReader reader = DirectoryReader.open(w)) {
final LeafReader leaf = reader.leaves().get(0).reader();