commit fab3b5568f
Merge branch 'master' into feature-suggest-refactoring

Conflicts:
	core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java
@@ -164,7 +164,8 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
     protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
         IndexShard indexShard = indexService.getShard(request.shardId().id());
-        final QueryShardContext queryShardContext = indexShard.getQueryShardContext();
+        final QueryShardContext queryShardContext = indexService.newQueryShardContext();
+        queryShardContext.setTypes(request.types());

         boolean valid;
         String explanation = null;
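
This first hunk sets the pattern repeated throughout the merge: instead of pulling a shared, thread-local QueryShardContext off the shard, callers ask the IndexService for a fresh context and set the request's types on it explicitly. A minimal, self-contained sketch of that shape (ShardContext and ContextFactory are hypothetical stand-ins, not the real Elasticsearch classes):

    import java.util.Arrays;

    class ShardContext {
        private String[] types = new String[0];

        // request-scoped state lives on the instance, not in a ThreadLocal
        void setTypes(String... types) {
            this.types = types;
        }

        String[] getTypes() {
            return types;
        }
    }

    class ContextFactory {
        // a fresh context per request: no mutable state shared between requests
        ShardContext newContext() {
            return new ShardContext();
        }
    }

    public class PerRequestContextDemo {
        public static void main(String[] args) {
            ContextFactory factory = new ContextFactory();
            ShardContext ctx = factory.newContext(); // analogous to indexService.newQueryShardContext()
            ctx.setTypes("my_type");                 // analogous to queryShardContext.setTypes(request.types())
            System.out.println(Arrays.toString(ctx.getTypes()));
        }
    }
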
@@ -121,7 +121,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
         SearchContext.setCurrent(context);

         try {
-            context.parsedQuery(indexShard.getQueryShardContext().toQuery(request.query()));
+            context.parsedQuery(context.getQueryShardContext().toQuery(request.query()));
             context.preProcess();
             int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
             Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
@@ -33,6 +33,7 @@ import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.logging.log4j.LogConfigurator;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.monitor.jvm.JvmInfo;
@@ -148,10 +149,11 @@ final class Bootstrap {
     }

     private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception {
-        initializeNatives(environment.tmpFile(),
-                settings.getAsBoolean("bootstrap.mlockall", false),
-                settings.getAsBoolean("bootstrap.seccomp", true),
-                settings.getAsBoolean("bootstrap.ctrlhandler", true));
+        initializeNatives(
+                environment.tmpFile(),
+                BootstrapSettings.MLOCKALL_SETTING.get(settings),
+                BootstrapSettings.SECCOMP_SETTING.get(settings),
+                BootstrapSettings.CTRLHANDLER_SETTING.get(settings));

         // initialize probes before the security manager is installed
         initializeProbes();
@@ -186,22 +188,11 @@ final class Bootstrap {
         node = new Node(nodeSettings);
     }

-    /**
-     * option for elasticsearch.yml etc to turn off our security manager completely,
-     * for example if you want to have your own configuration or just disable.
-     */
-    // TODO: remove this: http://www.openbsd.org/papers/hackfest2015-pledge/mgp00005.jpg
-    static final String SECURITY_SETTING = "security.manager.enabled";
-    /**
-     * option for elasticsearch.yml to fully respect the system policy, including bad defaults
-     * from java.
-     */
-    // TODO: remove this hack when insecure defaults are removed from java
-    static final String SECURITY_FILTER_BAD_DEFAULTS_SETTING = "security.manager.filter_bad_defaults";
-
-
     private void setupSecurity(Settings settings, Environment environment) throws Exception {
-        if (settings.getAsBoolean(SECURITY_SETTING, true)) {
-            Security.configure(environment, settings.getAsBoolean(SECURITY_FILTER_BAD_DEFAULTS_SETTING, true));
+        if (BootstrapSettings.SECURITY_MANAGER_ENABLED_SETTING.get(settings)) {
+            Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));
         }
     }
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.bootstrap;
+
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
+
+public final class BootstrapSettings {
+
+    private BootstrapSettings() {
+    }
+
+    // TODO: remove this: http://www.openbsd.org/papers/hackfest2015-pledge/mgp00005.jpg
+    /**
+     * option to turn off our security manager completely, for example
+     * if you want to have your own configuration or just disable
+     */
+    public static final Setting<Boolean> SECURITY_MANAGER_ENABLED_SETTING =
+        Setting.boolSetting("security.manager.enabled", true, false, Scope.CLUSTER);
+
+    // TODO: remove this hack when insecure defaults are removed from java
+    public static final Setting<Boolean> SECURITY_FILTER_BAD_DEFAULTS_SETTING =
+        Setting.boolSetting("security.manager.filter_bad_defaults", true, false, Scope.CLUSTER);
+
+    public static final Setting<Boolean> MLOCKALL_SETTING = Setting.boolSetting("bootstrap.mlockall", false, false, Scope.CLUSTER);
+    public static final Setting<Boolean> SECCOMP_SETTING = Setting.boolSetting("bootstrap.seccomp", true, false, Scope.CLUSTER);
+    public static final Setting<Boolean> CTRLHANDLER_SETTING = Setting.boolSetting("bootstrap.ctrlhandler", true, false, Scope.CLUSTER);

+}
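
The new file above replaces raw string keys and inline defaults with typed Setting<Boolean> constants: one place declares the key, the default, and the scope, and callers just call get(settings). A rough standalone sketch of that pattern, assuming a hypothetical Setting class rather than the real org.elasticsearch.common.settings.Setting:

    import java.util.Map;
    import java.util.function.Function;

    final class Setting<T> {
        private final String key;
        private final T defaultValue;
        private final Function<String, T> parser;

        private Setting(String key, T defaultValue, Function<String, T> parser) {
            this.key = key;
            this.defaultValue = defaultValue;
            this.parser = parser;
        }

        static Setting<Boolean> boolSetting(String key, boolean defaultValue) {
            return new Setting<>(key, defaultValue, Boolean::parseBoolean);
        }

        // a raw config map stands in for the real Settings abstraction
        T get(Map<String, String> settings) {
            String raw = settings.get(key);
            return raw == null ? defaultValue : parser.apply(raw);
        }
    }

    public class TypedSettingDemo {
        static final Setting<Boolean> SECCOMP = Setting.boolSetting("bootstrap.seccomp", true);

        public static void main(String[] args) {
            System.out.println(SECCOMP.get(Map.of()));                             // true (default)
            System.out.println(SECCOMP.get(Map.of("bootstrap.seccomp", "false"))); // false (explicit)
        }
    }

The payoff is the same as in the hunks above: parsing and defaulting happen once, in the declaration, instead of at every call site.
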
@@ -19,28 +19,26 @@

 package org.elasticsearch.bootstrap;

-import org.elasticsearch.common.Strings;
-
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;

 class JavaVersion implements Comparable<JavaVersion> {
     private final List<Integer> version;

     public List<Integer> getVersion() {
-        return Collections.unmodifiableList(version);
+        return version;
     }

     private JavaVersion(List<Integer> version) {
-        this.version = version;
+        this.version = Collections.unmodifiableList(version);
     }

     public static JavaVersion parse(String value) {
-        if (value == null) {
-            throw new NullPointerException("value");
-        }
-        if ("".equals(value)) {
+        Objects.requireNonNull(value);
+        if (!isValid(value)) {
             throw new IllegalArgumentException("value");
         }
@@ -79,6 +77,6 @@ class JavaVersion implements Comparable<JavaVersion> {

     @Override
     public String toString() {
-        return Strings.collectionToDelimitedString(version, ".");
+        return version.stream().map(v -> Integer.toString(v)).collect(Collectors.joining("."));
     }
 }
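
Two small correctness tweaks in JavaVersion: the unmodifiable wrapper moves from the getter into the constructor (wrap once, then every reader sees the same immutable view), and toString() joins the parts with a stream instead of a helper. The same two ideas in an isolated, runnable sketch:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.stream.Collectors;

    final class Version {
        private final List<Integer> parts;

        Version(List<Integer> parts) {
            // wrapping here means getParts() can return the field directly,
            // instead of allocating a wrapper on every call
            this.parts = Collections.unmodifiableList(parts);
        }

        List<Integer> getParts() {
            return parts;
        }

        @Override
        public String toString() {
            return parts.stream().map(String::valueOf).collect(Collectors.joining("."));
        }
    }

    public class VersionDemo {
        public static void main(String[] args) {
            Version v = new Version(Arrays.asList(1, 8, 0));
            System.out.println(v); // 1.8.0
        }
    }
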
@@ -53,7 +53,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.NodeServicesProvider;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -336,7 +335,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
             }
         }

-        QueryShardContext queryShardContext = indexService.getQueryShardContext();
+        final QueryShardContext queryShardContext = indexService.newQueryShardContext();
         for (Alias alias : request.aliases()) {
             if (Strings.hasLength(alias.filter())) {
                 aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
@@ -117,7 +117,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
                         indices.put(indexMetaData.getIndex().getName(), indexService);
                     }

-                    aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext());
+                    aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.newQueryShardContext());
                 }
                 AliasMetaData newAliasMd = AliasMetaData.newAliasMetaDataBuilder(
                         aliasAction.alias())
@@ -451,7 +451,7 @@ public final class ShardRouting implements Streamable, ToXContent {
     }

     /**
-     * Moves the shard from started to initializing and bumps the version
+     * Moves the shard from started to initializing
     */
    void reinitializeShard() {
        ensureNotFrozen();
@@ -88,6 +88,7 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.TransportSettings;
 import org.elasticsearch.transport.netty.NettyTransport;
 import org.elasticsearch.tribe.TribeService;
+import org.elasticsearch.bootstrap.BootstrapSettings;

 import java.util.Arrays;
 import java.util.Collections;
@@ -378,6 +379,11 @@ public final class ClusterSettings extends AbstractScopedSettings {
                 PageCacheRecycler.WEIGHT_LONG_SETTING,
                 PageCacheRecycler.WEIGHT_OBJECTS_SETTING,
                 PageCacheRecycler.TYPE_SETTING,
-                PluginsService.MANDATORY_SETTING
+                PluginsService.MANDATORY_SETTING,
+                BootstrapSettings.SECURITY_MANAGER_ENABLED_SETTING,
+                BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING,
+                BootstrapSettings.MLOCKALL_SETTING,
+                BootstrapSettings.SECCOMP_SETTING,
+                BootstrapSettings.CTRLHANDLER_SETTING
                 )));
     }
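
Every Setting introduced in BootstrapSettings must also be registered here; a central registry is what lets unknown keys be rejected up front. A toy illustration of the registration idea only (the real AbstractScopedSettings does far more, including dynamic updates):

    import java.util.Map;
    import java.util.Set;

    public class SettingsRegistryDemo {
        static final Set<String> REGISTERED = Set.of(
                "bootstrap.mlockall",
                "bootstrap.seccomp",
                "bootstrap.ctrlhandler",
                "security.manager.enabled",
                "security.manager.filter_bad_defaults");

        static void validate(Map<String, String> userSettings) {
            for (String key : userSettings.keySet()) {
                if (!REGISTERED.contains(key)) {
                    throw new IllegalArgumentException("unknown setting [" + key + "]");
                }
            }
        }

        public static void main(String[] args) {
            validate(Map.of("bootstrap.seccomp", "false"));  // ok, registered above
            validate(Map.of("bootstrap.sekkomp", "false"));  // throws: unknown setting
        }
    }
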
@@ -117,7 +117,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
                 UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
                 EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
                 EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING,
-                IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING,
+                IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING,
                 IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY,
                 FieldMapper.IGNORE_MALFORMED_SETTING,
                 FieldMapper.COERCE_SETTING,
@@ -256,8 +256,8 @@ public class Setting<T> extends ToXContentToBytes {
      * this is used for settings that depend on each other... see {@link org.elasticsearch.common.settings.AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)} and it's
      * usage for details.
      */
-    static <A, B> AbstractScopedSettings.SettingUpdater<Tuple<A, B>> compoundUpdater(final BiConsumer<A,B> consumer, final Setting<A> aSettting, final Setting<B> bSetting, ESLogger logger) {
-        final AbstractScopedSettings.SettingUpdater<A> aSettingUpdater = aSettting.newUpdater(null, logger);
+    static <A, B> AbstractScopedSettings.SettingUpdater<Tuple<A, B>> compoundUpdater(final BiConsumer<A,B> consumer, final Setting<A> aSetting, final Setting<B> bSetting, ESLogger logger) {
+        final AbstractScopedSettings.SettingUpdater<A> aSettingUpdater = aSetting.newUpdater(null, logger);
         final AbstractScopedSettings.SettingUpdater<B> bSettingUpdater = bSetting.newUpdater(null, logger);
         return new AbstractScopedSettings.SettingUpdater<Tuple<A, B>>() {
             @Override
@@ -47,6 +47,7 @@ import org.elasticsearch.index.store.FsDirectoryService;
 import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.monitor.fs.FsProbe;
 import org.elasticsearch.monitor.jvm.JvmInfo;
+import org.elasticsearch.monitor.process.ProcessProbe;

 import java.io.Closeable;
 import java.io.IOException;
@@ -221,6 +222,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {

         maybeLogPathDetails();
         maybeLogHeapDetails();
+        maybeWarnFileDescriptors();

         applySegmentInfosTrace(settings);
     }
@@ -313,6 +315,20 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
         logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops);
     }

+    private void maybeWarnFileDescriptors() {
+        long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount();
+        if (maxFileDescriptorCount == -1) {
+            return;
+        }
+        int fileDescriptorCountThreshold = (1 << 16);
+        if (maxFileDescriptorCount < fileDescriptorCountThreshold) {
+            logger.warn(
+                    "max file descriptors [{}] for elasticsearch process likely too low, consider increasing to at least [{}]",
+                    maxFileDescriptorCount,
+                    fileDescriptorCountThreshold);
+        }
+    }
+
     @SuppressForbidden(reason = "System.out.*")
     static void applySegmentInfosTrace(Settings settings) {
         if (ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING.get(settings)) {
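
The new maybeWarnFileDescriptors() warns when the process file-descriptor limit is below 2^16 (65536). Outside Elasticsearch the same check can be sketched with plain JDK APIs; this version uses com.sun.management.UnixOperatingSystemMXBean instead of the ProcessProbe above, so it is only meaningful on Unix-like JVMs:

    import java.lang.management.ManagementFactory;
    import java.lang.management.OperatingSystemMXBean;

    public class FileDescriptorCheck {
        public static void main(String[] args) {
            OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
            long max = -1;
            if (os instanceof com.sun.management.UnixOperatingSystemMXBean) {
                max = ((com.sun.management.UnixOperatingSystemMXBean) os).getMaxFileDescriptorCount();
            }
            if (max == -1) {
                return; // unknown on this platform, nothing to warn about
            }
            long threshold = 1 << 16; // same threshold as the hunk above
            if (max < threshold) {
                System.err.printf(
                        "max file descriptors [%d] likely too low, consider increasing to at least [%d]%n",
                        max, threshold);
            }
        }
    }
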
@@ -131,7 +131,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
         this.indexSettings = indexSettings;
         this.analysisService = registry.build(indexSettings);
         this.similarityService = similarityService;
-        this.mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, IndexService.this::getQueryShardContext);
+        this.mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, IndexService.this::newQueryShardContext);
         this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, nodeServicesProvider.getCircuitBreakerService(), mapperService);
         this.shardStoreDeleter = shardStoreDeleter;
         this.eventListener = eventListener;
@@ -417,7 +417,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
         return indexSettings;
     }

-    public QueryShardContext getQueryShardContext() {
+    /**
+     * Creates a new QueryShardContext. The context has not types set yet, if types are required set them via {@link QueryShardContext#setTypes(String...)}
+     */
+    public QueryShardContext newQueryShardContext() {
         return new QueryShardContext(indexSettings, nodeServicesProvider.getClient(), indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry());
     }
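
The rename from getQueryShardContext() to newQueryShardContext() signals a factory rather than an accessor, and the constructor hunk passes that factory as a method reference so MapperService can mint a fresh context per use. A simplified sketch of the wiring (Context, Service, and Consumer are illustrative names, not the real classes):

    import java.util.function.Supplier;

    class Context {
        // per-use mutable state goes here
    }

    class Service {
        public Context newContext() { // factory: a new instance per call
            return new Context();
        }

        Consumer wire() {
            return new Consumer(this::newContext); // hand over the factory, not an instance
        }
    }

    class Consumer {
        private final Supplier<Context> contextFactory;

        Consumer(Supplier<Context> contextFactory) {
            this.contextFactory = contextFactory;
        }

        void doWork() {
            Context ctx = contextFactory.get(); // each unit of work gets its own context
            // ... use ctx ...
        }
    }
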
@@ -82,7 +82,7 @@ public final class IndexSettings {
     public static final Setting<Integer> MAX_RESULT_WINDOW_SETTING = Setting.intSetting("index.max_result_window", 10000, 1, true, Setting.Scope.INDEX);
     public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS);
     public static final Setting<TimeValue> INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX);
-    public static final Setting<ByteSizeValue> INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING = Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, Setting.Scope.INDEX);
+    public static final Setting<ByteSizeValue> INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING = Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, Setting.Scope.INDEX);


     /**
@@ -197,7 +197,7 @@ public final class IndexSettings {
         this.durability = scopedSettings.get(INDEX_TRANSLOG_DURABILITY_SETTING);
         syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings);
         refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING);
-        flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING);
+        flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING);
         mergeSchedulerConfig = new MergeSchedulerConfig(this);
         gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis();
         warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING);
@@ -222,7 +222,7 @@ public final class IndexSettings {
         scopedSettings.addSettingsUpdateConsumer(MAX_RESULT_WINDOW_SETTING, this::setMaxResultWindow);
         scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer);
         scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes);
-        scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, this::setTranslogFlushThresholdSize);
+        scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, this::setTranslogFlushThresholdSize);
         scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval);
     }
@@ -136,11 +136,11 @@ public final class SearchSlowLog {
         public String toString() {
             StringBuilder sb = new StringBuilder();
             sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
-            if (context.types() == null) {
+            if (context.getQueryShardContext().getTypes() == null) {
                 sb.append("types[], ");
             } else {
                 sb.append("types[");
-                Strings.arrayToDelimitedString(context.types(), ",", sb);
+                Strings.arrayToDelimitedString(context.getQueryShardContext().getTypes(), ",", sb);
                 sb.append("], ");
             }
             if (context.groupStats() == null) {
@@ -43,7 +43,6 @@ import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InfoStream;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cluster.routing.Murmur3HashFunction;
 import org.elasticsearch.common.Nullable;
@@ -68,7 +67,6 @@ import org.elasticsearch.index.shard.TranslogRecoveryPerformer;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.index.translog.TranslogConfig;
 import org.elasticsearch.index.translog.TranslogCorruptedException;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;

 import java.io.IOException;
@@ -233,20 +231,7 @@ public class InternalEngine extends Engine {
         final TranslogRecoveryPerformer handler = engineConfig.getTranslogRecoveryPerformer();
         try {
             Translog.Snapshot snapshot = translog.newSnapshot();
-            Translog.Operation operation;
-            while ((operation = snapshot.next()) != null) {
-                try {
-                    handler.performRecoveryOperation(this, operation, true);
-                    opsRecovered++;
-                } catch (ElasticsearchException e) {
-                    if (e.status() == RestStatus.BAD_REQUEST) {
-                        // mainly for MapperParsingException and Failure to detect xcontent
-                        logger.info("ignoring recovery of a corrupt translog entry", e);
-                    } else {
-                        throw e;
-                    }
-                }
-            }
+            opsRecovered = handler.recoveryFromSnapshot(this, snapshot);
         } catch (Throwable e) {
             throw new EngineException(shardId, "failed to recover from translog", e);
         }
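
The replay loop moves out of InternalEngine into TranslogRecoveryPerformer.recoveryFromSnapshot(...) (added later in this diff), so a subclass can wrap the shared loop to record stats, exactly as IndexShard's anonymous performer does below. The shape of that extraction in miniature (simplified types, not the real classes):

    import java.util.Iterator;

    class Performer {
        // the shared replay loop, formerly inlined at the call site
        public int recoverFromSnapshot(Iterator<String> snapshot) {
            int ops = 0;
            while (snapshot.hasNext()) {
                apply(snapshot.next());
                ops++;
            }
            return ops;
        }

        protected void apply(String op) {
            // apply one operation
        }
    }

    class StatsPerformer extends Performer {
        int total;

        @Override
        public int recoverFromSnapshot(Iterator<String> snapshot) {
            // hook: record totals, then delegate to the shared loop
            int ops = super.recoverFromSnapshot(snapshot);
            total += ops;
            return ops;
        }
    }
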
@@ -49,7 +49,6 @@ public class DocumentMapperParser {

     final MapperService mapperService;
     final AnalysisService analysisService;
-    private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
     private final SimilarityService similarityService;
     private final Supplier<QueryShardContext> queryShardContextSupplier;
@@ -421,8 +421,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
         }

-        doXContentAnalyzers(builder, includeDefaults);
-
         if (fieldType().similarity() != null) {
             builder.field("similarity", fieldType().similarity().name());
         } else if (includeDefaults) {
@@ -439,15 +437,26 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         }
     }

-    protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
+    protected final void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
+        if (fieldType.tokenized() == false) {
+            return;
+        }
         if (fieldType().indexAnalyzer() == null) {
             if (includeDefaults) {
                 builder.field("analyzer", "default");
             }
-        } else if (includeDefaults || fieldType().indexAnalyzer().name().startsWith("_") == false && fieldType().indexAnalyzer().name().equals("default") == false) {
-            builder.field("analyzer", fieldType().indexAnalyzer().name());
-            if (fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false) {
-                builder.field("search_analyzer", fieldType().searchAnalyzer().name());
+        } else {
+            boolean hasDefaultIndexAnalyzer = fieldType().indexAnalyzer().name().equals("default");
+            boolean hasDifferentSearchAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false;
+            boolean hasDifferentSearchQuoteAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().searchQuoteAnalyzer().name()) == false;
+            if (includeDefaults || hasDefaultIndexAnalyzer == false || hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
+                builder.field("analyzer", fieldType().indexAnalyzer().name());
+                if (hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
+                    builder.field("search_analyzer", fieldType().searchAnalyzer().name());
+                    if (hasDifferentSearchQuoteAnalyzer) {
+                        builder.field("search_quote_analyzer", fieldType().searchQuoteAnalyzer().name());
+                    }
+                }
             }
         }
     }
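
The rewritten doXContentAnalyzers(...) names each condition up front and then makes one nested emission decision, instead of scattering the comparisons through the branches. The same style in a runnable toy (analyzer names are hypothetical):

    public class AnalyzerEmissionDemo {
        public static void main(String[] args) {
            String indexAnalyzer = "standard";
            String searchAnalyzer = "standard";
            String searchQuoteAnalyzer = "whitespace";
            boolean includeDefaults = false;

            boolean hasDefaultIndexAnalyzer = indexAnalyzer.equals("default");
            boolean hasDifferentSearchAnalyzer = !searchAnalyzer.equals(indexAnalyzer);
            boolean hasDifferentSearchQuoteAnalyzer = !searchAnalyzer.equals(searchQuoteAnalyzer);

            // emit only what differs from the defaults, mirroring the hunk above
            if (includeDefaults || !hasDefaultIndexAnalyzer || hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
                System.out.println("analyzer=" + indexAnalyzer);
                if (hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
                    System.out.println("search_analyzer=" + searchAnalyzer);
                    if (hasDifferentSearchQuoteAnalyzer) {
                        System.out.println("search_quote_analyzer=" + searchQuoteAnalyzer);
                    }
                }
            }
        }
    }
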
@@ -404,6 +404,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
     @Override
     protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
         super.doXContentBody(builder, includeDefaults, params);
+        doXContentAnalyzers(builder, includeDefaults);

         if (includeDefaults || fieldType().nullValue() != null) {
             builder.field("null_value", fieldType().nullValue());
@@ -417,16 +418,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
             builder.field("position_increment_gap", positionIncrementGap);
         }
-        NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer();
-        if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) {
-            builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
-        } else if (includeDefaults) {
-            if (searchQuoteAnalyzer == null) {
-                builder.field("search_quote_analyzer", "default");
-            } else {
-                builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
-            }
-        }
+
         if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
             builder.field("ignore_above", ignoreAbove);
         }
@@ -26,6 +26,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -205,13 +206,15 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil

     @Override
     protected Query doToQuery(QueryShardContext context) throws IOException {
-        String[] previousTypes = QueryShardContext.setTypesWithPrevious(type);
         Query innerQuery;
+        final String[] previousTypes = context.getTypes();
+        context.setTypes(type);
         try {
             innerQuery = query.toQuery(context);
         } finally {
-            QueryShardContext.setTypes(previousTypes);
+            context.setTypes(previousTypes);
         }

         if (innerQuery == null) {
             return null;
         }
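
Both has_child and has_parent now save, overwrite, and restore the context's types around the inner toQuery(...) call. The idiom in isolation — the restore belongs in finally so an exception cannot leak the temporary types into later queries (TypesContext is an illustrative name):

    class TypesContext {
        private String[] types = new String[0];

        String[] getTypes() { return types; }
        void setTypes(String... types) { this.types = types; }

        <T> T withTypes(String type, java.util.function.Supplier<T> body) {
            final String[] previous = getTypes();
            setTypes(type);
            try {
                return body.get();
            } finally {
                setTypes(previous); // always restored, even if body.get() throws
            }
        }
    }
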
@@ -22,6 +22,7 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -119,11 +120,12 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
     @Override
     protected Query doToQuery(QueryShardContext context) throws IOException {
         Query innerQuery;
-        String[] previousTypes = QueryShardContext.setTypesWithPrevious(type);
+        String[] previousTypes = context.getTypes();
+        context.setTypes(type);
         try {
             innerQuery = query.toQuery(context);
         } finally {
-            QueryShardContext.setTypes(previousTypes);
+            context.setTypes(previousTypes);
         }

         if (innerQuery == null) {
@@ -30,6 +30,7 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -74,7 +75,6 @@ import static java.util.Collections.unmodifiableMap;
  */
 public class QueryShardContext {

-    private static final ThreadLocal<String[]> typesContext = new ThreadLocal<>();
     private final MapperService mapperService;
     private final ScriptService scriptService;
     private final SimilarityService similarityService;
@@ -82,23 +82,14 @@ public class QueryShardContext {
     private final IndexFieldDataService indexFieldDataService;
     private final IndexSettings indexSettings;
     private final Client client;
+    private String[] types = Strings.EMPTY_ARRAY;

-    public static void setTypes(String[] types) {
-        typesContext.set(types);
+    public void setTypes(String... types) {
+        this.types = types;
     }

-    public static String[] getTypes() {
-        return typesContext.get();
-    }
-
-    public static String[] setTypesWithPrevious(String... types) {
-        String[] old = typesContext.get();
-        setTypes(types);
-        return old;
-    }
-
-    public static void removeTypes() {
-        typesContext.remove();
+    public String[] getTypes() {
+        return types;
     }

     private final Map<String, Query> namedQueries = new HashMap<>();
@@ -126,6 +117,7 @@ public class QueryShardContext {

     public QueryShardContext(QueryShardContext source) {
         this(source.indexSettings, source.client, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry);
+        this.types = source.getTypes();
     }

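
This is the heart of the merge: QueryShardContext stops keeping types in a static ThreadLocal and holds them as plain instance state, with an explicit copy in the copy constructor. A minimal before/after sketch of why the instance field composes better:

    class Before {
        // old: one hidden slot per thread, shared by every context on that thread;
        // callers must remember to clean it up, and stale values leak across requests
        private static final ThreadLocal<String[]> typesContext = new ThreadLocal<>();

        static void setTypes(String[] types) { typesContext.set(types); }
        static String[] getTypes() { return typesContext.get(); }
    }

    class After {
        // new: each context owns its own types; copies are explicit and the
        // state dies with the context object
        private String[] types = new String[0];

        After() {}

        After(After source) {
            this.types = source.getTypes(); // mirror of the copy-constructor hunk above
        }

        void setTypes(String... types) { this.types = types; }
        String[] getTypes() { return types; }
    }

With the instance field, two contexts on the same thread can carry different types simultaneously, and nothing outlives the request that created it.
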
@@ -61,7 +61,7 @@ public class NestedInnerQueryParseSupport {
     private ObjectMapper parentObjectMapper;

     public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) {
-        shardContext = searchContext.indexShard().getQueryShardContext();
+        shardContext = searchContext.getQueryShardContext();
         parseContext = shardContext.parseContext();
         shardContext.reset(parser);

@@ -27,7 +27,6 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
 import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.util.CloseableThreadLocal;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.ThreadInterruptedException;
 import org.elasticsearch.ElasticsearchException;
@@ -55,6 +54,7 @@ import org.elasticsearch.gateway.MetaDataStateFormat;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.NodeServicesProvider;
+import org.elasticsearch.index.SearchSlowLog;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.cache.IndexCache;
 import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache;
@@ -89,13 +89,12 @@ import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
-import org.elasticsearch.index.SearchSlowLog;
 import org.elasticsearch.index.search.stats.SearchStats;
 import org.elasticsearch.index.search.stats.ShardSearchStats;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.index.snapshots.IndexShardRepository;
-import org.elasticsearch.index.store.Store.MetadataSnapshot;
 import org.elasticsearch.index.store.Store;
+import org.elasticsearch.index.store.Store.MetadataSnapshot;
 import org.elasticsearch.index.store.StoreFileMetaData;
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
@@ -105,8 +104,8 @@ import org.elasticsearch.index.translog.TranslogConfig;
 import org.elasticsearch.index.translog.TranslogStats;
 import org.elasticsearch.index.warmer.ShardIndexWarmerService;
 import org.elasticsearch.index.warmer.WarmerStats;
-import org.elasticsearch.indices.cache.query.IndicesQueryCache;
 import org.elasticsearch.indices.IndexingMemoryController;
+import org.elasticsearch.indices.cache.query.IndicesQueryCache;
 import org.elasticsearch.indices.recovery.RecoveryFailedException;
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.percolator.PercolatorService;
@@ -158,7 +157,6 @@ public class IndexShard extends AbstractIndexShardComponent {
     private final IndicesQueryCache indicesQueryCache;
     private final IndexEventListener indexEventListener;
     private final IndexSettings idxSettings;
-    private final NodeServicesProvider provider;

     /** How many bytes we are currently moving to disk, via either IndexWriter.flush or refresh. IndexingMemoryController polls this
      * across all shards to decide if throttling is necessary because moving bytes to disk is falling behind vs incoming documents
@@ -253,9 +251,9 @@ public class IndexShard extends AbstractIndexShardComponent {

         this.engineConfig = newEngineConfig(translogConfig, cachingPolicy);
         this.suspendableRefContainer = new SuspendableRefContainer();
-        this.provider = provider;
         this.searcherWrapper = indexSearcherWrapper;
-        this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, newQueryShardContext());
+        QueryShardContext queryShardContext = new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
+        this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryShardContext);
     }

     public Store store() {
@@ -814,7 +812,7 @@ public class IndexShard extends AbstractIndexShardComponent {
                     engine.flushAndClose();
                 }
             } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times
-                IOUtils.close(engine, percolatorQueriesRegistry, queryShardContextCache);
+                IOUtils.close(engine, percolatorQueriesRegistry);
             }
         }
     }
@@ -874,6 +872,12 @@ public class IndexShard extends AbstractIndexShardComponent {
     * After the store has been recovered, we need to start the engine in order to apply operations
     */
    public void performTranslogRecovery(boolean indexExists) {
+        if (indexExists == false) {
+            // note: these are set when recovering from the translog
+            final RecoveryState.Translog translogStats = recoveryState().getTranslog();
+            translogStats.totalOperations(0);
+            translogStats.totalOperationsOnStart(0);
+        }
         internalPerformTranslogRecovery(false, indexExists);
         assert recoveryState.getStage() == RecoveryState.Stage.TRANSLOG : "TRANSLOG stage expected but was: " + recoveryState.getStage();
     }
@@ -1387,6 +1391,15 @@ public class IndexShard extends AbstractIndexShardComponent {
                 assert recoveryState != null;
                 recoveryState.getTranslog().incrementRecoveredOperations();
             }
+
+            @Override
+            public int recoveryFromSnapshot(Engine engine, Translog.Snapshot snapshot) throws IOException {
+                assert recoveryState != null;
+                RecoveryState.Translog translogStats = recoveryState.getTranslog();
+                translogStats.totalOperations(snapshot.totalOperations());
+                translogStats.totalOperationsOnStart(snapshot.totalOperations());
+                return super.recoveryFromSnapshot(engine, snapshot);
+            }
         };
         return new EngineConfig(shardId,
                 threadPool, indexSettings, warmer, store, deletionPolicy, indexSettings.getMergePolicy(),
@@ -1499,25 +1512,6 @@ public class IndexShard extends AbstractIndexShardComponent {
         }
     }

-    private CloseableThreadLocal<QueryShardContext> queryShardContextCache = new CloseableThreadLocal<QueryShardContext>() {
-        // TODO We should get rid of this threadlocal but I think it should be a sep change
-        @Override
-        protected QueryShardContext initialValue() {
-            return newQueryShardContext();
-        }
-    };
-
-    private QueryShardContext newQueryShardContext() {
-        return new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
-    }
-
-    /**
-     * Returns a threadlocal QueryShardContext for this shard.
-     */
-    public QueryShardContext getQueryShardContext() {
-        return queryShardContextCache.get();
-    }
-
     EngineFactory getEngineFactory() {
         return engineFactory;
     }
@@ -203,7 +203,6 @@ final class StoreRecovery {
                     logger.trace("cleaning existing shard, shouldn't exists");
                     IndexWriter writer = new IndexWriter(store.directory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER).setOpenMode(IndexWriterConfig.OpenMode.CREATE));
                     writer.close();
-                    recoveryState.getTranslog().totalOperations(0);
                 }
             }
         } catch (Throwable e) {
@@ -224,10 +223,6 @@ final class StoreRecovery {
         } catch (IOException e) {
             logger.debug("failed to list file details", e);
         }
-        if (indexShouldExists == false) {
-            recoveryState.getTranslog().totalOperations(0);
-            recoveryState.getTranslog().totalOperationsOnStart(0);
-        }
         indexShard.performTranslogRecovery(indexShouldExists);
         indexShard.finalizeRecovery();
         indexShard.postRecovery("post recovery from shard_store");
@@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Mapping;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;
 import java.util.HashMap;
@@ -77,6 +78,25 @@ public class TranslogRecoveryPerformer {
         return numOps;
     }

+    public int recoveryFromSnapshot(Engine engine, Translog.Snapshot snapshot) throws IOException {
+        Translog.Operation operation;
+        int opsRecovered = 0;
+        while ((operation = snapshot.next()) != null) {
+            try {
+                performRecoveryOperation(engine, operation, true);
+                opsRecovered++;
+            } catch (ElasticsearchException e) {
+                if (e.status() == RestStatus.BAD_REQUEST) {
+                    // mainly for MapperParsingException and Failure to detect xcontent
+                    logger.info("ignoring recovery of a corrupt translog entry", e);
+                } else {
+                    throw e;
+                }
+            }
+        }
+        return opsRecovered;
+    }
+
     public static class BatchOperationException extends ElasticsearchException {

         private final int completedOperations;
@@ -182,6 +202,7 @@ public class TranslogRecoveryPerformer {
         // noop
     }

+
     /**
      * Returns the recovered types modifying the mapping during the recovery
      */
@@ -93,6 +93,14 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe
             logger.debug("delaying recovery of {} as source node {} is unknown", request.shardId(), request.targetNode());
             throw new DelayRecoveryException("source node does not have the node [" + request.targetNode() + "] in its state yet..");
         }
+
+        ShardRouting routingEntry = shard.routingEntry();
+        if (request.recoveryType() == RecoveryState.Type.PRIMARY_RELOCATION &&
+            (routingEntry.relocating() == false || routingEntry.relocatingNodeId().equals(request.targetNode().getId()) == false)) {
+            logger.debug("delaying recovery of {} as source shard is not marked yet as relocating to {}", request.shardId(), request.targetNode());
+            throw new DelayRecoveryException("source shard is not marked yet as relocating to [" + request.targetNode() + "]");
+        }
+
         ShardRouting targetShardRouting = null;
         for (ShardRouting shardRouting : node) {
             if (shardRouting.shardId().equals(request.shardId())) {
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.percolator;

-import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReaderContext;
@@ -32,7 +31,6 @@ import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.util.BigArrays;
@@ -46,6 +44,7 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
@@ -73,11 +72,11 @@ import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;

+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;

 /**
 */
@@ -99,14 +98,11 @@ public class PercolateContext extends SearchContext {
     private final long originNanoTime = System.nanoTime();
     private final long startTime;
     private final boolean onlyCount;
-    private String[] types;
-
     private Engine.Searcher docSearcher;
     private Engine.Searcher engineSearcher;
     private ContextIndexSearcher searcher;

     private SearchContextHighlight highlight;
-    private SearchLookup searchLookup;
     private ParsedQuery parsedQuery;
     private Query query;
     private Query percolateQuery;
@@ -115,7 +111,9 @@ public class PercolateContext extends SearchContext {
     private QuerySearchResult querySearchResult;
     private Sort sort;
     private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
+    private final QueryShardContext queryShardContext;
     private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
+    private SearchLookup searchLookup;

     public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
                             IndexService indexService, PageCacheRecycler pageCacheRecycler,
@@ -126,7 +124,6 @@ public class PercolateContext extends SearchContext {
         this.fieldDataService = indexService.fieldData();
         this.mapperService = indexService.mapperService();
         this.searchShardTarget = searchShardTarget;
-        this.types = new String[]{request.documentType()};
         this.pageCacheRecycler = pageCacheRecycler;
         this.bigArrays = bigArrays.withCircuitBreaking();
         this.querySearchResult = new QuerySearchResult(0, searchShardTarget);
@@ -137,10 +134,12 @@ public class PercolateContext extends SearchContext {
         this.aliasFilter = aliasFilter;
         this.startTime = request.getStartTime();
         this.onlyCount = request.onlyCount();
+        queryShardContext = indexService.newQueryShardContext();
+        queryShardContext.setTypes(request.documentType());
     }

     // for testing:
-    PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, MapperService mapperService) {
+    PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, MapperService mapperService, QueryShardContext queryShardContext) {
         super(null);
         this.searchShardTarget = searchShardTarget;
         this.mapperService = mapperService;
@@ -154,6 +153,7 @@ public class PercolateContext extends SearchContext {
         this.startTime = 0;
         this.numberOfShards = 0;
         this.onlyCount = true;
+        this.queryShardContext = queryShardContext;
     }

     public IndexSearcher docSearcher() {
@@ -162,10 +162,10 @@ public class PercolateContext extends SearchContext {

     public void initialize(Engine.Searcher docSearcher, ParsedDocument parsedDocument) {
         this.docSearcher = docSearcher;
-
         IndexReader indexReader = docSearcher.reader();
         LeafReaderContext atomicReaderContext = indexReader.leaves().get(0);
-        LeafSearchLookup leafLookup = lookup().getLeafSearchLookup(atomicReaderContext);
+        this.searchLookup = new SearchLookup(mapperService(), fieldData(), queryShardContext.getTypes());
+        LeafSearchLookup leafLookup = searchLookup.getLeafSearchLookup(atomicReaderContext);
         leafLookup.setDocument(0);
         leafLookup.source().setSource(parsedDocument.source());
@@ -232,10 +232,10 @@ public class PercolateContext extends SearchContext {

     @Override
     public SearchLookup lookup() {
-        if (searchLookup == null) {
-            searchLookup = new SearchLookup(mapperService(), fieldData(), types);
-        }
-        return searchLookup;
+        // we cache this since it's really just a single document lookup - check the init method for details
+        assert searchLookup != null : "context is not initialized";
+        assert Arrays.equals(searchLookup.doc().getTypes(), getQueryShardContext().getTypes()) : "types mismatch - can't return lookup";
+        return this.searchLookup;
     }

     @Override
@@ -265,16 +265,6 @@ public class PercolateContext extends SearchContext {
         return query;
     }

-    @Override
-    public String[] types() {
-        return types;
-    }
-
-    public void types(String[] types) {
-        this.types = types;
-        searchLookup = new SearchLookup(mapperService(), fieldData(), types);
-    }
-
     @Override
     public IndexFieldDataService fieldData() {
         return fieldDataService;
@@ -341,11 +331,6 @@ public class PercolateContext extends SearchContext {
         return numberOfShards;
     }

-    @Override
-    public boolean hasTypes() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public float queryBoost() {
         throw new UnsupportedOperationException();
@@ -684,6 +669,11 @@ public class PercolateContext extends SearchContext {
         return queryCollectors;
     }

+    @Override
+    public QueryShardContext getQueryShardContext() {
+        return queryShardContext;
+    }
+
     @Override
     public Profilers getProfilers() {
         throw new UnsupportedOperationException();
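
PercolateContext now receives its QueryShardContext at construction time (the production constructor builds one from the IndexService; the test constructor takes it as a parameter), and the SearchLookup is built once in initialize() and only asserted in lookup(). A compressed sketch of that constructor-injection shape (Lookup and PercolateCtx are illustrative names):

    class Lookup {
        final String[] types;
        Lookup(String... types) { this.types = types; }
    }

    class PercolateCtx {
        private final String[] types;
        private Lookup lookup;             // built once, in initialize()

        PercolateCtx(String... types) {    // injected state: easy to fake in tests
            this.types = types;
        }

        void initialize() {
            this.lookup = new Lookup(types);
        }

        Lookup lookup() {
            assert lookup != null : "context is not initialized";
            return lookup;                 // cached single-document lookup
        }
    }
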
@@ -23,8 +23,6 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
-import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
-import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -35,7 +33,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.mapper.DocumentMapperForType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.aggregations.AggregationPhase;
@@ -60,7 +57,7 @@ public class PercolateDocumentParser {
         this.aggregationPhase = aggregationPhase;
     }

-    public ParsedDocument parse(PercolateShardRequest request, PercolateContext context, MapperService mapperService, QueryShardContext queryShardContext) {
+    public ParsedDocument parse(final PercolateShardRequest request, final PercolateContext context, final MapperService mapperService) {
         BytesReference source = request.source();
         if (source == null || source.length() == 0) {
             if (request.docSource() != null && request.docSource().length() != 0) {
@@ -73,13 +70,13 @@ public class PercolateDocumentParser {
         // TODO: combine all feature parse elements into one map
         Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
         Map<String, ? extends SearchParseElement> aggregationElements = aggregationPhase.parseElements();

+        final QueryShardContext queryShardContext = context.getQueryShardContext();
         ParsedDocument doc = null;
         // Some queries (function_score query when for decay functions) rely on a SearchContext being set:
         // We switch types because this context needs to be in the context of the percolate queries in the shard and
         // not the in memory percolate doc
-        String[] previousTypes = context.types();
-        context.types(new String[]{PercolatorService.TYPE_NAME});
+        final String[] previousTypes = queryShardContext.getTypes();
+        queryShardContext.setTypes(PercolatorService.TYPE_NAME);
         try (XContentParser parser = XContentFactory.xContent(source).createParser(source);) {
             String currentFieldName = null;
             XContentParser.Token token;
@@ -176,7 +173,7 @@ public class PercolateDocumentParser {
         } catch (Throwable e) {
             throw new ElasticsearchParseException("failed to parse request", e);
         } finally {
-            context.types(previousTypes);
+            queryShardContext.setTypes(previousTypes);
         }

         if (request.docSource() != null && request.docSource().length() != 0) {
@@ -169,8 +169,8 @@ public class PercolatorService extends AbstractComponent implements Releasable {
     }

     public PercolateShardResponse percolate(PercolateShardRequest request) throws IOException {
-        IndexService percolateIndexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexShard indexShard = percolateIndexService.getShard(request.shardId().id());
+        final IndexService percolateIndexService = indicesService.indexServiceSafe(request.shardId().getIndex());
+        final IndexShard indexShard = percolateIndexService.getShard(request.shardId().id());
         indexShard.readAllowed(); // check if we can read the shard...
         PercolatorQueriesRegistry percolateQueryRegistry = indexShard.percolateRegistry();
         percolateQueryRegistry.prePercolate();
@@ -183,7 +183,7 @@ public class PercolatorService extends AbstractComponent implements Releasable {
                 indexShard.shardId().getIndex().getName(),
                 request.indices()
         );
-        Query aliasFilter = percolateIndexService.aliasFilter(indexShard.getQueryShardContext(), filteringAliases);
+        Query aliasFilter = percolateIndexService.aliasFilter(percolateIndexService.newQueryShardContext(), filteringAliases);

         SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
         final PercolateContext context = new PercolateContext(
@@ -191,8 +191,7 @@ public class PercolatorService extends AbstractComponent implements Releasable {
         );
         SearchContext.setCurrent(context);
         try {
-            ParsedDocument parsedDocument = percolateDocumentParser.parse(request, context, percolateIndexService.mapperService(), percolateIndexService.getQueryShardContext());
-
+            ParsedDocument parsedDocument = percolateDocumentParser.parse(request, context, percolateIndexService.mapperService());
             if (context.searcher().getIndexReader().maxDoc() == 0) {
                 return new PercolateShardResponse(Lucene.EMPTY_TOP_DOCS, Collections.emptyMap(), Collections.emptyMap(), context);
             }
@@ -646,8 +646,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
         if (source == null) {
             return;
         }
-        final IndexShard indexShard = context.indexShard();
-        QueryShardContext queryShardContext = indexShard.getQueryShardContext();
+        QueryShardContext queryShardContext = context.getQueryShardContext();
         context.from(source.from());
         context.size(source.size());
         ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
@@ -751,7 +750,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
         if (source.rescores() != null) {
             try {
                 for (RescoreBuilder<?> rescore : source.rescores()) {
-                    context.addRescore(rescore.build(context.indexShard().getQueryShardContext()));
+                    context.addRescore(rescore.build(context.getQueryShardContext()));
                 }
             } catch (IOException e) {
                 throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
@@ -776,7 +775,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
         if (source.highlighter() != null) {
             HighlightBuilder highlightBuilder = source.highlighter();
             try {
-                context.highlight(highlightBuilder.build(context.indexShard().getQueryShardContext()));
+                context.highlight(highlightBuilder.build(context.getQueryShardContext()));
             } catch (IOException e) {
                 throw new SearchContextException(context, "failed to create SearchContextHighlighter", e);
             }
@@ -124,7 +124,7 @@ public class AggregationPhase implements SearchPhase {
         if (!globals.isEmpty()) {
             BucketCollector globalsCollector = BucketCollector.wrap(globals);
             Query query = Queries.newMatchAllQuery();
-            Query searchFilter = context.searchFilter(context.types());
+            Query searchFilter = context.searchFilter(context.getQueryShardContext().getTypes());

             if (searchFilter != null) {
                 BooleanQuery filtered = new BooleanQuery.Builder()
@@ -39,7 +39,7 @@ public class FilterParser implements Aggregator.Parser {

     @Override
     public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
-        ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
+        ParsedQuery filter = context.getQueryShardContext().parseInnerFilter(parser);

         return new FilterAggregator.Factory(aggregationName, filter == null ? new MatchAllDocsQuery() : filter.query());
     }
@@ -82,7 +82,7 @@ public class FiltersParser implements Aggregator.Parser {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     key = parser.currentName();
                 } else {
-                    ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
+                    ParsedQuery filter = context.getQueryShardContext().parseInnerFilter(parser);
                     filters.add(new FiltersAggregator.KeyedFilter(key, filter == null ? Queries.newMatchAllQuery() : filter.query()));
                 }
             }
@@ -95,7 +95,7 @@ public class FiltersParser implements Aggregator.Parser {
                 keyed = false;
                 int idx = 0;
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                    ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
+                    ParsedQuery filter = context.getQueryShardContext().parseInnerFilter(parser);
                     filters.add(new FiltersAggregator.KeyedFilter(String.valueOf(idx), filter == null ? Queries.newMatchAllQuery()
                             : filter.query()));
                     idx++;
@@ -66,7 +66,7 @@ public class SignificantTermsParametersParser extends AbstractTermsParametersPar
                 if (significanceHeuristicParser != null) {
                     significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher(), context);
                 } else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
-                    filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser).query();
+                    filter = context.getQueryShardContext().parseInnerFilter(parser).query();
                 } else {
                     throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
                             + currentFieldName + "].", parser.getTokenLocation());
@@ -59,7 +59,7 @@ public class InnerHitsParseElement implements SearchParseElement {

     @Override
     public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
-        QueryShardContext context = searchContext.indexShard().getQueryShardContext();
+        QueryShardContext context = searchContext.getQueryShardContext();
         context.reset(parser);
         Map<String, InnerHitsContext.BaseInnerHits> topLevelInnerHits = parseInnerHits(parser, context, searchContext);
         if (topLevelInnerHits != null) {
@@ -54,7 +54,7 @@ public class HighlighterParseElement implements SearchParseElement
     @Override
     public void parse(XContentParser parser, SearchContext context) throws Exception {
         try {
-            context.highlight(parse(parser, context.indexShard().getQueryShardContext()));
+            context.highlight(parse(parser, context.getQueryShardContext()));
         } catch (IllegalArgumentException ex) {
             throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation());
         }
@@ -29,7 +29,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
@@ -53,6 +52,7 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
@@ -62,7 +62,6 @@ import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.FetchSubPhaseContext;
-import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
 import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.source.FetchSourceContext;
 import org.elasticsearch.search.highlight.SearchContextHighlight;
@@ -150,6 +149,7 @@ public class DefaultSearchContext extends SearchContext {
 
     private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
     private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
+    private final QueryShardContext queryShardContext;
 
     public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
             Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
@@ -175,6 +175,8 @@ public class DefaultSearchContext extends SearchContext {
         this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
         this.timeEstimateCounter = timeEstimateCounter;
         this.timeoutInMillis = timeout.millis();
+        queryShardContext = indexService.newQueryShardContext();
+        queryShardContext.setTypes(request.types());
     }
 
     @Override
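Together, the field and constructor hunks above give every DefaultSearchContext its own QueryShardContext, seeded with the request's types up front. The resulting wiring, as a sketch built only from calls visible in this diff:

    // in the DefaultSearchContext constructor
    queryShardContext = indexService.newQueryShardContext();
    queryShardContext.setTypes(request.types());
    // consumers then read types from the instance rather than from the request or static state
    String[] types = queryShardContext.getTypes();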
@@ -206,7 +208,7 @@ public class DefaultSearchContext extends SearchContext {
         }
 
         // initialize the filtering alias based on the provided filters
-        aliasFilter = indexService.aliasFilter(indexShard.getQueryShardContext(), request.filteringAliases());
+        aliasFilter = indexService.aliasFilter(queryShardContext, request.filteringAliases());
 
         if (query() == null) {
             parsedQuery(ParsedQuery.parsedMatchAllQuery());
@@ -223,7 +225,7 @@ public class DefaultSearchContext extends SearchContext {
     }
 
     private ParsedQuery buildFilteredQuery() {
-        Query searchFilter = searchFilter(types());
+        Query searchFilter = searchFilter(queryShardContext.getTypes());
         if (searchFilter == null) {
             return originalQuery;
         }
@@ -312,16 +314,6 @@ public class DefaultSearchContext extends SearchContext {
         return request.numberOfShards();
     }
 
-    @Override
-    public boolean hasTypes() {
-        return request.types() != null && request.types().length > 0;
-    }
-
-    @Override
-    public String[] types() {
-        return request.types();
-    }
-
     @Override
     public float queryBoost() {
         return queryBoost;
@@ -765,6 +757,11 @@ public class DefaultSearchContext extends SearchContext {
         return queryCollectors;
     }
 
+    @Override
+    public QueryShardContext getQueryShardContext() {
+        return queryShardContext;
+    }
+
     @Override
     public Profilers getProfilers() {
         return profilers;
@@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
@@ -117,16 +118,6 @@ public abstract class FilteredSearchContext extends SearchContext {
         return in.numberOfShards();
     }
 
-    @Override
-    public boolean hasTypes() {
-        return in.hasTypes();
-    }
-
-    @Override
-    public String[] types() {
-        return in.types();
-    }
-
     @Override
     public float queryBoost() {
         return in.queryBoost();
@@ -525,4 +516,8 @@ public abstract class FilteredSearchContext extends SearchContext {
     @Override
     public Map<Class<?>, Collector> queryCollectors() { return in.queryCollectors();}
 
+    @Override
+    public QueryShardContext getQueryShardContext() {
+        return in.getQueryShardContext();
+    }
 }
@@ -72,12 +72,10 @@ public abstract class SearchContext implements Releasable {
 
     public static void setCurrent(SearchContext value) {
         current.set(value);
-        QueryShardContext.setTypes(value.types());
     }
 
     public static void removeCurrent() {
         current.remove();
-        QueryShardContext.removeTypes();
     }
 
     public static SearchContext current() {
@@ -134,10 +132,6 @@ public abstract class SearchContext implements Releasable {
 
     public abstract int numberOfShards();
 
-    public abstract boolean hasTypes();
-
-    public abstract String[] types();
-
     public abstract float queryBoost();
 
     public abstract SearchContext queryBoost(float queryBoost);
@@ -379,4 +373,6 @@ public abstract class SearchContext implements Releasable {
         CONTEXT
     }
 
+    public abstract QueryShardContext getQueryShardContext();
+
 }
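With hasTypes()/types() removed from SearchContext and the static QueryShardContext.setTypes()/removeTypes() calls gone from setCurrent()/removeCurrent(), type information no longer round-trips through a thread-local holder. A caller that used context.types() now asks the per-context instance, and the old hasTypes() check can be rebuilt inline (sketch; the null/empty semantics mirror the deleted method body shown above):

    String[] types = context.getQueryShardContext().getTypes();
    boolean hasTypes = types != null && types.length > 0;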
@@ -51,4 +51,8 @@ public class DocLookup {
     public LeafDocLookup getLeafDocLookup(LeafReaderContext context) {
         return new LeafDocLookup(mapperService, fieldDataService, types, context);
     }
+
+    public String[] getTypes() {
+        return types;
+    }
 }
@@ -30,9 +30,9 @@ public class PostFilterParseElement implements SearchParseElement
 
     @Override
     public void parse(XContentParser parser, SearchContext context) throws Exception {
-        ParsedQuery postFilter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
+        ParsedQuery postFilter = context.getQueryShardContext().parseInnerFilter(parser);
         if (postFilter != null) {
            context.parsedPostFilter(postFilter);
        }
    }
 }
@@ -30,6 +30,6 @@ public class QueryParseElement implements SearchParseElement
 
     @Override
     public void parse(XContentParser parser, SearchContext context) throws Exception {
-        context.parsedQuery(context.indexShard().getQueryShardContext().parse(parser));
+        context.parsedQuery(context.getQueryShardContext().parse(parser));
     }
 }
@@ -36,10 +36,10 @@ public class RescoreParseElement implements SearchParseElement
     public void parse(XContentParser parser, SearchContext context) throws Exception {
         if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
             while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
-                context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext()));
+                context.addRescore(parseSingleRescoreContext(parser, context.getQueryShardContext()));
             }
         } else {
-            context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext()));
+            context.addRescore(parseSingleRescoreContext(parser, context.getQueryShardContext()));
         }
     }
 
@@ -33,7 +33,6 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.query.ParsedQuery;
-import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
@@ -122,8 +121,7 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
                     final ExecutableScript executable = scriptService.executable(collateScript, vars);
                     final BytesReference querySource = (BytesReference) executable.run();
                     IndexService indexService = indicesService.indexService(suggestion.getIndex());
-                    IndexShard shard = indexService.getShard(suggestion.getShard());
-                    final ParsedQuery parsedQuery = shard.getQueryShardContext().parse(querySource);
+                    final ParsedQuery parsedQuery = indexService.newQueryShardContext().parse(querySource);
                     collateMatch = Lucene.exists(searcher, parsedQuery.query());
                 }
                 if (!collateMatch && !collatePrune) {
@@ -194,7 +194,7 @@ public class ESExceptionTests extends ESTestCase {
 
     public void testToXContent() throws IOException {
         {
-            ElasticsearchException ex = new SearchParseException(new TestSearchContext(), "foo", new XContentLocation(1,0));
+            ElasticsearchException ex = new SearchParseException(new TestSearchContext(null), "foo", new XContentLocation(1,0));
             XContentBuilder builder = XContentFactory.jsonBuilder();
             builder.startObject();
             ex.toXContent(builder, PARAMS);
@@ -439,7 +439,7 @@ public class ExceptionSerializationTests extends ESTestCase {
     }
 
     public void testSearchParseException() throws IOException {
-        SearchContext ctx = new TestSearchContext();
+        SearchContext ctx = new TestSearchContext(null);
         SearchParseException ex = serialize(new SearchParseException(ctx, "foo", new XContentLocation(66, 666)));
         assertEquals("foo", ex.getMessage());
         assertEquals(66, ex.getLineNumber());
@@ -0,0 +1,35 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.bootstrap;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+
+public class BootstrapSettingsTests extends ESTestCase {
+
+    public void testDefaultSettings() {
+        assertTrue(BootstrapSettings.SECURITY_MANAGER_ENABLED_SETTING.get(Settings.EMPTY));
+        assertTrue(BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(Settings.EMPTY));
+        assertFalse(BootstrapSettings.MLOCKALL_SETTING.get(Settings.EMPTY));
+        assertTrue(BootstrapSettings.SECCOMP_SETTING.get(Settings.EMPTY));
+        assertTrue(BootstrapSettings.CTRLHANDLER_SETTING.get(Settings.EMPTY));
+    }
+
+}
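The new test pins the defaults of the BootstrapSettings constants: security manager on, bad-defaults filtering on, mlockall off, seccomp on, and the console ctrl handler on. The class itself is not part of this excerpt; a hypothetical sketch of what the assertions imply, assuming a Setting.boolSetting(key, defaultValue, dynamic, scope) factory. The factory signature and the key strings are assumptions; only the default values come from the test:

    // sketch only; not taken from this diff
    public final class BootstrapSettings {
        private BootstrapSettings() {
        }

        public static final Setting<Boolean> SECURITY_MANAGER_ENABLED_SETTING =
                Setting.boolSetting("security.manager.enabled", true, false, Setting.Scope.CLUSTER);
        public static final Setting<Boolean> SECURITY_FILTER_BAD_DEFAULTS_SETTING =
                Setting.boolSetting("security.manager.filter_bad_defaults", true, false, Setting.Scope.CLUSTER);
        public static final Setting<Boolean> MLOCKALL_SETTING =
                Setting.boolSetting("bootstrap.mlockall", false, false, Setting.Scope.CLUSTER);
        public static final Setting<Boolean> SECCOMP_SETTING =
                Setting.boolSetting("bootstrap.seccomp", true, false, Setting.Scope.CLUSTER);
        public static final Setting<Boolean> CTRLHANDLER_SETTING =
                Setting.boolSetting("bootstrap.ctrlhandler", true, false, Setting.Scope.CLUSTER);
    }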
@@ -36,8 +36,10 @@ public class JavaVersionTests extends ESTestCase {
     }
 
     public void testToString() {
-        JavaVersion javaVersion = JavaVersion.parse("1.7.0");
-        assertThat("1.7.0", is(javaVersion.toString()));
+        JavaVersion javaVersion170 = JavaVersion.parse("1.7.0");
+        assertThat(javaVersion170.toString(), is("1.7.0"));
+        JavaVersion javaVersion9 = JavaVersion.parse("9");
+        assertThat(javaVersion9.toString(), is("9"));
     }
 
     public void testCompare() {
@@ -48,6 +48,11 @@ public class ShardRoutingHelper {
         routing.reinitializeShard();
     }
 
+    public static void reinit(ShardRouting routing, UnassignedInfo.Reason reason) {
+        routing.reinitializeShard();
+        routing.updateUnassignedInfo(new UnassignedInfo(reason, "test_reinit"));
+    }
+
     public static void moveToUnassigned(ShardRouting routing, UnassignedInfo info) {
         routing.moveToUnassigned(info);
     }
@@ -59,16 +59,14 @@ public class MessageDigestsTests extends ESTestCase {
     }
 
     public void testToHexString() throws Exception {
-        for (int i = 0; i < 1024; i++) {
-            BigInteger expected = BigInteger.probablePrime(256, random());
-            byte[] bytes = expected.toByteArray();
-            String hex = MessageDigests.toHexString(bytes);
-            String zeros = new String(new char[2 * bytes.length]).replace("\0", "0");
-            String expectedAsString = expected.toString(16);
-            String expectedHex = zeros.substring(expectedAsString.length()) + expectedAsString;
-            assertEquals(expectedHex, hex);
-            BigInteger actual = new BigInteger(hex, 16);
-            assertEquals(expected, actual);
-        }
+        BigInteger expected = BigInteger.probablePrime(256, random());
+        byte[] bytes = expected.toByteArray();
+        String hex = MessageDigests.toHexString(bytes);
+        String zeros = new String(new char[2 * bytes.length]).replace("\0", "0");
+        String expectedAsString = expected.toString(16);
+        String expectedHex = zeros.substring(expectedAsString.length()) + expectedAsString;
+        assertEquals(expectedHex, hex);
+        BigInteger actual = new BigInteger(hex, 16);
+        assertEquals(expected, actual);
     }
 }
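The padding dance in the test body exists because BigInteger.toString(16) drops leading zero nibbles, while a hex dump must emit exactly two digits per byte. A standalone, JDK-only illustration of the same trick:

    byte[] bytes = {0x00, 0x2a};                                              // two bytes -> four hex digits
    String asString = new BigInteger(1, bytes).toString(16);                  // "2a", leading zeros lost
    String zeros = new String(new char[2 * bytes.length]).replace("\0", "0"); // "0000"
    String padded = zeros.substring(asString.length()) + asString;            // "002a"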
@@ -39,7 +39,6 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
-import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
@@ -87,17 +86,17 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
         assertThat(indexService.getMetaData().getAliases().containsKey("dogs"), equalTo(true));
         assertThat(indexService.getMetaData().getAliases().containsKey("turtles"), equalTo(false));
 
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats").toString(), equalTo("animal:cat"));
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "dogs").toString(), equalTo("animal:cat animal:dog"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "cats").toString(), equalTo("animal:cat"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "cats", "dogs").toString(), equalTo("animal:cat animal:dog"));
 
         // Non-filtering alias should turn off all filters because filters are ORed
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all"), nullValue());
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "all"), nullValue());
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all", "cats"), nullValue());
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "all"), nullValue());
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "cats", "all"), nullValue());
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "all", "cats"), nullValue());
 
         add(indexService, "cats", filter(termQuery("animal", "feline")));
         add(indexService, "dogs", filter(termQuery("animal", "canine")));
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
     }
 
     public void testAliasFilters() throws Exception {
@@ -107,14 +106,14 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
         add(indexService, "cats", filter(termQuery("animal", "cat")));
         add(indexService, "dogs", filter(termQuery("animal", "dog")));
 
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext()), nullValue());
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs").toString(), equalTo("animal:dog"));
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:dog animal:cat"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext()), nullValue());
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "dogs").toString(), equalTo("animal:dog"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:dog animal:cat"));
 
         add(indexService, "cats", filter(termQuery("animal", "feline")));
         add(indexService, "dogs", filter(termQuery("animal", "canine")));
 
-        assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
+        assertThat(indexService.aliasFilter(indexService.newQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
     }
 
     public void testRemovedAliasFilter() throws Exception {
@@ -124,7 +123,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
         add(indexService, "cats", filter(termQuery("animal", "cat")));
         remove(indexService, "cats");
         try {
-            indexService.aliasFilter(shard.getQueryShardContext(), "cats");
+            indexService.aliasFilter(indexService.newQueryShardContext(), "cats");
             fail("Expected InvalidAliasNameException");
         } catch (InvalidAliasNameException e) {
             assertThat(e.getMessage(), containsString("Invalid alias name [cats]"));
@@ -139,7 +138,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
         add(indexService, "dogs", filter(termQuery("animal", "dog")));
 
         try {
-            indexService.aliasFilter(shard.getQueryShardContext(), "unknown");
+            indexService.aliasFilter(indexService.newQueryShardContext(), "unknown");
             fail();
         } catch (InvalidAliasNameException e) {
             // all is well
@@ -272,16 +272,16 @@ public class IndexSettingsTests extends ESTestCase {
 
     public void testTranslogFlushSizeThreshold() {
         ByteSizeValue translogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
-        ByteSizeValue actualValue = ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey());
+        ByteSizeValue actualValue = ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
         IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder()
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), translogFlushThresholdSize.toString())
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), translogFlushThresholdSize.toString())
                 .build());
         IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
         assertEquals(actualValue, settings.getFlushThresholdSize());
         ByteSizeValue newTranslogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
-        ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey());
-        settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), newTranslogFlushThresholdSize.toString()).build()));
+        ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
+        settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), newTranslogFlushThresholdSize.toString()).build()));
         assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize());
     }
 }
@@ -182,7 +182,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
         Settings idxSettings = Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2)
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB))
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB))
                 .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString())
                 .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
                 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
@@ -260,7 +260,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
 
         NumericRangeQuery<Long> rangeQuery;
         try {
-            SearchContext.setCurrent(new TestSearchContext());
+            SearchContext.setCurrent(new TestSearchContext(null));
             rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null);
         } finally {
             SearchContext.removeCurrent();
@@ -286,7 +286,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
 
         NumericRangeQuery<Long> rangeQuery;
         try {
-            SearchContext.setCurrent(new TestSearchContext());
+            SearchContext.setCurrent(new TestSearchContext(null));
             rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null);
         } finally {
             SearchContext.removeCurrent();
@@ -63,7 +63,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
             Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()));
 
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
+            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject(ExternalMetadataMapper.CONTENT_TYPE)
@@ -109,7 +109,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
         MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
 
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
+            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
 
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
@@ -168,7 +168,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
         MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
 
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
+            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
 
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
@@ -236,9 +236,9 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
         IndicesModule indicesModule = new IndicesModule();
         indicesModule.registerMetadataMapper("_dummy", new DummyMetadataFieldMapper.TypeParser());
         final MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
-        MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
+        MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
-            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
+            indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}"));
@@ -50,6 +50,7 @@ import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.VersionUtils;
 import org.junit.Before;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Map;
@@ -280,6 +281,33 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
         }
     }
 
+    public void testSearchAnalyzerSerialization() throws IOException {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "string")
+                        .field("analyzer", "standard")
+                        .field("search_analyzer", "keyword")
+                    .endObject()
+                .endObject().endObject().endObject().string();
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+        assertEquals(mapping, mapper.mappingSource().toString());
+
+        // special case: default index analyzer
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "string")
+                        .field("analyzer", "default")
+                        .field("search_analyzer", "keyword")
+                    .endObject()
+                .endObject().endObject().endObject().string();
+
+        mapper = parser.parse("type", new CompressedXContent(mapping));
+        assertEquals(mapping, mapper.mappingSource().toString());
+    }
+
     private Map<String, Object> getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception {
         FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(fieldName);
         XContentBuilder builder = JsonXContent.contentBuilder().startObject();
@@ -332,15 +332,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     }
 
     protected void setSearchContext(String[] types) {
-        TestSearchContext testSearchContext = new TestSearchContext();
-        testSearchContext.setTypes(types);
+        TestSearchContext testSearchContext = new TestSearchContext(queryShardContext);
+        testSearchContext.getQueryShardContext().setTypes(types);
         SearchContext.setCurrent(testSearchContext);
     }
 
     @After
     public void afterTest() {
         clientInvocationHandler.delegate = null;
-        QueryShardContext.removeTypes();
         SearchContext.removeCurrent();
     }
 
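The base test class mirrors the production change: types are stashed on the TestSearchContext's own QueryShardContext, and the static cleanup call disappears from afterTest(). The new wiring in sketch form (queryShardContext is assumed to be a field of the test class, as the hunk suggests):

    TestSearchContext testSearchContext = new TestSearchContext(queryShardContext);
    testSearchContext.getQueryShardContext().setTypes(types);
    SearchContext.setCurrent(testSearchContext);
    // ... exercise the query builder under test ...
    SearchContext.removeCurrent(); // QueryShardContext.removeTypes() is no longer needed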
@@ -84,7 +84,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
     protected void setSearchContext(String[] types) {
         final MapperService mapperService = queryShardContext().getMapperService();
         final IndexFieldDataService fieldData = indexFieldDataService();
-        TestSearchContext testSearchContext = new TestSearchContext() {
+        TestSearchContext testSearchContext = new TestSearchContext(queryShardContext()) {
 
             @Override
             public MapperService mapperService() {
@@ -96,7 +96,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
                 return fieldData; // need to build / parse inner hits sort fields
             }
         };
-        testSearchContext.setTypes(types);
+        testSearchContext.getQueryShardContext().setTypes(types);
         SearchContext.setCurrent(testSearchContext);
     }
 
@@ -230,11 +230,12 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
     }
     public void testToQueryInnerQueryType() throws IOException {
         String[] searchTypes = new String[]{PARENT_TYPE};
-        QueryShardContext.setTypes(searchTypes);
+        QueryShardContext shardContext = createShardContext();
+        shardContext.setTypes(searchTypes);
         HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(CHILD_TYPE, new IdsQueryBuilder().addIds("id"));
-        Query query = hasChildQueryBuilder.toQuery(createShardContext());
+        Query query = hasChildQueryBuilder.toQuery(shardContext);
         //verify that the context types are still the same as the ones we previously set
-        assertThat(QueryShardContext.getTypes(), equalTo(searchTypes));
+        assertThat(shardContext.getTypes(), equalTo(searchTypes));
         assertLateParsingQuery(query, CHILD_TYPE, "id");
     }
 
@@ -253,7 +254,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
         ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) rewrittenTermsQuery;
         assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class));
         BooleanQuery booleanTermsQuery = (BooleanQuery) constantScoreQuery.getQuery();
-        assertThat(booleanTermsQuery.clauses().size(), equalTo(1));
+        assertThat(booleanTermsQuery.clauses().toString(), booleanTermsQuery.clauses().size(), equalTo(1));
         assertThat(booleanTermsQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD));
         assertThat(booleanTermsQuery.clauses().get(0).getQuery(), instanceOf(TermQuery.class));
         TermQuery termQuery = (TermQuery) booleanTermsQuery.clauses().get(0).getQuery();
@@ -79,7 +79,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
     protected void setSearchContext(String[] types) {
         final MapperService mapperService = queryShardContext().getMapperService();
         final IndexFieldDataService fieldData = indexFieldDataService();
-        TestSearchContext testSearchContext = new TestSearchContext() {
+        TestSearchContext testSearchContext = new TestSearchContext(queryShardContext()) {
 
             @Override
             public MapperService mapperService() {
@@ -91,7 +91,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
                 return fieldData; // need to build / parse inner hits sort fields
             }
         };
-        testSearchContext.setTypes(types);
+        testSearchContext.getQueryShardContext().setTypes(types);
         SearchContext.setCurrent(testSearchContext);
     }
 
@@ -192,11 +192,12 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
 
     public void testToQueryInnerQueryType() throws IOException {
         String[] searchTypes = new String[]{CHILD_TYPE};
-        QueryShardContext.setTypes(searchTypes);
+        QueryShardContext shardContext = createShardContext();
+        shardContext.setTypes(searchTypes);
         HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_TYPE, new IdsQueryBuilder().addIds("id"));
-        Query query = hasParentQueryBuilder.toQuery(createShardContext());
+        Query query = hasParentQueryBuilder.toQuery(shardContext);
         //verify that the context types are still the same as the ones we previously set
-        assertThat(QueryShardContext.getTypes(), equalTo(searchTypes));
+        assertThat(shardContext.getTypes(), equalTo(searchTypes));
         HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id");
     }
 
@@ -60,7 +60,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
     protected void setSearchContext(String[] types) {
         final MapperService mapperService = queryShardContext().getMapperService();
         final IndexFieldDataService fieldData = indexFieldDataService();
-        TestSearchContext testSearchContext = new TestSearchContext() {
+        TestSearchContext testSearchContext = new TestSearchContext(queryShardContext()) {
 
             @Override
             public MapperService mapperService() {
@@ -72,7 +72,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
                 return fieldData; // need to build / parse inner hits sort fields
             }
         };
-        testSearchContext.setTypes(types);
+        testSearchContext.getQueryShardContext().setTypes(types);
         SearchContext.setCurrent(testSearchContext);
     }
 
@@ -139,45 +139,45 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
 
     public void testFromJson() throws IOException {
         String json =
-                "{\n" +
-                " \"nested\" : {\n" +
-                " \"query\" : {\n" +
-                " \"bool\" : {\n" +
-                " \"must\" : [ {\n" +
-                " \"match\" : {\n" +
-                " \"obj1.name\" : {\n" +
-                " \"query\" : \"blue\",\n" +
-                " \"type\" : \"boolean\",\n" +
-                " \"operator\" : \"OR\",\n" +
-                " \"slop\" : 0,\n" +
-                " \"prefix_length\" : 0,\n" +
-                " \"max_expansions\" : 50,\n" +
-                " \"fuzzy_transpositions\" : true,\n" +
-                " \"lenient\" : false,\n" +
-                " \"zero_terms_query\" : \"NONE\",\n" +
-                " \"boost\" : 1.0\n" +
-                " }\n" +
-                " }\n" +
-                " }, {\n" +
-                " \"range\" : {\n" +
-                " \"obj1.count\" : {\n" +
-                " \"from\" : 5,\n" +
-                " \"to\" : null,\n" +
-                " \"include_lower\" : false,\n" +
-                " \"include_upper\" : true,\n" +
-                " \"boost\" : 1.0\n" +
-                " }\n" +
-                " }\n" +
-                " } ],\n" +
-                " \"disable_coord\" : false,\n" +
-                " \"adjust_pure_negative\" : true,\n" +
-                " \"boost\" : 1.0\n" +
-                " }\n" +
-                " },\n" +
-                " \"path\" : \"obj1\",\n" +
-                " \"score_mode\" : \"avg\",\n" +
-                " \"boost\" : 1.0\n" +
-                " }\n" +
+                "{\n" +
+                " \"nested\" : {\n" +
+                " \"query\" : {\n" +
+                " \"bool\" : {\n" +
+                " \"must\" : [ {\n" +
+                " \"match\" : {\n" +
+                " \"obj1.name\" : {\n" +
+                " \"query\" : \"blue\",\n" +
+                " \"type\" : \"boolean\",\n" +
+                " \"operator\" : \"OR\",\n" +
+                " \"slop\" : 0,\n" +
+                " \"prefix_length\" : 0,\n" +
+                " \"max_expansions\" : 50,\n" +
+                " \"fuzzy_transpositions\" : true,\n" +
+                " \"lenient\" : false,\n" +
+                " \"zero_terms_query\" : \"NONE\",\n" +
+                " \"boost\" : 1.0\n" +
+                " }\n" +
+                " }\n" +
+                " }, {\n" +
+                " \"range\" : {\n" +
+                " \"obj1.count\" : {\n" +
+                " \"from\" : 5,\n" +
+                " \"to\" : null,\n" +
+                " \"include_lower\" : false,\n" +
+                " \"include_upper\" : true,\n" +
+                " \"boost\" : 1.0\n" +
+                " }\n" +
+                " }\n" +
+                " } ],\n" +
+                " \"disable_coord\" : false,\n" +
+                " \"adjust_pure_negative\" : true,\n" +
+                " \"boost\" : 1.0\n" +
+                " }\n" +
+                " },\n" +
+                " \"path\" : \"obj1\",\n" +
+                " \"score_mode\" : \"avg\",\n" +
+                " \"boost\" : 1.0\n" +
+                " }\n" +
                 "}";
 
         NestedQueryBuilder parsed = (NestedQueryBuilder) parseQuery(json);
@@ -64,7 +64,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
     protected void setSearchContext(String[] types) {
         final MapperService mapperService = queryShardContext().getMapperService();
         final IndexFieldDataService fieldData = indexFieldDataService();
-        TestSearchContext testSearchContext = new TestSearchContext() {
+        TestSearchContext testSearchContext = new TestSearchContext(queryShardContext()) {
 
             @Override
             public MapperService mapperService() {
@@ -76,7 +76,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
                 return fieldData; // need to build / parse inner hits sort fields
             }
         };
-        testSearchContext.setTypes(types);
+        testSearchContext.getQueryShardContext().setTypes(types);
         SearchContext.setCurrent(testSearchContext);
     }
 
@@ -68,7 +68,7 @@ public class CustomQueryParserIT extends ESIntegTestCase {
 
     private static QueryShardContext queryShardContext() {
         IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class);
-        return indicesService.indexServiceSafe("index").getQueryShardContext();
+        return indicesService.indexServiceSafe("index").newQueryShardContext();
     }
 
     //see #11120
@@ -69,7 +69,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
     }
 
     public void testCrossFieldMultiMatchQuery() throws IOException {
-        QueryShardContext queryShardContext = indexService.getShard(0).getQueryShardContext();
+        QueryShardContext queryShardContext = indexService.newQueryShardContext();
         queryShardContext.setAllowUnmappedFields(true);
         Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
         try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
@@ -70,7 +70,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.env.ShardLock;
-import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.NodeServicesProvider;
@@ -705,7 +704,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         IndexService test = indicesService.indexService("test");
         IndexShard shard = test.getShardOrNull(0);
         assertFalse(shard.shouldFlush());
-        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get();
+        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get();
         client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get();
         assertFalse(shard.shouldFlush());
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null);
@@ -721,7 +720,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         shard.getEngine().getTranslog().sync();
         long size = shard.getEngine().getTranslog().sizeInBytes();
         logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration());
-        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(size, ByteSizeUnit.BYTES))
+        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(size, ByteSizeUnit.BYTES))
                 .build()).get();
         client().prepareDelete("test", "test", "2").get();
         logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration());
@@ -739,7 +738,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         IndexService test = indicesService.indexService("test");
         final IndexShard shard = test.getShardOrNull(0);
         assertFalse(shard.shouldFlush());
-        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get();
+        client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get();
         client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get();
         assertFalse(shard.shouldFlush());
         final AtomicBoolean running = new AtomicBoolean(true);
@@ -865,10 +864,11 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         IndicesService indicesService = getInstanceFromNode(IndicesService.class);
         IndexService test = indicesService.indexService("test");
         final IndexShard shard = test.getShardOrNull(0);
-
+        int translogOps = 1;
         client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get();
         if (randomBoolean()) {
             client().admin().indices().prepareFlush().get();
+            translogOps = 0;
         }
         ShardRouting routing = new ShardRouting(shard.routingEntry());
         test.removeShard(0, "b/c simon says so");
@@ -878,6 +878,10 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
         newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode));
         assertTrue(newShard.recoverFromStore(localNode));
+        assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations());
+        assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperations());
+        assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperationsOnStart());
+        assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
         routing = new ShardRouting(routing);
         ShardRoutingHelper.moveToStarted(routing);
         newShard.updateRoutingEntry(routing, true);
@@ -885,6 +889,36 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         assertHitCount(response, 1);
     }
 
+    public void testRecoverFromCleanStore() throws IOException {
+        createIndex("test");
+        ensureGreen();
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService("test");
+        final IndexShard shard = test.getShardOrNull(0);
+        client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get();
+        if (randomBoolean()) {
+            client().admin().indices().prepareFlush().get();
+        }
+        ShardRouting routing = new ShardRouting(shard.routingEntry());
+        test.removeShard(0, "b/c simon says so");
+        ShardRoutingHelper.reinit(routing, UnassignedInfo.Reason.INDEX_CREATED);
+        IndexShard newShard = test.createShard(routing);
+        newShard.updateRoutingEntry(routing, false);
+        DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
+        newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode,
+                localNode));
+        assertTrue(newShard.recoverFromStore(localNode));
+        assertEquals(0, newShard.recoveryState().getTranslog().recoveredOperations());
+        assertEquals(0, newShard.recoveryState().getTranslog().totalOperations());
+        assertEquals(0, newShard.recoveryState().getTranslog().totalOperationsOnStart());
+        assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
+        routing = new ShardRouting(routing);
+        ShardRoutingHelper.moveToStarted(routing);
+        newShard.updateRoutingEntry(routing, true);
+        SearchResponse response = client().prepareSearch().get();
+        assertHitCount(response, 0);
+    }
+
     public void testFailIfIndexNotPresentInRecoverFromStore() throws IOException {
         createIndex("test");
         ensureGreen();
@@ -1187,7 +1221,8 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         List<Translog.Operation> operations = new ArrayList<>();
         operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes()));
         newShard.prepareForIndexRecovery();
-        newShard.performTranslogRecovery(true);
+        newShard.recoveryState().getTranslog().totalOperations(operations.size());
+        newShard.skipTranslogRecovery();
         newShard.performBatchRecovery(operations);
         assertFalse(newShard.getTranslog().syncNeeded());
     }
@@ -109,7 +109,7 @@ public class ShardPathTests extends ESTestCase {
 
     public void testGetRootPaths() throws IOException {
         boolean useCustomDataPath = randomBoolean();
-        final Settings indexSetttings;
+        final Settings indexSettings;
         final Settings nodeSettings;
         Settings.Builder indexSettingsBuilder = settingsBuilder()
                 .put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF")
@@ -118,7 +118,7 @@ public class ShardPathTests extends ESTestCase {
         if (useCustomDataPath) {
             final Path path = createTempDir();
             final boolean includeNodeId = randomBoolean();
-            indexSetttings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build();
+            indexSettings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build();
             nodeSettings = settingsBuilder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath())
                     .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build();
             if (includeNodeId) {
@@ -128,7 +128,7 @@ public class ShardPathTests extends ESTestCase {
             }
         } else {
             customPath = null;
-            indexSetttings = indexSettingsBuilder.build();
+            indexSettings = indexSettingsBuilder.build();
             nodeSettings = Settings.EMPTY;
         }
         try (final NodeEnvironment env = newNodeEnvironment(nodeSettings)) {
@@ -136,7 +136,7 @@ public class ShardPathTests extends ESTestCase {
             Path[] paths = env.availableShardPaths(shardId);
             Path path = randomFrom(paths);
             ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path);
-            ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSetttings));
+            ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings));
             boolean found = false;
             for (Path p : env.nodeDataPaths()) {
                 if (p.equals(shardPath.getRootStatePath())) {
@@ -146,7 +146,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1")
                 .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
                 .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
         ));
         ensureGreen();
         disableAllocation("test");
@@ -250,7 +250,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
                 .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
                 .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
         ));
         ensureGreen();
         IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
@@ -475,7 +475,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test
                 .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
                 .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
         ));
         ensureGreen();
         IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
@@ -529,7 +529,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, cluster().numDataNodes() - 1)
                 .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
                 .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose
-                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
+                .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files
         ));
         ensureGreen();
         IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
@@ -167,13 +167,13 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
 
     /** Disables translog flushing for the specified index */
     private static void disableTranslogFlush(String index) {
-        Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).build();
+        Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).build();
         client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
     }
 
     /** Enables translog flushing for the specified index */
     private static void enableTranslogFlush(String index) {
-        Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)).build();
+        Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)).build();
         client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
     }
 }
@@ -154,7 +154,7 @@ public class FlushIT extends ESIntegTestCase {
         createIndex("test");
 
         client().admin().indices().prepareUpdateSettings("test").setSettings(
-                Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1))
+                Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1))
                 .get();
         ensureGreen();
         final AtomicBoolean stop = new AtomicBoolean(false);
@@ -104,8 +104,8 @@ public class PercolateDocumentParserTests extends ESTestCase {
                 .endObject();
         Mockito.when(request.source()).thenReturn(source.bytes());
 
-        PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
-        ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
+        PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext);
+        ParsedDocument parsedDocument = parser.parse(request, context, mapperService);
         assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
     }
 
@@ -123,8 +123,8 @@ public class PercolateDocumentParserTests extends ESTestCase {
                 .endObject();
         Mockito.when(request.source()).thenReturn(source.bytes());
 
-        PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
-        ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
+        PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext);
+        ParsedDocument parsedDocument = parser.parse(request, context, mapperService);
         assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
         assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1"))));
         assertThat(context.trackScores(), is(true));
@ -147,8 +147,8 @@ public class PercolateDocumentParserTests extends ESTestCase {
Mockito.when(request.source()).thenReturn(source.bytes());
Mockito.when(request.docSource()).thenReturn(docSource.bytes());

PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext);
ParsedDocument parsedDocument = parser.parse(request, context, mapperService);
assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1"))));
assertThat(context.trackScores(), is(true));
@ -174,9 +174,9 @@ public class PercolateDocumentParserTests extends ESTestCase {
Mockito.when(request.source()).thenReturn(source.bytes());
Mockito.when(request.docSource()).thenReturn(docSource.bytes());

PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService, queryShardContext);
try {
parser.parse(request, context, mapperService, queryShardContext);
parser.parse(request, context, mapperService);
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Can't specify the document to percolate in the source of the request and as document id"));
}
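Across these four tests the QueryShardContext argument moves off parse() and onto the PercolateContext constructor. A side-by-side sketch of the change, with `target` standing in for the SearchShardTarget built inline above:

// Before: every parse() call carried the QueryShardContext.
PercolateContext before = new PercolateContext(request, target, mapperService);
ParsedDocument oldStyle = parser.parse(request, before, mapperService, queryShardContext);

// After: the context owns the QueryShardContext from construction.
PercolateContext after = new PercolateContext(request, target, mapperService, queryShardContext);
ParsedDocument newStyle = parser.parse(request, after, mapperService);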
@ -1351,12 +1351,7 @@ public class PercolatorIT extends ESIntegTestCase {
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));

PercolateResponse.Match[] matches = response.getMatches();
Arrays.sort(matches, new Comparator<PercolateResponse.Match>() {
@Override
public int compare(PercolateResponse.Match a, PercolateResponse.Match b) {
return a.getId().compareTo(b.getId());
}
});
Arrays.sort(matches, (a, b) -> a.getId().compareTo(b.getId()));

assertThat(matches[0].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"));
assertThat(matches[1].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>"));
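The anonymous Comparator collapses to a lambda here. An equivalent, arguably more declarative form uses the java.util.Comparator factory, assuming getId() is accessible as shown above:

Arrays.sort(matches, Comparator.comparing(PercolateResponse.Match::getId)); // same ordering, key-extractor style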
@ -27,7 +27,7 @@ import org.elasticsearch.test.TestSearchContext;

public class GeoHashGridParserTests extends ESTestCase {
public void testParseValidFromInts() throws Exception {
SearchContext searchContext = new TestSearchContext();
SearchContext searchContext = new TestSearchContext(null);
int precision = randomIntBetween(1, 12);
XContentParser stParser = JsonXContent.jsonXContent.createParser(
"{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}");

@ -37,7 +37,7 @@ public class GeoHashGridParserTests extends ESTestCase {
}

public void testParseValidFromStrings() throws Exception {
SearchContext searchContext = new TestSearchContext();
SearchContext searchContext = new TestSearchContext(null);
int precision = randomIntBetween(1, 12);
XContentParser stParser = JsonXContent.jsonXContent.createParser(
"{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}");

@ -47,7 +47,7 @@ public class GeoHashGridParserTests extends ESTestCase {
}

public void testParseErrorOnNonIntPrecision() throws Exception {
SearchContext searchContext = new TestSearchContext();
SearchContext searchContext = new TestSearchContext(null);
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"my_loc\", \"precision\":\"2.0\"}");
GeoHashGridParser parser = new GeoHashGridParser();
try {

@ -59,7 +59,7 @@ public class GeoHashGridParserTests extends ESTestCase {
}

public void testParseErrorOnBooleanPrecision() throws Exception {
SearchContext searchContext = new TestSearchContext();
SearchContext searchContext = new TestSearchContext(null);
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"my_loc\", \"precision\":false}");
GeoHashGridParser parser = new GeoHashGridParser();
try {

@ -71,7 +71,7 @@ public class GeoHashGridParserTests extends ESTestCase {
}

public void testParseErrorOnPrecisionOutOfRange() throws Exception {
SearchContext searchContext = new TestSearchContext();
SearchContext searchContext = new TestSearchContext(null);
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"my_loc\", \"precision\":\"13\"}");
GeoHashGridParser parser = new GeoHashGridParser();
try {

@ -81,4 +81,4 @@ public class GeoHashGridParserTests extends ESTestCase {
assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getMessage());
}
}
}
}
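Every one of these tests now passes null for the new QueryShardContext constructor argument, since geohash-grid parsing never touches it. A minimal sketch of the pattern, with names as in the hunks above:

// A null QueryShardContext is fine here: the parser only reads precision/size values from JSON.
SearchContext searchContext = new TestSearchContext(null);
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"my_loc\", \"precision\":\"13\"}");
// precision must land in [1, 12]; "13" is expected to fail with
// "Invalid geohash aggregation precision of 13. Must be between 1 and 12."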
@ -69,6 +69,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
*/
public class SignificanceHeuristicTests extends ESTestCase {
static class SignificantTermsTestSearchContext extends TestSearchContext {

public SignificantTermsTestSearchContext() {
super(null);
}

@Override
public int numberOfShards() {
return 1;
@ -40,7 +40,7 @@ public class FieldDataFieldsTests extends ESTestCase {
parser.nextToken();
parser.nextToken();
parser.nextToken();
SearchContext context = new TestSearchContext();
SearchContext context = new TestSearchContext(null);
parseElement.parse(parser, context);
}

@ -52,7 +52,7 @@ public class FieldDataFieldsTests extends ESTestCase {
parser.nextToken();
parser.nextToken();
parser.nextToken();
SearchContext context = new TestSearchContext();
SearchContext context = new TestSearchContext(null);
parseElement.parse(parser, context);
}

@ -69,7 +69,7 @@ public class FieldDataFieldsTests extends ESTestCase {
parser.nextToken();
parser.nextToken();
parser.nextToken();
SearchContext context = new TestSearchContext();
SearchContext context = new TestSearchContext(null);
try {
parseElement.parse(parser, context);
fail("Expected IllegalStateException");
@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
public class QueryPhaseTests extends ESTestCase {

private void countTestCase(Query query, IndexReader reader, boolean shouldCollect) throws Exception {
TestSearchContext context = new TestSearchContext();
TestSearchContext context = new TestSearchContext(null);
context.parsedQuery(new ParsedQuery(query));
context.setSize(0);

@ -120,7 +120,7 @@ public class QueryPhaseTests extends ESTestCase {
}

public void testPostFilterDisablesCountOptimization() throws Exception {
TestSearchContext context = new TestSearchContext();
TestSearchContext context = new TestSearchContext(null);
context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
context.setSize(0);

@ -143,7 +143,7 @@ public class QueryPhaseTests extends ESTestCase {
}

public void testMinScoreDisablesCountOptimization() throws Exception {
TestSearchContext context = new TestSearchContext();
TestSearchContext context = new TestSearchContext(null);
context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
context.setSize(0);
File diff suppressed because it is too large
@ -36,7 +36,7 @@ public class SortParserTests extends ESSingleNodeTestCase {
mapping.startObject().startObject("type").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject().endObject();
IndexService indexService = createIndex("testidx", Settings.settingsBuilder().build(), "type", mapping);
TestSearchContext context = (TestSearchContext) createSearchContext(indexService);
context.setTypes("type");
context.getQueryShardContext().setTypes("type");

XContentBuilder sortBuilder = jsonBuilder();
sortBuilder.startObject();
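The types filter moves off the SearchContext and onto its QueryShardContext, matching the shard-level query APIs; TestSearchContext's own setTypes/types overrides are removed further down in this diff. A minimal sketch of the new access path, names as in this hunk:

TestSearchContext context = (TestSearchContext) createSearchContext(indexService);
context.getQueryShardContext().setTypes("type"); // types now live on the QueryShardContext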
@ -112,6 +112,7 @@ The `GET` HTTP verb for `/_forcemerge` is no longer supported, please use the
==== Deprecated queries removed

The following deprecated queries have been removed:

* `filtered`: use `bool` query instead, which supports `filter` clauses too
* `and`: use `must` clauses in a `bool` query instead
* `or`: use `should` clauses in a `bool` query instead
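For reference, a sketch of the replacements using the Java QueryBuilders API; the field names are illustrative only:

import static org.elasticsearch.index.query.QueryBuilders.*;

// filtered(query, filter) -> bool query with a filter clause
QueryBuilder insteadOfFiltered = boolQuery()
        .must(matchQuery("title", "elasticsearch"))   // scoring part
        .filter(termQuery("status", "published"));    // non-scoring filter part

// and -> must clauses; or -> should clauses
QueryBuilder insteadOfAnd = boolQuery()
        .must(termQuery("status", "published"))
        .must(rangeQuery("year").gte(2015));
QueryBuilder insteadOfOr = boolQuery()
        .should(termQuery("tag", "java"))
        .should(termQuery("tag", "search"));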
File diff suppressed because it is too large
@ -60,7 +60,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser()),
Collections.emptyMap());
parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
}

public void testDefaults() throws Exception {

@ -136,7 +136,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
indexService = createIndex("test_bwc", settings);
parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")

@ -152,7 +152,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
indexService = createIndex("test_bwc", settings);
parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
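The method reference handed to DocumentMapperParser switches from a getter to a factory: each call to the supplier now yields a fresh QueryShardContext instead of a shared one. A minimal sketch, assuming the supplier-shaped parameter this diff implies:

java.util.function.Supplier<QueryShardContext> contextSupplier = indexService::newQueryShardContext;
QueryShardContext perParseContext = contextSupplier.get(); // new instance per call, no shared mutable state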
@ -66,7 +66,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
Map<String, MetadataFieldMapper.TypeParser> metadataMappers = new HashMap<>();
IndicesModule indices = new IndicesModule();
indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::getQueryShardContext);
mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext);
parser = mapperService.documentMapperParser();
}

@ -98,7 +98,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
Collections.emptyMap(),
Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()));
parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::getQueryShardContext);
indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));

BytesReference source = XContentFactory.jsonBuilder()
@ -515,10 +515,10 @@ public abstract class ESIntegTestCase extends ESTestCase {

private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) {
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB));
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB));
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), RandomPicks.randomFrom(random, Translog.Durability.values()));
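Note the second branch can overwrite the first: when both coin flips land true, the 1 PB "just don't flush" value wins. A hedged sketch of reading the randomized values back, assuming these constants are typed Setting instances exposing a get(Settings) accessor:

Settings settings = builder.build();
ByteSizeValue flushThreshold = IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.get(settings); // whichever branch won
Translog.Durability durability = IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.get(settings);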
@ -18,7 +18,6 @@
*/
package org.elasticsearch.test;

import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;

@ -27,7 +26,6 @@ import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalysisService;

@ -38,6 +36,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;

@ -47,7 +46,6 @@ import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;

@ -62,11 +60,9 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class TestSearchContext extends SearchContext {
@ -80,6 +76,7 @@ public class TestSearchContext extends SearchContext {
final IndexShard indexShard;
final Counter timeEstimateCounter = Counter.newCounter();
final QuerySearchResult queryResult = new QuerySearchResult();
final QueryShardContext queryShardContext;
ScriptService scriptService;
ParsedQuery originalQuery;
ParsedQuery postFilter;

@ -89,7 +86,6 @@ public class TestSearchContext extends SearchContext {
ContextIndexSearcher searcher;
int size;
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
private String[] types;
private SearchContextAggregations aggregations;

private final long originNanoTime = System.nanoTime();

@ -105,9 +101,10 @@ public class TestSearchContext extends SearchContext {
this.threadPool = threadPool;
this.indexShard = indexService.getShardOrNull(0);
this.scriptService = scriptService;
queryShardContext = indexService.newQueryShardContext();
}

public TestSearchContext() {
public TestSearchContext(QueryShardContext queryShardContext) {
super(ParseFieldMatcher.STRICT);
this.pageCacheRecycler = null;
this.bigArrays = null;

@ -117,10 +114,7 @@ public class TestSearchContext extends SearchContext {
this.fixedBitSetFilterCache = null;
this.indexShard = null;
scriptService = null;
}

public void setTypes(String... types) {
this.types = types;
this.queryShardContext = queryShardContext;
}

@Override

@ -167,16 +161,6 @@ public class TestSearchContext extends SearchContext {
return 1;
}

@Override
public boolean hasTypes() {
return false;
}

@Override
public String[] types() {
return new String[0];
}

@Override
public float queryBoost() {
return 0;

@ -590,4 +574,9 @@ public class TestSearchContext extends SearchContext {
@Override
public Map<Class<?>, Collector> queryCollectors() {return queryCollectors;}

@Override
public QueryShardContext getQueryShardContext() {
return queryShardContext;
}

}
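Taken together, TestSearchContext now carries a QueryShardContext from construction: the IndexService-based constructor builds a fresh one via indexService.newQueryShardContext(), the lightweight constructor takes one (or null) explicitly, and the types filter moves onto that context. A minimal sketch of the lightweight path, assuming an IndexService from the test infrastructure:

// Hypothetical test setup; the constructor and getter are per this diff.
TestSearchContext context = new TestSearchContext(indexService.newQueryShardContext());
context.getQueryShardContext().setTypes("type"); // replaces the removed setTypes/types overrides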