Merge pull request #14518 from s1monw/simplify_index_level_services
Remove guice from the index level
commit b328e26160
MetaDataCreateIndexService.java

@@ -55,6 +55,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;

@@ -95,12 +96,14 @@ public class MetaDataCreateIndexService extends AbstractComponent {
private final AliasValidator aliasValidator;
private final IndexTemplateFilter indexTemplateFilter;
private final Environment env;
private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataCreateIndexService(Settings settings, ClusterService clusterService,
IndicesService indicesService, AllocationService allocationService,
Version version, AliasValidator aliasValidator,
Set<IndexTemplateFilter> indexTemplateFilters, Environment env) {
Set<IndexTemplateFilter> indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;

@@ -108,6 +111,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
this.version = version;
this.aliasValidator = aliasValidator;
this.env = env;
this.nodeServicesProvider = nodeServicesProvider;

if (indexTemplateFilters.isEmpty()) {
this.indexTemplateFilter = DEFAULT_INDEX_TEMPLATE_FILTER;

@@ -295,7 +299,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
// Set up everything, now locally create the index to see that things are ok, and apply
final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
// create the index here (on the master) to validate it can be created, as well as adding the mapping
indicesService.createIndex(tmpImd, Collections.EMPTY_LIST);
indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.EMPTY_LIST);
indexCreated = true;
// now add the mappings
IndexService indexService = indicesService.indexServiceSafe(request.index());

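The pattern repeated throughout this commit: services that used to be materialized by an index-level Guice injector are now handed down explicitly through constructors and method parameters (here, a NodeServicesProvider passed into createIndex). A minimal sketch of that style, using hypothetical names rather than the actual Elasticsearch signatures:

```java
// Hypothetical sketch: explicit constructor wiring instead of an index-level injector.
final class NodeServices {                    // stands in for NodeServicesProvider
    final String threadPoolName = "management";
}

final class IndexCreator {
    private final NodeServices nodeServices;

    IndexCreator(NodeServices nodeServices) {
        this.nodeServices = nodeServices;     // dependency arrives once; no injector.getInstance(...)
    }

    String describeTemporaryIndex(String indexName) {
        // node-level services travel with the call instead of being looked up per index
        return indexName + " uses pool " + nodeServices.threadPoolName;
    }
}
```
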
MetaDataIndexAliasesService.java

@@ -33,6 +33,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.IndicesService;

@@ -49,12 +50,15 @@ public class MetaDataIndexAliasesService extends AbstractComponent {

private final AliasValidator aliasValidator;

private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService, AliasValidator aliasValidator) {
public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService, AliasValidator aliasValidator, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;
this.aliasValidator = aliasValidator;
this.nodeServicesProvider = nodeServicesProvider;
}

public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {

@@ -95,7 +99,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
if (indexService == null) {
// temporarily create the index and add mappings so we can parse the filter
try {
indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) {
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false);
}

MetaDataIndexUpgradeService.java

@@ -146,6 +146,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
"index.translog.flush_threshold_period",
"index.translog.interval",
"index.translog.sync_interval",
"index.shard.inactive_time",
UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING));

/**

MetaDataMappingService.java

@@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeMappingException;

@@ -57,12 +58,14 @@ public class MetaDataMappingService extends AbstractComponent {
private final List<MappingTask> refreshOrUpdateQueue = new ArrayList<>();
private long refreshOrUpdateInsertOrder;
private long refreshOrUpdateProcessedInsertOrder;
private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService) {
public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;
this.nodeServicesProvider = nodeServicesProvider;
}

static class MappingTask {

@@ -172,7 +175,7 @@ public class MetaDataMappingService extends AbstractComponent {
IndexService indexService = indicesService.indexService(index);
if (indexService == null) {
// we need to create the index here, and add the current mapping to it, so we can merge
indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
removeIndex = true;
Set<String> typesToIntroduce = new HashSet<>();
for (MappingTask task : tasks) {

@@ -350,7 +353,7 @@ public class MetaDataMappingService extends AbstractComponent {
continue;
}
final IndexMetaData indexMetaData = currentState.metaData().index(index);
IndexService indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
indicesToClose.add(indexMetaData.getIndex());
// make sure to add custom default mapping if exists
if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) {

TimeValue.java

@@ -231,7 +231,7 @@ public class TimeValue implements Streamable {

public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) {
settingName = Objects.requireNonNull(settingName);
assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName);
assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName;
if (sValue == null) {
return defaultValue;
}

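The only change here appends the offending value to the assertion, so a failure reports which setting tripped it. As a reminder of the Java form (a generic sketch, not the Elasticsearch code):

```java
// Generic sketch of Java's two-argument assert: the expression after ':' becomes the
// AssertionError detail message, so the failing input is visible in the stack trace.
public final class AssertDemo {
    static void checkSetting(String settingName, java.util.Set<String> knownTimeSettings) {
        assert settingName.startsWith("index.") == false || knownTimeSettings.contains(settingName) : settingName;
    }

    public static void main(String[] args) {
        // run with `java -ea AssertDemo` so assertions are enabled
        checkSetting("index.translog.sync_interval",
                java.util.Collections.singleton("index.translog.sync_interval"));
    }
}
```
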
CompositeIndexEventListener.java

@@ -125,6 +125,18 @@ final class CompositeIndexEventListener implements IndexEventListener {
}
}

@Override
public void onShardActive(IndexShard indexShard) {
for (IndexEventListener listener : listeners) {
try {
listener.onShardActive(indexShard);
} catch (Throwable t) {
logger.warn("[{}] failed to invoke on shard active callback", t, indexShard.shardId().getId());
throw t;
}
}
}

@Override
public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
for (IndexEventListener listener : listeners) {

IndexModule.java

@@ -20,20 +20,14 @@
package org.elasticsearch.index;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.engine.InternalEngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;

@@ -41,7 +35,7 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;

import java.io.IOException;

@@ -61,7 +55,7 @@ import java.util.function.Consumer;
* <li>Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}</li>
* </ul>
*/
public final class IndexModule extends AbstractModule {
public final class IndexModule {

public static final String STORE_TYPE = "index.store.type";
public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity";

@@ -72,10 +66,9 @@ public final class IndexModule extends AbstractModule {
public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
private final IndexSettings indexSettings;
private final IndexStoreConfig indexStoreConfig;
private final IndicesQueryCache indicesQueryCache;
private final AnalysisRegistry analysisRegistry;
// pkg private so tests can mock
Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;
final SetOnce<EngineFactory> engineFactory = new SetOnce<>();
private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
private final Set<Consumer<Settings>> settingsConsumers = new HashSet<>();
private final Set<IndexEventListener> indexEventListeners = new HashSet<>();

@@ -83,14 +76,11 @@ public final class IndexModule extends AbstractModule {
private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndicesQueryCache, QueryCache>> queryCaches = new HashMap<>();
private IndicesWarmer indicesWarmer;

public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, IndicesQueryCache indicesQueryCache, IndicesWarmer warmer, AnalysisRegistry analysisRegistry) {
public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, AnalysisRegistry analysisRegistry) {
this.indexStoreConfig = indexStoreConfig;
this.indexSettings = indexSettings;
this.indicesQueryCache = indicesQueryCache;
this.indicesWarmer = warmer;
this.analysisRegistry = analysisRegistry;
registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache::new);
registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a));

@@ -220,50 +210,6 @@ public final class IndexModule extends AbstractModule {
return false;
}

@Override
protected void configure() {
final IndexSettings settings = indexSettings.newWithListener(settingsConsumers);
try {
bind(AnalysisService.class).toInstance(analysisRegistry.build(settings));
} catch (IOException e) {
throw new ElasticsearchException("can't create analysis service", e);
}
bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton();
bind(IndexSearcherWrapperFactory.class).toInstance(indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get());
bind(IndexEventListener.class).toInstance(freeze());
bind(IndexService.class).asEagerSingleton();
bind(IndexServicesProvider.class).asEagerSingleton();
bind(MapperService.class).asEagerSingleton();
bind(IndexFieldDataService.class).asEagerSingleton();
bind(IndexSettings.class).toInstance(settings);

final String storeType = settings.getSettings().get(STORE_TYPE);
final IndexStore store;
if (storeType == null || isBuiltinType(storeType)) {
store = new IndexStore(settings, indexStoreConfig);
} else {
BiFunction<IndexSettings, IndexStoreConfig, IndexStore> factory = storeTypes.get(storeType);
if (factory == null) {
throw new IllegalArgumentException("Unknown store type [" + storeType + "]");
}
store = factory.apply(settings, indexStoreConfig);
if (store == null) {
throw new IllegalStateException("store must not be null");
}
}

final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, indicesWarmer);
QueryCache queryCache = queryCacheProvider.apply(settings, indicesQueryCache);
IndexCache indexCache = new IndexCache(settings, queryCache, bitsetFilterCache);
bind(QueryCache.class).toInstance(queryCache);
bind(IndexCache.class).toInstance(indexCache);
bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexStore.class).toInstance(store);
bind(SimilarityService.class).toInstance(new SimilarityService(settings, similarities));
}

public enum Type {
NIOFS,
MMAPFS,

@@ -291,4 +237,29 @@ public final class IndexModule extends AbstractModule {
*/
IndexSearcherWrapper newWrapper(final IndexService indexService);
}

public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter, NodeServicesProvider servicesProvider) throws IOException {
final IndexSettings settings = indexSettings.newWithListener(settingsConsumers);
IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get();
IndexEventListener eventListener = freeze();
final String storeType = settings.getSettings().get(STORE_TYPE);
final IndexStore store;
if (storeType == null || isBuiltinType(storeType)) {
store = new IndexStore(settings, indexStoreConfig);
} else {
BiFunction<IndexSettings, IndexStoreConfig, IndexStore> factory = storeTypes.get(storeType);
if (factory == null) {
throw new IllegalArgumentException("Unknown store type [" + storeType + "]");
}
store = factory.apply(settings, indexStoreConfig);
if (store == null) {
throw new IllegalStateException("store must not be null");
}
}
final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
final BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
final QueryCache queryCache = queryCacheProvider.apply(settings, servicesProvider.getIndicesQueryCache());
return new IndexService(settings, environment, new SimilarityService(settings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(),
servicesProvider, queryCache, store, eventListener, searcherWrapperFactory);
}
}

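The Guice `configure()` block full of `bind(...).toInstance(...)` calls is replaced by a plain `newIndexService(...)` factory method that builds the object graph by hand. A hedged sketch of the "module as plain factory" shape, with illustrative names rather than the real signatures:

```java
// Hypothetical sketch: the module collects configuration and registered extensions,
// then builds the service with constructors instead of Guice bindings.
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

final class SketchIndexModule {
    private final Map<String, Function<String, String>> storeFactories = new HashMap<>();

    void registerStore(String type, Function<String, String> factory) {
        storeFactories.put(type, factory);            // plugins register extension points up front
    }

    String newIndexService(String storeType, String indexName) {
        // resolve the extension and wire the dependency explicitly; no injector involved
        Function<String, String> factory =
                storeFactories.getOrDefault(storeType, name -> "fs store for " + name);
        return factory.apply(indexName);
    }
}
```
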
IndexService.java

@@ -35,9 +35,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;

@@ -65,17 +68,21 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
/**
 *
 */
public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard>{

private final IndexEventListener eventListener;
private final AnalysisService analysisService;
private final IndexFieldDataService indexFieldData;
private final BitsetFilterCache bitsetFilterCache;
private final NodeEnvironment nodeEnv;
private final IndicesService indicesServices;
private final IndexServicesProvider indexServicesProvider;
private final ShardStoreDeleter shardStoreDeleter;
private final NodeServicesProvider nodeServicesProvider;
private final IndexStore indexStore;
private final IndexSearcherWrapper searcherWrapper;
private final IndexCache indexCache;
private final MapperService mapperService;
private final SimilarityService similarityService;
private final EngineFactory engineFactory;
private volatile Map<Integer, IndexShard> shards = emptyMap();
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);

@@ -83,26 +90,31 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {

@Inject
public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
AnalysisService analysisService,
IndexFieldDataService indexFieldData,
BitsetFilterCache bitSetFilterCache,
IndicesService indicesServices,
IndexServicesProvider indexServicesProvider,
SimilarityService similarityService,
ShardStoreDeleter shardStoreDeleter,
AnalysisRegistry registry,
@Nullable EngineFactory engineFactory,
NodeServicesProvider nodeServicesProvider,
QueryCache queryCache,
IndexStore indexStore,
IndexEventListener eventListener,
IndexModule.IndexSearcherWrapperFactory wrapperFactory) {
IndexModule.IndexSearcherWrapperFactory wrapperFactory) throws IOException {
super(indexSettings);
this.indexSettings = indexSettings;
this.analysisService = analysisService;
this.indexFieldData = indexFieldData;
this.bitsetFilterCache = bitSetFilterCache;
this.indicesServices = indicesServices;
this.analysisService = registry.build(indexSettings);
this.similarityService = similarityService;
this.mapperService = new MapperService(indexSettings, analysisService, similarityService);
this.indexFieldData = new IndexFieldDataService(indexSettings, nodeServicesProvider.getIndicesFieldDataCache(), nodeServicesProvider.getCircuitBreakerService(), mapperService);
this.shardStoreDeleter = shardStoreDeleter;
this.eventListener = eventListener;
this.nodeEnv = nodeEnv;
this.indexServicesProvider = indexServicesProvider;
this.nodeServicesProvider = nodeServicesProvider;
this.indexStore = indexStore;
indexFieldData.setListener(new FieldDataCacheListener(this));
bitSetFilterCache.setListener(new BitsetCacheListener(this));
this.bitsetFilterCache = new BitsetFilterCache(indexSettings, nodeServicesProvider.getWarmer(), new BitsetCacheListener(this));
this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache);
this.engineFactory = engineFactory;
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
this.searcherWrapper = wrapperFactory.newWrapper(this);
}

@@ -145,39 +157,37 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
public Set<Integer> shardIds() { return shards.keySet(); }

public IndexCache cache() {
return indexServicesProvider.getIndexCache();
return indexCache;
}

public IndexFieldDataService fieldData() {
return indexFieldData;
}

public BitsetFilterCache bitsetFilterCache() {
return bitsetFilterCache;
}
public IndexFieldDataService fieldData() { return indexFieldData; }

public AnalysisService analysisService() {
return this.analysisService;
}

public MapperService mapperService() {
return indexServicesProvider.getMapperService();
return mapperService;
}

public SimilarityService similarityService() {
return indexServicesProvider.getSimilarityService();
return similarityService;
}

public synchronized void close(final String reason, boolean delete) {
public synchronized void close(final String reason, boolean delete) throws IOException {
if (closed.compareAndSet(false, true)) {
deleted.compareAndSet(false, delete);
final Set<Integer> shardIds = shardIds();
for (final int shardId : shardIds) {
try {
removeShard(shardId, reason);
} catch (Throwable t) {
logger.warn("failed to close shard", t);
try {
final Set<Integer> shardIds = shardIds();
for (final int shardId : shardIds) {
try {
removeShard(shardId, reason);
} catch (Throwable t) {
logger.warn("failed to close shard", t);
}
}
} finally {
IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService);
}
}
}

@@ -262,11 +272,11 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId)));
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> nodeServicesProvider.getIndicesQueryCache().onClose(shardId)));
if (useShadowEngine(primary, indexSettings)) {
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider);
} else {
indexShard = new IndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider);
}

eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");

@@ -339,19 +349,19 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
try {
eventListener.beforeIndexShardDeleted(lock.getShardId(), indexSettings);
} finally {
indicesServices.deleteShardStore("delete index", lock, indexSettings);
shardStoreDeleter.deleteShardStore("delete index", lock, indexSettings);
eventListener.afterIndexShardDeleted(lock.getShardId(), indexSettings);
}
}
} catch (IOException e) {
indicesServices.addPendingDelete(lock.getShardId(), indexSettings);
shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings);
logger.debug("[{}] failed to delete shard content - scheduled a retry", e, lock.getShardId().id());
}
}
}

public IndexServicesProvider getIndexServices() {
return indexServicesProvider;
public NodeServicesProvider getIndexServices() {
return nodeServicesProvider;
}

public IndexSettings getIndexSettings() {

@@ -359,7 +369,7 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
}

public QueryShardContext getQueryShardContext() {
return new QueryShardContext(indexSettings, indexServicesProvider.getClient(), bitsetFilterCache(), indexServicesProvider.getIndexFieldDataService(), mapperService(), similarityService(), indexServicesProvider.getScriptService(), indexServicesProvider.getIndicesQueriesRegistry());
return new QueryShardContext(indexSettings, nodeServicesProvider.getClient(), indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry());
}

private class StoreCloseListener implements Store.OnClose {

@@ -521,4 +531,23 @@ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
}
}
}

public interface ShardStoreDeleter {
void deleteShardStore(String reason, ShardLock lock, Settings indexSettings) throws IOException;
void addPendingDelete(ShardId shardId, Settings indexSettings);
}

final EngineFactory getEngineFactory() {
return engineFactory;
} // pkg private for testing

final IndexSearcherWrapper getSearcherWrapper() {
return searcherWrapper;
} // pkg private for testing

final IndexStore getIndexStore() {
return indexStore;
} // pkg private for testing

}

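IndexService now builds its MapperService, IndexFieldDataService, BitsetFilterCache and IndexCache in its own constructor from node-level services, and tears them down in close(), which is why those components also become Closeable in this commit. A hedged sketch of that own-and-close pattern, using only JDK types rather than the Elasticsearch classes:

```java
// Hedged sketch: the service that constructs its sub-components also closes them,
// so nothing is left for a container to tear down.
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

final class OwningService implements Closeable {
    private final AtomicBoolean closed = new AtomicBoolean(false);
    private final List<Closeable> components;

    OwningService(Closeable... components) {
        this.components = Arrays.asList(components); // built here, closed here
    }

    @Override
    public void close() throws IOException {
        if (closed.compareAndSet(false, true)) {
            IOException first = null;
            for (Closeable c : components) {         // close all, keep the first failure
                try {
                    c.close();
                } catch (IOException e) {
                    if (first == null) first = e; else first.addSuppressed(e);
                }
            }
            if (first != null) throw first;
        }
    }
}
```
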
NodeServicesProvider.java (formerly IndexServicesProvider.java)

@@ -23,16 +23,11 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;

@@ -43,88 +38,49 @@ import org.elasticsearch.threadpool.ThreadPool;
* a shard.
* This is just a temporary solution until we cleaned up index creation and removed injectors on that level as well.
*/
public final class IndexServicesProvider {
public final class NodeServicesProvider {

private final ThreadPool threadPool;
private final MapperService mapperService;
private final IndexCache indexCache;
private final IndicesQueryCache indicesQueryCache;
private final CodecService codecService;
private final TermVectorsService termVectorsService;
private final IndexFieldDataService indexFieldDataService;
private final IndicesWarmer warmer;
private final SimilarityService similarityService;
private final EngineFactory factory;
private final BigArrays bigArrays;
private final IndexingMemoryController indexingMemoryController;
private final IndexEventListener listener;
private final Client client;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final ScriptService scriptService;
private final IndicesFieldDataCache indicesFieldDataCache;
private final CircuitBreakerService circuitBreakerService;

@Inject
public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController, Client client, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry) {
this.listener = listener;
public NodeServicesProvider(ThreadPool threadPool, IndicesQueryCache indicesQueryCache, TermVectorsService termVectorsService, @Nullable IndicesWarmer warmer, BigArrays bigArrays, Client client, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry, IndicesFieldDataCache indicesFieldDataCache, CircuitBreakerService circuitBreakerService) {
this.threadPool = threadPool;
this.mapperService = mapperService;
this.indexCache = indexCache;
this.indicesQueryCache = indicesQueryCache;
this.codecService = codecService;
this.termVectorsService = termVectorsService;
this.indexFieldDataService = indexFieldDataService;
this.warmer = warmer;
this.similarityService = similarityService;
this.factory = factory;
this.bigArrays = bigArrays;
this.indexingMemoryController = indexingMemoryController;
this.client = client;
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.scriptService = scriptService;
this.indicesFieldDataCache = indicesFieldDataCache;
this.circuitBreakerService = circuitBreakerService;
}

public IndexEventListener getIndexEventListener() {
return listener;
}
public ThreadPool getThreadPool() {
return threadPool;
}

public MapperService getMapperService() {
return mapperService;
}

public IndexCache getIndexCache() {
return indexCache;
}

public IndicesQueryCache getIndicesQueryCache() {
return indicesQueryCache;
}

public CodecService getCodecService() {
return codecService;
}

public TermVectorsService getTermVectorsService() {
return termVectorsService;
}

public IndexFieldDataService getIndexFieldDataService() {
return indexFieldDataService;
}

public IndicesWarmer getWarmer() {
return warmer;
}

public SimilarityService getSimilarityService() {
return similarityService;
}

public EngineFactory getFactory() {
return factory;
}

public BigArrays getBigArrays() { return bigArrays; }

public Client getClient() {

@@ -139,7 +95,11 @@ public final class IndexServicesProvider {
return scriptService;
}

public IndexingMemoryController getIndexingMemoryController() {
return indexingMemoryController;
public IndicesFieldDataCache getIndicesFieldDataCache() {
return indicesFieldDataCache;
}

public CircuitBreakerService getCircuitBreakerService() {
return circuitBreakerService;
}
}

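The rename expresses the new split: the provider no longer carries index-scoped services (MapperService, IndexCache, similarity, field data), only node-wide singletons such as the thread pool, shared caches, script service, and circuit breakers; the index-scoped pieces are now created by IndexService itself. A hedged sketch of that node-versus-index split, with placeholder names only:

```java
// Hedged sketch: one immutable bundle of node-wide singletons shared by every index,
// while per-index state is created per index. Names are illustrative placeholders.
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;

final class NodeWideServices {
    final ExecutorService threadPool;
    final Map<String, String> sharedQueryCache;

    NodeWideServices(ExecutorService threadPool, Map<String, String> sharedQueryCache) {
        this.threadPool = threadPool;
        this.sharedQueryCache = sharedQueryCache;
    }
}

final class PerIndexServices {
    final String indexName;
    final NodeWideServices node;                              // borrowed, never created here
    final Map<String, String> mappings = new HashMap<>();     // index-scoped, built here

    PerIndexServices(String indexName, NodeWideServices node) {
        this.indexName = indexName;
        this.node = node;
    }
}
```
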
BitsetFilterCache.java

@@ -69,44 +69,26 @@ import java.util.concurrent.Executor;
public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {

public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";
private static final Listener DEFAULT_NOOP_LISTENER = new Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {
}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {
}
};

private final boolean loadRandomAccessFiltersEagerly;
private final Cache<Object, Cache<Query, Value>> loadedFilters;
private volatile Listener listener = DEFAULT_NOOP_LISTENER;
private final Listener listener;
private final BitSetProducerWarmer warmer;
private final IndicesWarmer indicesWarmer;

public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer) {
public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer, Listener listener) {
super(indexSettings);
if (listener == null) {
throw new IllegalArgumentException("listener must not be null");
}
this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build();
this.warmer = new BitSetProducerWarmer();
this.indicesWarmer = indicesWarmer;
indicesWarmer.addListener(warmer);
}

/**
 * Sets a listener that is invoked for all subsequent cache and removal events.
 * @throws IllegalStateException if the listener is set more than once
 */
public void setListener(Listener listener) {
if (listener == null) {
throw new IllegalArgumentException("listener must not be null");
}
if (this.listener != DEFAULT_NOOP_LISTENER) {
throw new IllegalStateException("can't set listener more than once");
}
this.listener = listener;
}

public BitSetProducer getBitSetProducer(Query query) {

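The mutable `setListener(...)` with a no-op default is replaced by a required constructor argument, which removes the "listener set too late or twice" states entirely. A hedged illustration of the two styles, with placeholder types rather than the Elasticsearch classes:

```java
// Hedged illustration: a required constructor argument instead of a mutable setter
// with a no-op default.
final class CacheWithRequiredListener {
    interface Listener { void onCache(String key); }

    private final Listener listener;

    CacheWithRequiredListener(Listener listener) {
        if (listener == null) {
            throw new IllegalArgumentException("listener must not be null");
        }
        this.listener = listener;  // immutable: no setListener, no "set more than once" check
    }

    void put(String key) {
        listener.onCache(key);     // always safe to call, never a forgotten no-op default
    }
}
```
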
CodecService.java

@@ -22,15 +22,10 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.mapper.MapperService;

import java.util.Collections;
import java.util.Map;

/**

@@ -38,11 +33,9 @@ import java.util.Map;
* codec layer that allows to use use-case specific file formats &
* data-structures per field. Elasticsearch exposes the full
* {@link Codec} capabilities through this {@link CodecService}.
*
*/
public class CodecService extends AbstractIndexComponent {
public class CodecService {

private final MapperService mapperService;
private final Map<String, Codec> codecs;

public final static String DEFAULT_CODEC = "default";

@@ -50,11 +43,8 @@ public class CodecService extends AbstractIndexComponent {
/** the raw unfiltered lucene default. useful for testing */
public final static String LUCENE_DEFAULT_CODEC = "lucene_default";

@Inject
public CodecService(IndexSettings indexSettings, MapperService mapperService) {
super(indexSettings);
this.mapperService = mapperService;
MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
public CodecService(@Nullable MapperService mapperService, ESLogger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene54Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION));

@@ -71,10 +61,6 @@ public class CodecService extends AbstractIndexComponent {
this.codecs = codecs.immutableMap();
}

public MapperService mapperService() {
return mapperService;
}

public Codec codec(String name) {
Codec codec = codecs.get(name);
if (codec == null) {

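CodecService loses its Guice constructor and its index-component base class; it now takes a @Nullable MapperService and, when none is supplied, falls back to the stock Lucene codecs. A hedged sketch of the nullable-collaborator fallback, with placeholder types and values:

```java
// Hedged sketch of the nullable-collaborator fallback: when the optional dependency is
// absent, register plain defaults instead of field-aware variants. Placeholder types only.
import java.util.HashMap;
import java.util.Map;

final class CodecRegistrySketch {
    static final String DEFAULT = "default";
    private final Map<String, String> codecs = new HashMap<>();

    CodecRegistrySketch(/* nullable */ Object perFieldMappingLookup) {
        if (perFieldMappingLookup == null) {
            codecs.put(DEFAULT, "plain-lucene-codec");      // no mappings available
        } else {
            codecs.put(DEFAULT, "per-field-aware-codec");   // mappings drive per-field formats
        }
    }

    String codec(String name) {
        String codec = codecs.get(name);
        if (codec == null) {
            throw new IllegalArgumentException("unknown codec [" + name + "]");
        }
        return codec;
    }
}
```
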
IndexFieldDataService.java

@@ -23,7 +23,6 @@ import org.apache.lucene.util.Accountable;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;

@@ -44,6 +43,8 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;

@@ -54,7 +55,7 @@ import static java.util.Collections.unmodifiableMap;

/**
 */
public class IndexFieldDataService extends AbstractIndexComponent {
public class IndexFieldDataService extends AbstractIndexComponent implements Closeable {

public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
public static final String FIELDDATA_CACHE_VALUE_NODE = "node";

@@ -157,7 +158,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;

@Inject
public IndexFieldDataService(IndexSettings indexSettings, IndicesFieldDataCache indicesFieldDataCache,
CircuitBreakerService circuitBreakerService, MapperService mapperService) {
super(indexSettings);

@@ -260,4 +260,8 @@ public class IndexFieldDataService extends AbstractIndexComponent {
this.listener = listener;
}

@Override
public void close() throws IOException {
clear();
}
}

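Making the field-data service Closeable lets its owner (IndexService, above) release it uniformly alongside the other per-index components; close() simply delegates to the existing clear(). A hedged, generic sketch of that shape:

```java
// Hedged sketch: exposing existing cleanup through Closeable lets callers release the
// service uniformly, including via try-with-resources.
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;

final class FieldDataCacheSketch implements Closeable {
    private final ConcurrentHashMap<String, byte[]> loaded = new ConcurrentHashMap<>();

    void clear() {
        loaded.clear();               // pre-existing cleanup logic
    }

    @Override
    public void close() throws IOException {
        clear();                      // close() delegates, as the hunk above does
    }

    public static void main(String[] args) throws IOException {
        try (FieldDataCacheSketch cache = new FieldDataCacheSketch()) {
            cache.loaded.put("field", new byte[8]);
        } // released automatically here
    }
}
```
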
MapperService.java

@@ -33,7 +33,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.concurrent.ReleasableLock;

@@ -105,7 +104,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {

private volatile Set<String> parentTypes = emptySet();

@Inject
public MapperService(IndexSettings indexSettings, AnalysisService analysisService,
SimilarityService similarityService) {
super(indexSettings);

IndexEventListener.java

@@ -89,6 +89,13 @@ public interface IndexEventListener {
 */
default void onShardInactive(IndexShard indexShard) {}

/**
 * Called when a shard is marked as active ie. was previously inactive and is now active again.
 *
 * @param indexShard The shard that was marked active
 */
default void onShardActive(IndexShard indexShard) {}

/**
 * Called before the index gets created. Note that this is also called
 * when the index is created on data nodes

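onShardActive joins the listener interface as a default no-op, so every existing listener keeps compiling and only interested implementations override it. A small hedged sketch of that default-method pattern with placeholder names:

```java
// Hedged sketch of the default-method listener pattern: adding a hook with an empty
// default body keeps existing implementations source-compatible.
interface ShardLifecycleListener {
    default void onShardInactive(String shardId) {}
    default void onShardActive(String shardId) {}   // new hook, no-op unless overridden
}

final class ActivityLogger implements ShardLifecycleListener {
    @Override
    public void onShardActive(String shardId) {
        System.out.println("shard " + shardId + " became active again");
    }
    // onShardInactive keeps its default no-op body
}
```
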
IndexShard.java

@@ -43,7 +43,6 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;

@@ -61,8 +60,7 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexServicesProvider;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache;

@@ -154,7 +152,7 @@ public class IndexShard extends AbstractIndexShardComponent {
private final IndicesQueryCache indicesQueryCache;
private final IndexEventListener indexEventListener;
private final IndexSettings idxSettings;
private final IndexServicesProvider provider;
private final NodeServicesProvider provider;

private TimeValue refreshInterval;

@@ -186,7 +184,9 @@ public class IndexShard extends AbstractIndexShardComponent {
public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS = "index.translog.flush_threshold_ops";
public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size";
public static final String INDEX_TRANSLOG_DISABLE_FLUSH = "index.translog.disable_flush";

/** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */
public static final String INDEX_SHARD_INACTIVE_TIME_SETTING = "index.shard.inactive_time";
private static final String INDICES_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time";

private final ShardPath path;

@@ -195,29 +195,31 @@ public class IndexShard extends AbstractIndexShardComponent {
private final EnumSet<IndexShardState> readAllowedStates = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY);

private final IndexSearcherWrapper searcherWrapper;
private final TimeValue inactiveTime;

/** True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link
 * IndexingMemoryController}). */
private final AtomicBoolean active = new AtomicBoolean();

private final IndexingMemoryController indexingMemoryController;

@Inject
public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexSearcherWrapper indexSearcherWrapper, IndexServicesProvider provider) {
public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache,
MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService,
@Nullable EngineFactory engineFactory,
IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, NodeServicesProvider provider) {
super(shardId, indexSettings);
this.inactiveTime = this.indexSettings.getAsTime(INDEX_SHARD_INACTIVE_TIME_SETTING, this.indexSettings.getAsTime(INDICES_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5)));
this.idxSettings = indexSettings;
this.codecService = provider.getCodecService();
this.codecService = new CodecService(mapperService, logger);
this.warmer = provider.getWarmer();
this.deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
this.similarityService = provider.getSimilarityService();
this.similarityService = similarityService;
Objects.requireNonNull(store, "Store must be provided to the index shard");
this.engineFactory = provider.getFactory();
this.engineFactory = engineFactory == null ? new InternalEngineFactory() : engineFactory;
this.store = store;
this.indexEventListener = provider.getIndexEventListener();
this.indexEventListener = indexEventListener;
this.mergeSchedulerConfig = new MergeSchedulerConfig(this.indexSettings);
this.threadPool = provider.getThreadPool();
this.mapperService = provider.getMapperService();
this.indexCache = provider.getIndexCache();
this.mapperService = mapperService;
this.indexCache = indexCache;
this.indexingService = new ShardIndexingService(shardId, indexSettings);
this.getService = new ShardGetService(indexSettings, this, mapperService);
this.termVectorsService = provider.getTermVectorsService();

@@ -226,7 +228,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.indicesQueryCache = provider.getIndicesQueryCache();
this.shardQueryCache = new ShardRequestCache(shardId, indexSettings);
this.shardFieldData = new ShardFieldData();
this.indexFieldDataService = provider.getIndexFieldDataService();
this.indexFieldDataService = indexFieldDataService;
this.shardBitsetFilterCache = new ShardBitsetFilterCache(shardId, indexSettings);
state = IndexShardState.CREATED;
this.refreshInterval = this.indexSettings.getAsTime(INDEX_REFRESH_INTERVAL, EngineConfig.DEFAULT_REFRESH_INTERVAL);

@@ -234,7 +236,6 @@ public class IndexShard extends AbstractIndexShardComponent {
this.path = path;
this.mergePolicyConfig = new MergePolicyConfig(logger, this.indexSettings);
/* create engine config */

logger.debug("state: [CREATED]");

this.checkIndexOnStartup = this.indexSettings.get("index.shard.check_on_startup", "false");

@@ -249,7 +250,6 @@ public class IndexShard extends AbstractIndexShardComponent {
cachingPolicy = new UsageTrackingQueryCachingPolicy();
}

this.indexingMemoryController = provider.getIndexingMemoryController();
this.engineConfig = newEngineConfig(translogConfig, cachingPolicy);
this.flushThresholdOperations = this.indexSettings.getAsInt(INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, this.indexSettings.getAsInt("index.translog.flush_threshold", Integer.MAX_VALUE));
this.flushThresholdSize = this.indexSettings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB));

@@ -919,10 +919,7 @@ public class IndexShard extends AbstractIndexShardComponent {
/** Records timestamp of the last write operation, possibly switching {@code active} to true if we were inactive. */
private void markLastWrite() {
if (active.getAndSet(true) == false) {
// We are currently inactive, but a new write operation just showed up, so we now notify IMC
// to wake up and fix our indexing buffer. We could do this async instead, but cost should
// be low, and it's rare this happens.
indexingMemoryController.forceCheck();
indexEventListener.onShardActive(this);
}
}

@@ -1036,9 +1033,9 @@ public class IndexShard extends AbstractIndexShardComponent {
/** Called by {@link IndexingMemoryController} to check whether more than {@code inactiveTimeNS} has passed since the last
 * indexing operation, and become inactive (reducing indexing and translog buffers to tiny values) if so. This returns true
 * if the shard is inactive. */
public boolean checkIdle(long inactiveTimeNS) {
public boolean checkIdle() {
Engine engineOrNull = getEngineOrNull();
if (engineOrNull != null && System.nanoTime() - engineOrNull.getLastWriteNanos() >= inactiveTimeNS) {
if (engineOrNull != null && System.nanoTime() - engineOrNull.getLastWriteNanos() >= inactiveTime.nanos()) {
boolean wasActive = active.getAndSet(false);
if (wasActive) {
updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);

@@ -1051,7 +1048,7 @@ public class IndexShard extends AbstractIndexShardComponent {
}

/** Returns {@code true} if this shard is active (has seen indexing ops in the last {@link
 * IndexingMemoryController#SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}. */
 * IndexShard#INDEX_SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}. */
public boolean getActive() {
return active.get();
}

@@ -1240,6 +1237,10 @@ public class IndexShard extends AbstractIndexShardComponent {
return indexEventListener;
}

public TimeValue getInactiveTime() {
return inactiveTime;
}

class EngineRefresher implements Runnable {
@Override
public void run() {

@@ -1469,7 +1470,7 @@ public class IndexShard extends AbstractIndexShardComponent {
final Engine.Warmer engineWarmer = (searcher, toLevel) -> warmer.warm(searcher, this, idxSettings, toLevel);
return new EngineConfig(shardId,
threadPool, indexingService, indexSettings, engineWarmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, indexingMemoryController.getInactiveTime());
mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, inactiveTime);
}

private static class IndexShardOperationCounter extends AbstractRefCounted {

@@ -1621,4 +1622,8 @@ public class IndexShard extends AbstractIndexShardComponent {
return queryShardContextCache.get();
}

EngineFactory getEngineFactory() {
return engineFactory;
}

}

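IndexShard now owns its inactivity timeout, reading `index.shard.inactive_time` with a fallback to the older `indices.memory.shard_inactive_time` and a five-minute default, instead of asking IndexingMemoryController for it. A hedged sketch of that layered settings lookup, using placeholder helpers and JDK types:

```java
// Hedged sketch of a layered settings fallback: per-index key first, legacy node-level
// key second, hard-coded default last.
import java.time.Duration;
import java.util.Map;

final class InactiveTimeLookup {
    static Duration inactiveTime(Map<String, String> settings) {
        String value = settings.get("index.shard.inactive_time");
        if (value == null) {
            value = settings.get("indices.memory.shard_inactive_time"); // legacy fallback
        }
        // ISO-8601 durations here (e.g. "PT5M"), unlike Elasticsearch's "5m" syntax
        return value == null ? Duration.ofMinutes(5) : Duration.parse(value);
    }
}
```
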
ShadowIndexShard.java

@@ -21,11 +21,17 @@ package org.elasticsearch.index.shard;
import java.io.IOException;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexServicesProvider;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.TranslogStats;

@@ -37,8 +43,9 @@ import org.elasticsearch.index.translog.TranslogStats;
*/
public final class ShadowIndexShard extends IndexShard {

public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexSearcherWrapper wrapper, IndexServicesProvider provider) throws IOException {
super(shardId, indexSettings, path, store, wrapper, provider);
public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache, MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService, @Nullable EngineFactory engineFactory,
IndexEventListener indexEventListener, IndexSearcherWrapper wrapper, NodeServicesProvider provider) throws IOException {
super(shardId, indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldDataService, engineFactory, indexEventListener, wrapper, provider);
}

/**

SimilarityService.java

@@ -33,7 +33,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;

public class SimilarityService extends AbstractIndexComponent {
public final class SimilarityService extends AbstractIndexComponent {

public final static String DEFAULT_SIMILARITY = "default";
private final Similarity defaultSimilarity;

IndicesModule.java

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;
import org.elasticsearch.index.termvectors.TermVectorsService;

@@ -134,6 +135,7 @@ public class IndicesModule extends AbstractModule {
bind(MetaDataIndexUpgradeService.class).asEagerSingleton();
bind(IndicesFieldDataCacheListener.class).asEagerSingleton();
bind(TermVectorsService.class).asEagerSingleton();
bind(NodeServicesProvider.class).asEagerSingleton();
}

protected void bindQueryParsersExtension() {

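Guice only disappears below the index level; at the node level the new NodeServicesProvider is still wired as an eager singleton inside IndicesModule. A hedged sketch of that binding style in a standalone Guice program, with illustrative module and service names:

```java
// Hedged sketch of a node-level Guice module binding an eager singleton, the style
// IndicesModule keeps using. Module and service names are illustrative only.
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

final class NodeLevelModule extends AbstractModule {
    @Override
    protected void configure() {
        bind(ExampleNodeService.class).asEagerSingleton(); // constructed once, at injector creation
    }
}

class ExampleNodeService {}

final class NodeLevelModuleDemo {
    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new NodeLevelModule());
        ExampleNodeService service = injector.getInstance(ExampleNodeService.class);
        System.out.println(service != null);
    }
}
```
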
IndicesService.java

@@ -28,7 +28,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -37,7 +36,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;

@@ -46,15 +44,10 @@ import org.elasticsearch.env.ShardLock;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.*;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.indexing.IndexingStats;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;

@@ -65,11 +58,9 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.PluginsService;

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Files;
import java.util.*;

@@ -78,7 +69,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Stream;

import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;

@@ -89,46 +79,18 @@ import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
/**
 *
 */
public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {
public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService>, IndexService.ShardStoreDeleter {

public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout";
private final Injector injector;
private final PluginsService pluginsService;
private final NodeEnvironment nodeEnv;
private final TimeValue shardsClosedTimeout;
private final IndicesWarmer indicesWarmer;

private final IndicesQueryCache indicesQueryCache;
|
||||
private final AnalysisRegistry analysisRegistry;
|
||||
private final IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
private final ClusterService clusterService;
|
||||
private final IndexNameExpressionResolver indexNameExpressionResolver;
|
||||
|
||||
private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap();
|
||||
|
||||
public AnalysisRegistry getAnalysis() {
|
||||
return analysisRegistry;
|
||||
}
|
||||
|
||||
static class IndexServiceInjectorPair {
|
||||
private final IndexService indexService;
|
||||
private final Injector injector;
|
||||
|
||||
public IndexServiceInjectorPair(IndexService indexService, Injector injector) {
|
||||
this.indexService = indexService;
|
||||
this.injector = injector;
|
||||
}
|
||||
|
||||
public IndexService getIndexService() {
|
||||
return indexService;
|
||||
}
|
||||
|
||||
public Injector getInjector() {
|
||||
return injector;
|
||||
}
|
||||
}
|
||||
|
||||
private volatile Map<String, IndexService> indices = emptyMap();
|
||||
private final Map<Index, List<PendingDelete>> pendingDeletes = new HashMap<>();
|
||||
|
||||
private final OldShardsStats oldShardsStats = new OldShardsStats();
|
||||
private final IndexStoreConfig indexStoreConfig;
|
||||
|
||||
|
@ -137,13 +99,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
}
|
||||
|
||||
@Inject
|
||||
public IndicesService(Settings settings, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService) {
|
||||
public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService) {
|
||||
super(settings);
|
||||
this.injector = injector;
|
||||
this.pluginsService = pluginsService;
|
||||
this.nodeEnv = nodeEnv;
|
||||
this.indicesWarmer = indicesWarmer;
|
||||
this.indicesQueryCache = indicesQueryCache;
|
||||
this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
|
||||
this.indexStoreConfig = new IndexStoreConfig(settings);
|
||||
this.analysisRegistry = analysisRegistry;
|
||||
|
@ -184,8 +143,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
|
||||
@Override
|
||||
protected void doClose() {
|
||||
IOUtils.closeWhileHandlingException(injector.getInstance(RecoverySettings.class),
|
||||
analysisRegistry);
|
||||
IOUtils.closeWhileHandlingException(analysisRegistry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -230,8 +188,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
}
|
||||
|
||||
Map<Index, List<IndexShardStats>> statsByShard = new HashMap<>();
|
||||
for (IndexServiceInjectorPair value : indices.values()) {
|
||||
IndexService indexService = value.getIndexService();
|
||||
for (IndexService indexService : indices.values()) {
|
||||
for (IndexShard indexShard : indexService) {
|
||||
try {
|
||||
if (indexShard.routingEntry() == null) {
|
||||
|
@ -262,7 +219,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
|
||||
@Override
|
||||
public Iterator<IndexService> iterator() {
|
||||
return indices.values().stream().map((p) -> p.getIndexService()).iterator();
|
||||
return indices.values().iterator();
|
||||
}
|
||||
|
||||
public boolean hasIndex(String index) {
|
||||
|
@ -275,12 +232,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
*/
|
||||
@Nullable
|
||||
public IndexService indexService(String index) {
|
||||
IndexServiceInjectorPair indexServiceInjectorPair = indices.get(index);
|
||||
if (indexServiceInjectorPair == null) {
|
||||
return null;
|
||||
} else {
|
||||
return indexServiceInjectorPair.getIndexService();
|
||||
}
|
||||
return indices.get(index);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -302,7 +254,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
* @param builtInListeners a list of built-in lifecycle {@link IndexEventListener} that should be used alongside the per-index listeners
|
||||
* @throws IndexAlreadyExistsException if the index already exists.
|
||||
*/
|
||||
public synchronized IndexService createIndex(IndexMetaData indexMetaData, List<IndexEventListener> builtInListeners) {
|
||||
public synchronized IndexService createIndex(final NodeServicesProvider nodeServicesProvider, IndexMetaData indexMetaData, List<IndexEventListener> builtInListeners) throws IOException {
|
||||
if (!lifecycle.started()) {
|
||||
throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed");
|
||||
}
|
||||
|
@ -319,36 +271,28 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
idxSettings.getNumberOfReplicas(),
|
||||
idxSettings.isShadowReplicaIndex() ? "s" : "");
|
||||
|
||||
|
||||
ModulesBuilder modules = new ModulesBuilder();
|
||||
// plugin modules must be added here, before others or we can get crazy injection errors...
|
||||
for (Module pluginModule : pluginsService.indexModules(idxSettings.getSettings())) {
|
||||
modules.add(pluginModule);
|
||||
}
|
||||
final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, indicesQueryCache, indicesWarmer, analysisRegistry);
|
||||
final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, analysisRegistry);
|
||||
pluginsService.onIndexModule(indexModule);
|
||||
for (IndexEventListener listener : builtInListeners) {
|
||||
indexModule.addIndexEventListener(listener);
|
||||
}
|
||||
indexModule.addIndexEventListener(oldShardsStats);
|
||||
modules.add(indexModule);
|
||||
pluginsService.processModules(modules);
|
||||
final IndexEventListener listener = indexModule.freeze();
|
||||
listener.beforeIndexCreated(index, idxSettings.getSettings());
|
||||
|
||||
Injector indexInjector;
|
||||
final IndexService indexService = indexModule.newIndexService(nodeEnv, this, nodeServicesProvider);
|
||||
boolean success = false;
|
||||
try {
|
||||
indexInjector = modules.createChildInjector(injector);
|
||||
} catch (CreationException e) {
|
||||
throw new IndexCreationException(index, Injectors.getFirstErrorFailure(e));
|
||||
} catch (Throwable e) {
|
||||
throw new IndexCreationException(index, e);
|
||||
assert indexService.getIndexEventListener() == listener;
|
||||
listener.afterIndexCreated(indexService);
|
||||
indices = newMapBuilder(indices).put(index.name(), indexService).immutableMap();
|
||||
success = true;
|
||||
return indexService;
|
||||
} finally {
|
||||
if (success == false) {
|
||||
indexService.close("plugins_failed", true);
|
||||
}
|
||||
}
|
||||
|
||||
IndexService indexService = indexInjector.getInstance(IndexService.class);
|
||||
assert indexService.getIndexEventListener() == listener;
|
||||
listener.afterIndexCreated(indexService);
|
||||
indices = newMapBuilder(indices).put(index.name(), new IndexServiceInjectorPair(indexService, indexInjector)).immutableMap();
|
||||
return indexService;
|
||||
}
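For orientation, a minimal caller-side sketch of the new createIndex contract shown above: node-level services are handed to IndicesService explicitly instead of being resolved through a guice child injector. The wrapper class and the empty listener list are illustrative assumptions, not part of this commit.

import java.io.IOException;
import java.util.Collections;

import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.indices.IndicesService;

// Hypothetical caller of the new createIndex(nodeServicesProvider, indexMetaData, builtInListeners) signature.
final class CreateIndexSketch {
    static IndexService createIndex(IndicesService indicesService,
                                    NodeServicesProvider nodeServicesProvider,
                                    IndexMetaData indexMetaData) throws IOException {
        // node-level services are passed in explicitly; built-in listeners are optional here
        return indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList());
    }
}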
|
||||
|
||||
/**
|
||||
|
@ -364,7 +308,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
private void removeIndex(String index, String reason, boolean delete) {
|
||||
try {
|
||||
final IndexService indexService;
|
||||
final Injector indexInjector;
|
||||
final IndexEventListener listener;
|
||||
synchronized (this) {
|
||||
if (indices.containsKey(index) == false) {
|
||||
|
@ -372,10 +315,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
}
|
||||
|
||||
logger.debug("[{}] closing ... (reason [{}])", index, reason);
|
||||
Map<String, IndexServiceInjectorPair> newIndices = new HashMap<>(indices);
|
||||
IndexServiceInjectorPair remove = newIndices.remove(index);
|
||||
indexService = remove.getIndexService();
|
||||
indexInjector = remove.getInjector();
|
||||
Map<String, IndexService> newIndices = new HashMap<>(indices);
|
||||
indexService = newIndices.remove(index);
|
||||
indices = unmodifiableMap(newIndices);
|
||||
listener = indexService.getIndexEventListener();
|
||||
}
|
||||
|
@ -384,21 +325,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
if (delete) {
|
||||
listener.beforeIndexDeleted(indexService);
|
||||
}
|
||||
Stream<Closeable> closeables = pluginsService.indexServices().stream().map(p -> indexInjector.getInstance(p));
|
||||
IOUtils.close(closeables::iterator);
|
||||
|
||||
logger.debug("[{}] closing index service (reason [{}])", index, reason);
|
||||
indexService.close(reason, delete);
|
||||
|
||||
logger.debug("[{}] closing index cache (reason [{}])", index, reason);
|
||||
indexInjector.getInstance(IndexCache.class).close();
|
||||
logger.debug("[{}] clearing index field data (reason [{}])", index, reason);
|
||||
indexInjector.getInstance(IndexFieldDataService.class).clear();
|
||||
logger.debug("[{}] closing analysis service (reason [{}])", index, reason);
|
||||
indexInjector.getInstance(AnalysisService.class).close();
|
||||
|
||||
logger.debug("[{}] closing mapper service (reason [{}])", index, reason);
|
||||
indexInjector.getInstance(MapperService.class).close();
|
||||
logger.debug("[{}] closed... (reason [{}])", index, reason);
|
||||
listener.afterIndexClosed(indexService.index(), indexService.getIndexSettings().getSettings());
|
||||
if (delete) {
|
||||
|
@ -473,7 +401,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
synchronized (this) {
|
||||
String indexName = metaData.getIndex();
|
||||
if (indices.containsKey(indexName)) {
|
||||
String localUUid = indices.get(indexName).getIndexService().indexUUID();
|
||||
String localUUid = indices.get(indexName).indexUUID();
|
||||
throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]");
|
||||
}
|
||||
if (clusterState.metaData().hasIndex(indexName) && (clusterState.nodes().localNode().masterNode() == true)) {
|
||||
|
@ -574,11 +502,11 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
* @return true if the index can be deleted on this node
|
||||
*/
|
||||
public boolean canDeleteIndexContents(Index index, Settings indexSettings, boolean closed) {
|
||||
final IndexServiceInjectorPair indexServiceInjectorPair = this.indices.get(index.name());
|
||||
final IndexService indexService = this.indices.get(index.name());
|
||||
// Closed indices may be deleted, even if they are on a shared
|
||||
// filesystem. Since it is closed we aren't deleting it for relocation
|
||||
if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false || closed) {
|
||||
if (indexServiceInjectorPair == null && nodeEnv.hasNodeFile()) {
|
||||
if (indexService == null && nodeEnv.hasNodeFile()) {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
|
@ -609,10 +537,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
}
|
||||
|
||||
private boolean canDeleteShardContent(ShardId shardId, Settings indexSettings) {
|
||||
final IndexServiceInjectorPair indexServiceInjectorPair = this.indices.get(shardId.getIndex());
|
||||
final IndexService indexService = this.indices.get(shardId.getIndex());
|
||||
if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false) {
|
||||
if (indexServiceInjectorPair != null && nodeEnv.hasNodeFile()) {
|
||||
final IndexService indexService = indexServiceInjectorPair.getIndexService();
|
||||
if (indexService != null && nodeEnv.hasNodeFile()) {
|
||||
return indexService.hasShard(shardId.id()) == false;
|
||||
} else if (nodeEnv.hasNodeFile()) {
|
||||
if (NodeEnvironment.hasCustomDataPath(indexSettings)) {
|
||||
|
@ -805,4 +732,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
|||
public IndicesQueriesRegistry getIndicesQueryRegistry() {
|
||||
return indicesQueriesRegistry;
|
||||
}
|
||||
|
||||
public AnalysisRegistry getAnalysis() {
|
||||
return analysisRegistry;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -63,7 +63,6 @@ public final class IndicesWarmer extends AbstractComponent {
|
|||
if (shard.state() == IndexShardState.CLOSED) {
|
||||
return;
|
||||
}
|
||||
final IndexMetaData indexMetaData = settings.getIndexMetaData();
|
||||
final Settings indexSettings = settings.getSettings();
|
||||
if (!indexSettings.getAsBoolean(INDEX_WARMER_ENABLED, settings.getNodeSettings().getAsBoolean(INDEX_WARMER_ENABLED, true))) {
|
||||
return;
|
||||
|
|
|
@ -46,12 +46,14 @@ import org.elasticsearch.common.util.Callback;
|
|||
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.IndexShardAlreadyExistsException;
|
||||
import org.elasticsearch.index.NodeServicesProvider;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.shard.*;
|
||||
import org.elasticsearch.index.snapshots.IndexShardRepository;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.indices.flush.SyncedFlushService;
|
||||
import org.elasticsearch.indices.memory.IndexingMemoryController;
|
||||
import org.elasticsearch.indices.recovery.RecoveryFailedException;
|
||||
import org.elasticsearch.indices.recovery.RecoverySource;
|
||||
import org.elasticsearch.indices.recovery.RecoveryState;
|
||||
|
@ -76,6 +78,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
|
|||
private final ShardStateAction shardStateAction;
|
||||
private final NodeIndexDeletedAction nodeIndexDeletedAction;
|
||||
private final NodeMappingRefreshAction nodeMappingRefreshAction;
|
||||
private final NodeServicesProvider nodeServicesProvider;
|
||||
|
||||
private static final ShardStateAction.Listener SHARD_STATE_ACTION_LISTENER = new NoOpShardStateActionListener();
|
||||
|
||||
|
@ -110,9 +113,12 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
|
|||
ThreadPool threadPool, RecoveryTarget recoveryTarget,
|
||||
ShardStateAction shardStateAction,
|
||||
NodeIndexDeletedAction nodeIndexDeletedAction,
|
||||
NodeMappingRefreshAction nodeMappingRefreshAction, RepositoriesService repositoriesService, RestoreService restoreService, SearchService searchService, SyncedFlushService syncedFlushService, RecoverySource recoverySource) {
|
||||
NodeMappingRefreshAction nodeMappingRefreshAction,
|
||||
RepositoriesService repositoriesService, RestoreService restoreService,
|
||||
SearchService searchService, SyncedFlushService syncedFlushService,
|
||||
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider, IndexingMemoryController indexingMemoryController) {
|
||||
super(settings);
|
||||
this.buildInIndexListener = Arrays.asList(recoverySource, recoveryTarget, searchService, syncedFlushService);
|
||||
this.buildInIndexListener = Arrays.asList(recoverySource, recoveryTarget, searchService, syncedFlushService, indexingMemoryController);
|
||||
this.indicesService = indicesService;
|
||||
this.clusterService = clusterService;
|
||||
this.threadPool = threadPool;
|
||||
|
@ -123,6 +129,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
|
|||
this.restoreService = restoreService;
|
||||
this.repositoriesService = repositoriesService;
|
||||
this.sendRefreshMapping = this.settings.getAsBoolean("indices.cluster.send_refresh_mapping", true);
|
||||
this.nodeServicesProvider = nodeServicesProvider;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -302,7 +309,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
|
|||
logger.debug("[{}] creating index", indexMetaData.getIndex());
|
||||
}
|
||||
try {
|
||||
indicesService.createIndex(indexMetaData, buildInIndexListener);
|
||||
indicesService.createIndex(nodeServicesProvider, indexMetaData, buildInIndexListener);
|
||||
} catch (Throwable e) {
|
||||
sendFailShard(shard, indexMetaData.getIndexUUID(), "failed to create index", e);
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
|
|||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.EngineClosedException;
|
||||
import org.elasticsearch.index.engine.FlushNotAllowedEngineException;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.IndexShardState;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
@ -40,7 +41,7 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
import java.util.*;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
|
||||
public class IndexingMemoryController extends AbstractLifecycleComponent<IndexingMemoryController> {
|
||||
public class IndexingMemoryController extends AbstractLifecycleComponent<IndexingMemoryController> implements IndexEventListener {
|
||||
|
||||
/** How much heap (% or bytes) we will share across all actively indexing shards on this node (default: 10%). */
|
||||
public static final String INDEX_BUFFER_SIZE_SETTING = "indices.memory.index_buffer_size";
|
||||
|
@ -72,9 +73,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
/** Sets a ceiling on the per-shard translog buffer size (default: 64 KB). */
|
||||
public static final String MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.max_shard_translog_buffer_size";
|
||||
|
||||
/** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */
|
||||
public static final String SHARD_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time";
|
||||
|
||||
/** How frequently we check shards to find inactive ones (default: 30 seconds). */
|
||||
public static final String SHARD_INACTIVE_INTERVAL_TIME_SETTING = "indices.memory.interval";
|
||||
|
||||
|
@ -95,7 +93,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
private final ByteSizeValue minShardTranslogBufferSize;
|
||||
private final ByteSizeValue maxShardTranslogBufferSize;
|
||||
|
||||
private final TimeValue inactiveTime;
|
||||
private final TimeValue interval;
|
||||
|
||||
private volatile ScheduledFuture scheduler;
|
||||
|
@ -159,17 +156,15 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
this.minShardTranslogBufferSize = this.settings.getAsBytesSize(MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(2, ByteSizeUnit.KB));
|
||||
this.maxShardTranslogBufferSize = this.settings.getAsBytesSize(MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(64, ByteSizeUnit.KB));
|
||||
|
||||
this.inactiveTime = this.settings.getAsTime(SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
|
||||
// we need to have this relatively small to move a shard from inactive to active fast (enough)
|
||||
this.interval = this.settings.getAsTime(SHARD_INACTIVE_INTERVAL_TIME_SETTING, TimeValue.timeValueSeconds(30));
|
||||
|
||||
this.statusChecker = new ShardsIndicesStatusChecker();
|
||||
|
||||
logger.debug("using indexing buffer size [{}], with {} [{}], {} [{}], {} [{}], {} [{}]",
|
||||
logger.debug("using indexing buffer size [{}], with {} [{}], {} [{}], {} [{}]",
|
||||
this.indexingBuffer,
|
||||
MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, this.minShardIndexBufferSize,
|
||||
MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, this.maxShardIndexBufferSize,
|
||||
SHARD_INACTIVE_TIME_SETTING, this.inactiveTime,
|
||||
SHARD_INACTIVE_INTERVAL_TIME_SETTING, this.interval);
|
||||
}
|
||||
|
||||
|
@ -303,7 +298,6 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
|
||||
// Was the shard active last time we checked?
|
||||
Boolean wasActive = shardWasActive.get(shardId);
|
||||
|
||||
if (wasActive == null) {
|
||||
// First time we are seeing this shard
|
||||
shardWasActive.put(shardId, isActive);
|
||||
|
@ -315,12 +309,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
changes.add(ShardStatusChangeType.BECAME_ACTIVE);
|
||||
logger.debug("marking shard {} as active indexing wise", shardId);
|
||||
shardWasActive.put(shardId, true);
|
||||
} else if (checkIdle(shardId, inactiveTime.nanos()) == Boolean.TRUE) {
|
||||
} else if (checkIdle(shardId) == Boolean.TRUE) {
|
||||
// Make shard inactive now
|
||||
changes.add(ShardStatusChangeType.BECAME_INACTIVE);
|
||||
logger.debug("marking shard {} as inactive (inactive_time[{}]) indexing wise",
|
||||
shardId,
|
||||
inactiveTime);
|
||||
|
||||
shardWasActive.put(shardId, false);
|
||||
}
|
||||
}
|
||||
|
@ -397,12 +389,18 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
|
||||
/** Asks this shard to check now whether it is inactive, and to reduce its indexing and translog buffers if so. Returns Boolean.TRUE if
|
||||
* it did deactivate, Boolean.FALSE if it did not, and null if the shard is unknown */
|
||||
protected Boolean checkIdle(ShardId shardId, long inactiveTimeNS) {
|
||||
String ignoreReason = null;
|
||||
protected Boolean checkIdle(ShardId shardId) {
|
||||
final String ignoreReason;
|
||||
final IndexShard shard = getShard(shardId);
|
||||
if (shard != null) {
|
||||
try {
|
||||
return shard.checkIdle(inactiveTimeNS);
|
||||
if (shard.checkIdle()) {
|
||||
logger.debug("marking shard {} as inactive (inactive_time[{}]) indexing wise",
|
||||
shardId,
|
||||
shard.getInactiveTime());
|
||||
return Boolean.TRUE;
|
||||
}
|
||||
return Boolean.FALSE;
|
||||
} catch (EngineClosedException e) {
|
||||
// ignore
|
||||
ignoreReason = "EngineClosedException";
|
||||
|
@ -423,7 +421,11 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
|
|||
ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE
|
||||
}
|
||||
|
||||
public TimeValue getInactiveTime() {
|
||||
return inactiveTime;
|
||||
@Override
|
||||
public void onShardActive(IndexShard indexShard) {
|
||||
// At least one shard used to be inactive, i.e. a new write operation just showed up.
|
||||
// We try to fix the shards indexing buffer immediately. We could do this async instead, but cost should
|
||||
// be low, and it's rare this happens.
|
||||
forceCheck();
|
||||
}
|
||||
}
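A small sketch of the listener contract this change leans on: shard activity is now surfaced through the IndexEventListener onShardActive hook (which the memory controller above uses to call forceCheck()), while idleness is decided by the shard itself via checkIdle(). The listener class below is an illustrative assumption.

import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;

// Hypothetical listener using the same hook as IndexingMemoryController.
public class ShardActivityLogger implements IndexEventListener {
    @Override
    public void onShardActive(IndexShard indexShard) {
        // invoked when a previously inactive shard receives a new indexing operation;
        // the shard decides idleness itself via checkIdle()/getInactiveTime()
    }
}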
|
||||
|
|
|
@ -63,6 +63,7 @@ import org.elasticsearch.indices.cache.query.IndicesQueryCache;
|
|||
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
|
||||
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
|
||||
import org.elasticsearch.indices.memory.IndexingMemoryController;
|
||||
import org.elasticsearch.indices.recovery.RecoverySettings;
|
||||
import org.elasticsearch.indices.store.IndicesStore;
|
||||
import org.elasticsearch.indices.ttl.IndicesTTLService;
|
||||
import org.elasticsearch.monitor.MonitorModule;
|
||||
|
@ -308,6 +309,7 @@ public class Node implements Releasable {
|
|||
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
|
||||
injector.getInstance(plugin).stop();
|
||||
}
|
||||
injector.getInstance(RecoverySettings.class).close();
|
||||
// we should stop this last since it waits for resources to get released
|
||||
// if we had scroll searchers etc. or recovery going on we wait for them to finish.
|
||||
injector.getInstance(IndicesService.class).stop();
|
||||
|
|
|
@ -444,7 +444,7 @@ public class PercolateContext extends SearchContext {
|
|||
|
||||
@Override
|
||||
public BitsetFilterCache bitsetFilterCache() {
|
||||
return indexService.bitsetFilterCache();
|
||||
return indexService.cache().bitsetFilterCache();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -22,10 +22,13 @@ package org.elasticsearch.plugins;
|
|||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexModule;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* An extension point allowing to plug in custom functionality.
|
||||
|
@ -59,20 +62,6 @@ public abstract class Plugin {
|
|||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Per index modules.
|
||||
*/
|
||||
public Collection<Module> indexModules(Settings indexSettings) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Per index services that will be automatically closed.
|
||||
*/
|
||||
public Collection<Class<? extends Closeable>> indexServices() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional node settings loaded by the plugin. Note that settings that are explicit in the nodes settings can't be
|
||||
* overwritten with the additional settings. These settings added if they don't exist.
|
||||
|
@ -80,4 +69,24 @@ public abstract class Plugin {
|
|||
public Settings additionalSettings() {
|
||||
return Settings.Builder.EMPTY_SETTINGS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Called once the given {@link IndexService} is fully constructed but not yet published.
|
||||
* This is used to initialize plugin services that require access to index-level resources
|
||||
*/
|
||||
public void onIndexService(IndexService indexService) {}
|
||||
|
||||
/**
|
||||
* Called before a new index is created on a node. The given module can be used to register index-level
|
||||
* extensions.
|
||||
*/
|
||||
public void onIndexModule(IndexModule indexModule) {}
|
||||
|
||||
/**
|
||||
* Old-style guice index level extension point.
|
||||
*
|
||||
* @deprecated use #onIndexModule instead
|
||||
*/
|
||||
@Deprecated
|
||||
public final void onModule(IndexModule indexModule) {}
|
||||
}
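A hedged sketch of how a plugin can use the new index-level extension points in place of the removed indexModules()/indexServices() hooks. The plugin class, the index-name filter, and the listener bodies are illustrative assumptions; the extension points themselves (onIndexModule, onIndexService, addIndexEventListener) are the ones this change introduces.

import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.plugins.Plugin;

// Hypothetical plugin; kept abstract so the remaining Plugin members can stay omitted here.
public abstract class MyIndexLevelPlugin extends Plugin {

    @Override
    public void onIndexModule(IndexModule indexModule) {
        // register per-index extensions directly on the module instead of
        // contributing a guice Module via the removed indexModules(Settings) hook
        if (indexModule.getIndex().getName().equals("test")) { // illustrative index filter
            indexModule.addIndexEventListener(new IndexEventListener() {
                @Override
                public void afterIndexCreated(IndexService indexService) {
                    // react to the fully constructed index
                }
            });
        }
    }

    @Override
    public void onIndexService(IndexService indexService) {
        // initialize plugin services that need index-level resources,
        // called once the IndexService is constructed but not yet published
    }
}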
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
|
|||
import org.apache.lucene.codecs.Codec;
|
||||
import org.apache.lucene.codecs.DocValuesFormat;
|
||||
import org.apache.lucene.codecs.PostingsFormat;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
|
||||
import org.elasticsearch.bootstrap.JarHell;
|
||||
|
@ -37,6 +38,9 @@ import org.elasticsearch.common.io.FileSystemUtils;
|
|||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexModule;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
|
@ -239,22 +243,19 @@ public class PluginsService extends AbstractComponent {
|
|||
return services;
|
||||
}
|
||||
|
||||
public Collection<Module> indexModules(Settings indexSettings) {
|
||||
List<Module> modules = new ArrayList<>();
|
||||
public void onIndexModule(IndexModule indexModule) {
|
||||
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
|
||||
modules.addAll(plugin.v2().indexModules(indexSettings));
|
||||
plugin.v2().onIndexModule(indexModule);
|
||||
}
|
||||
return modules;
|
||||
indexModule.addIndexEventListener(new IndexEventListener() {
|
||||
@Override
|
||||
public void afterIndexCreated(IndexService indexService) {
|
||||
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
|
||||
plugin.v2().onIndexService(indexService);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public Collection<Class<? extends Closeable>> indexServices() {
|
||||
List<Class<? extends Closeable>> services = new ArrayList<>();
|
||||
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
|
||||
services.addAll(plugin.v2().indexServices());
|
||||
}
|
||||
return services;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get information about plugins (jvm and site plugins).
|
||||
*/
|
||||
|
|
|
@ -450,7 +450,7 @@ public class DefaultSearchContext extends SearchContext {
|
|||
|
||||
@Override
|
||||
public BitsetFilterCache bitsetFilterCache() {
|
||||
return indexService.bitsetFilterCache();
|
||||
return indexService.cache().bitsetFilterCache();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -77,8 +77,8 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase {
|
|||
Version.CURRENT,
|
||||
null,
|
||||
new HashSet<>(),
|
||||
null
|
||||
);
|
||||
null,
|
||||
null);
|
||||
MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, null);
|
||||
|
||||
final List<Throwable> throwables = new ArrayList<>();
|
||||
|
|
|
@ -63,7 +63,7 @@ public class SettingsFilteringIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Collection<Module> indexModules(Settings indexSettings) {
|
||||
public Collection<Module> nodeModules() {
|
||||
return Collections.<Module>singletonList(new SettingsFilteringModule());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.index;
|
||||
|
||||
import org.apache.lucene.index.AssertingDirectoryReader;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.FieldInvertState;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
|
@ -25,71 +26,137 @@ import org.apache.lucene.search.*;
|
|||
import org.apache.lucene.search.similarities.BM25Similarity;
|
||||
import org.apache.lucene.search.similarities.Similarity;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.inject.ModuleTestCase;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.env.ShardLock;
|
||||
import org.elasticsearch.index.analysis.AnalysisRegistry;
|
||||
import org.elasticsearch.index.cache.query.QueryCache;
|
||||
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
|
||||
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
|
||||
import org.elasticsearch.index.engine.EngineConfig;
|
||||
import org.elasticsearch.index.engine.EngineException;
|
||||
import org.elasticsearch.index.engine.EngineFactory;
|
||||
import org.elasticsearch.index.engine.InternalEngineFactory;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.index.store.IndexStore;
|
||||
import org.elasticsearch.index.store.IndexStoreConfig;
|
||||
import org.elasticsearch.indices.IndicesWarmer;
|
||||
import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
|
||||
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
|
||||
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
|
||||
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.ScriptContextRegistry;
|
||||
import org.elasticsearch.script.ScriptEngineService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.IndexSettingsModule;
|
||||
import org.elasticsearch.test.engine.MockEngineFactory;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
public class IndexModuleTests extends ModuleTestCase {
|
||||
private final IndicesWarmer warmer = new IndicesWarmer(Settings.EMPTY, null);
|
||||
public void testWrapperIsBound() {
|
||||
final Index index = new Index("foo");
|
||||
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
|
||||
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
IndexModule module = new IndexModule(indexSettings, null, null, warmer, new AnalysisRegistry(null, new Environment(settings)));
|
||||
assertInstanceBinding(module, IndexModule.IndexSearcherWrapperFactory.class, (x) -> x.newWrapper(null) == null);
|
||||
module.setSearcherWrapper((s) -> new Wrapper());
|
||||
assertInstanceBinding(module, IndexModule.IndexSearcherWrapperFactory.class, (x) -> x.newWrapper(null) instanceof Wrapper);
|
||||
public class IndexModuleTests extends ESTestCase {
|
||||
private Index index;
|
||||
private Settings settings;
|
||||
private IndexSettings indexSettings;
|
||||
private Environment environment;
|
||||
private NodeEnvironment nodeEnvironment;
|
||||
private NodeServicesProvider nodeServicesProvider;
|
||||
private IndexService.ShardStoreDeleter deleter = new IndexService.ShardStoreDeleter() {
|
||||
@Override
|
||||
public void deleteShardStore(String reason, ShardLock lock, Settings indexSettings) throws IOException {
|
||||
}
|
||||
@Override
|
||||
public void addPendingDelete(ShardId shardId, Settings indexSettings) {
|
||||
}
|
||||
};
|
||||
|
||||
static NodeServicesProvider newNodeServiceProvider(Settings settings, Environment environment, Client client, ScriptEngineService... scriptEngineServices) throws IOException {
|
||||
// TODO this can be used in other places too - let's first refactor the IndicesQueriesRegistry
|
||||
ThreadPool threadPool = new ThreadPool("test");
|
||||
IndicesWarmer warmer = new IndicesWarmer(settings, threadPool);
|
||||
IndicesQueryCache indicesQueryCache = new IndicesQueryCache(settings);
|
||||
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
|
||||
PageCacheRecycler recycler = new PageCacheRecycler(settings, threadPool);
|
||||
BigArrays bigArrays = new BigArrays(recycler, circuitBreakerService);
|
||||
IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndicesFieldDataCacheListener(circuitBreakerService), threadPool);
|
||||
Set<ScriptEngineService> scriptEngines = new HashSet<>();
|
||||
scriptEngines.add(new MustacheScriptEngineService(settings));
|
||||
scriptEngines.addAll(Arrays.asList(scriptEngineServices));
|
||||
ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.EMPTY_LIST));
|
||||
IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.EMPTY_SET, new NamedWriteableRegistry());
|
||||
return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService);
|
||||
}
|
||||
|
||||
public void testEngineFactoryBound() {
|
||||
final Index index = new Index("foo");
|
||||
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
|
||||
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
IndexModule module = new IndexModule(indexSettings, null, null, warmer, new AnalysisRegistry(null, new Environment(settings)));
|
||||
assertBinding(module, EngineFactory.class, InternalEngineFactory.class);
|
||||
module.engineFactoryImpl = MockEngineFactory.class;
|
||||
assertBinding(module, EngineFactory.class, MockEngineFactory.class);
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
index = new Index("foo");
|
||||
settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
|
||||
indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
environment = new Environment(settings);
|
||||
nodeServicesProvider = newNodeServiceProvider(settings, environment, null);
|
||||
nodeEnvironment = new NodeEnvironment(settings, environment);
|
||||
}
|
||||
|
||||
public void testRegisterIndexStore() {
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
super.tearDown();
|
||||
nodeEnvironment.close();
|
||||
nodeServicesProvider.getThreadPool().shutdown();
|
||||
if (nodeServicesProvider.getThreadPool().awaitTermination(10, TimeUnit.SECONDS) == false) {
|
||||
nodeServicesProvider.getThreadPool().shutdownNow();
|
||||
}
|
||||
}
|
||||
|
||||
public void testWrapperIsBound() throws IOException {
|
||||
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
|
||||
module.setSearcherWrapper((s) -> new Wrapper());
|
||||
module.engineFactory.set(new MockEngineFactory(AssertingDirectoryReader.class));
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
assertTrue(indexService.getSearcherWrapper() instanceof Wrapper);
|
||||
assertSame(indexService.getEngineFactory(), module.engineFactory.get());
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
|
||||
public void testRegisterIndexStore() throws IOException {
|
||||
final Index index = new Index("foo");
|
||||
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.STORE_TYPE, "foo_store").build();
|
||||
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
IndexModule module = new IndexModule(indexSettings, null, null, warmer, new AnalysisRegistry(null, new Environment(settings)));
|
||||
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
|
||||
module.addIndexStore("foo_store", FooStore::new);
|
||||
assertInstanceBinding(module, IndexStore.class, (x) -> x.getClass() == FooStore.class);
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
assertTrue(indexService.getIndexStore() instanceof FooStore);
|
||||
try {
|
||||
module.addIndexStore("foo_store", FooStore::new);
|
||||
fail("already registered");
|
||||
} catch (IllegalArgumentException ex) {
|
||||
// fine
|
||||
}
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
public void testOtherServiceBound() {
|
||||
public void testOtherServiceBound() throws IOException {
|
||||
final AtomicBoolean atomicBoolean = new AtomicBoolean(false);
|
||||
final IndexEventListener eventListener = new IndexEventListener() {
|
||||
@Override
|
||||
|
@ -97,31 +164,24 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
atomicBoolean.set(true);
|
||||
}
|
||||
};
|
||||
final Index index = new Index("foo");
|
||||
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
|
||||
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
IndexModule module = new IndexModule(indexSettings, null, null, warmer, new AnalysisRegistry(null, new Environment(settings)));
|
||||
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
|
||||
Consumer<Settings> listener = (s) -> {};
|
||||
module.addIndexSettingsListener(listener);
|
||||
module.addIndexEventListener(eventListener);
|
||||
assertBinding(module, IndexService.class, IndexService.class);
|
||||
assertBinding(module, IndexServicesProvider.class, IndexServicesProvider.class);
|
||||
assertInstanceBinding(module, IndexEventListener.class, (x) -> {
|
||||
x.beforeIndexDeleted(null);
|
||||
return atomicBoolean.get();
|
||||
});
|
||||
assertInstanceBinding(module, IndexSettings.class, (x) -> x.getSettings().getAsMap().equals(indexSettings.getSettings().getAsMap()));
|
||||
assertInstanceBinding(module, IndexSettings.class, (x) -> x.getIndex().equals(indexSettings.getIndex()));
|
||||
assertInstanceBinding(module, IndexSettings.class, (x) -> x.getUpdateListeners().get(0) == listener);
|
||||
assertInstanceBinding(module, IndexStore.class, (x) -> x.getClass() == IndexStore.class);
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
IndexSettings x = indexService.getIndexSettings();
|
||||
assertEquals(x.getSettings().getAsMap(), indexSettings.getSettings().getAsMap());
|
||||
assertEquals(x.getIndex(), index);
|
||||
assertSame(x.getUpdateListeners().get(0), listener);
|
||||
indexService.getIndexEventListener().beforeIndexDeleted(null);
|
||||
assertTrue(atomicBoolean.get());
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
|
||||
public void testListener() {
|
||||
final Index index = new Index("foo");
|
||||
final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
|
||||
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
|
||||
IndexModule module = new IndexModule(indexSettings, null, null, warmer, new AnalysisRegistry(null, new Environment(settings)));
|
||||
public void testListener() throws IOException {
|
||||
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
|
||||
Consumer<Settings> listener = (s) -> {
|
||||
};
|
||||
module.addIndexSettingsListener(listener);
|
||||
|
@ -139,18 +199,21 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
} catch (IllegalArgumentException ex) {
|
||||
|
||||
}
|
||||
assertInstanceBinding(module, IndexSettings.class, (x) -> x.getUpdateListeners().size() == 1);
|
||||
assertInstanceBinding(module, IndexSettings.class, (x) -> x.getUpdateListeners().get(0) == listener);
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
IndexSettings x = indexService.getIndexSettings();
|
||||
assertEquals(1, x.getUpdateListeners().size());
|
||||
assertSame(x.getUpdateListeners().get(0), listener);
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
public void testAddSimilarity() {
|
||||
public void testAddSimilarity() throws IOException {
|
||||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put("index.similarity.my_similarity.type", "test_similarity")
|
||||
.put("index.similarity.my_similarity.key", "there is a key")
|
||||
.put("path.home", createTempDir().toString())
|
||||
.build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, new AnalysisRegistry(null, environment));
|
||||
module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() {
|
||||
@Override
|
||||
public String name() {
|
||||
|
@ -162,42 +225,39 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
return new TestSimilarity(settings.get("key"));
|
||||
}
|
||||
});
|
||||
assertInstanceBinding(module, SimilarityService.class, (inst) -> {
|
||||
if (inst instanceof SimilarityService) {
|
||||
assertNotNull(inst.getSimilarity("my_similarity"));
|
||||
assertTrue(inst.getSimilarity("my_similarity").get() instanceof TestSimilarity);
|
||||
assertEquals("my_similarity", inst.getSimilarity("my_similarity").name());
|
||||
assertEquals("there is a key" , ((TestSimilarity)inst.getSimilarity("my_similarity").get()).key);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
SimilarityService similarityService = indexService.similarityService();
|
||||
assertNotNull(similarityService.getSimilarity("my_similarity"));
|
||||
assertTrue(similarityService.getSimilarity("my_similarity").get() instanceof TestSimilarity);
|
||||
assertEquals("my_similarity", similarityService.getSimilarity("my_similarity").name());
|
||||
assertEquals("there is a key", ((TestSimilarity) similarityService.getSimilarity("my_similarity").get()).key);
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
public void testSetupUnknownSimilarity() {
|
||||
public void testSetupUnknownSimilarity() throws IOException {
|
||||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put("index.similarity.my_similarity.type", "test_similarity")
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put("path.home", createTempDir().toString())
|
||||
.build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, new AnalysisRegistry(null, environment));
|
||||
try {
|
||||
assertInstanceBinding(module, SimilarityService.class, (inst) -> inst instanceof SimilarityService);
|
||||
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("Unknown Similarity type [test_similarity] for [my_similarity]", ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void testSetupWithoutType() {
|
||||
|
||||
public void testSetupWithoutType() throws IOException {
|
||||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put("index.similarity.my_similarity.foo", "bar")
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null,new AnalysisRegistry(null, environment));
|
||||
try {
|
||||
assertInstanceBinding(module, SimilarityService.class, (inst) -> inst instanceof SimilarityService);
|
||||
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("Similarity [my_similarity] must have an associated type", ex.getMessage());
|
||||
}
|
||||
|
@ -207,7 +267,7 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, new AnalysisRegistry(null, environment));
|
||||
try {
|
||||
module.registerQueryCache("index", IndexQueryCache::new);
|
||||
fail("only once");
|
||||
|
@ -230,12 +290,12 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testRegisterCustomQueryCache() {
|
||||
public void testRegisterCustomQueryCache() throws IOException {
|
||||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put(IndexModule.QUERY_CACHE_TYPE, "custom")
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, new AnalysisRegistry(null, environment));
|
||||
module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
|
||||
try {
|
||||
module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
|
||||
|
@ -243,15 +303,20 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [custom]");
|
||||
}
|
||||
assertInstanceBinding(module, QueryCache.class, (x) -> x instanceof CustomQueryCache);
|
||||
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
assertTrue(indexService.cache().query() instanceof CustomQueryCache);
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
public void testDefaultQueryCacheImplIsSelected() {
|
||||
public void testDefaultQueryCacheImplIsSelected() throws IOException {
|
||||
Settings indexSettings = Settings.settingsBuilder()
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, null, warmer, new AnalysisRegistry(null, new Environment(indexSettings)));
|
||||
assertInstanceBinding(module, QueryCache.class, (x) -> x instanceof IndexQueryCache);
|
||||
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings, Collections.EMPTY_LIST), null, new AnalysisRegistry(null, environment));
|
||||
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider);
|
||||
assertTrue(indexService.cache().query() instanceof IndexQueryCache);
|
||||
indexService.close("simon says", false);
|
||||
}
|
||||
|
||||
class CustomQueryCache implements QueryCache {
|
||||
|
@ -275,8 +340,6 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static class TestSimilarity extends Similarity {
|
||||
private final Similarity delegate = new BM25Similarity();
|
||||
private final String key;
|
||||
|
@ -305,6 +368,15 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
public static final class FooStore extends IndexStore {
|
||||
|
||||
public FooStore(IndexSettings indexSettings, IndexStoreConfig config) {
|
||||
super(indexSettings, config);
|
||||
}
|
||||
}
|
||||
|
||||
public static final class Wrapper extends IndexSearcherWrapper {
|
||||
|
||||
@Override
|
||||
|
@ -318,11 +390,4 @@ public class IndexModuleTests extends ModuleTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public static final class FooStore extends IndexStore {
|
||||
|
||||
public FooStore(IndexSettings indexSettings, IndexStoreConfig config) {
|
||||
super(indexSettings, config);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -65,7 +65,8 @@ public class SettingsListenerIT extends ESIntegTestCase {
clusterModule.registerIndexDynamicSetting("index.test.new.setting", Validator.INTEGER);
}

public void onModule(IndexModule module) {
@Override
public void onIndexModule(IndexModule module) {
if (module.getIndex().getName().equals("test")) { // only for the test index
module.addIndexSettingsListener(service);
service.accept(module.getSettings());

@@ -93,7 +93,17 @@ public class BitSetFilterCacheTests extends ESTestCase {
IndexReader reader = DirectoryReader.open(writer, false);
IndexSearcher searcher = new IndexSearcher(reader);

BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {

}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

}
});
BitSetProducer filter = cache.getBitSetProducer(new TermQuery(new Term("field", "value")));
assertThat(matchCount(filter, reader), equalTo(3));

@@ -136,8 +146,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
final AtomicInteger onCacheCalls = new AtomicInteger();
final AtomicInteger onRemoveCalls = new AtomicInteger();

final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
cache.setListener(new BitsetFilterCache.Listener() {
final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {
onCacheCalls.incrementAndGet();

@@ -174,35 +183,12 @@ public class BitSetFilterCacheTests extends ESTestCase {
assertEquals(0, stats.get());
}

public void testSetListenerTwice() {
final BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer);
cache.setListener(new BitsetFilterCache.Listener() {

@Override
public void onCache(ShardId shardId, Accountable accountable) {

}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

}
});
public void testSetNullListener() {
try {
cache.setListener(new BitsetFilterCache.Listener() {

@Override
public void onCache(ShardId shardId, Accountable accountable) {

}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

}
});
fail("can't set it twice");
} catch (IllegalStateException ex) {
new BitsetFilterCache(INDEX_SETTINGS, warmer, null);
fail("listener can't be null");
} catch (IllegalArgumentException ex) {
assertEquals("listener must not be null", ex.getMessage());
// all is well
}
}

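As the test changes above suggest, BitsetFilterCache now takes its Listener in the constructor (a null listener is rejected with "listener must not be null") rather than via a later setListener call. A minimal usage sketch of that pattern, reusing the INDEX_SETTINGS and warmer names from the test above:

BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
    @Override
    public void onCache(ShardId shardId, Accountable accountable) {
        // invoked when a bitset for a shard is loaded into the cache
    }

    @Override
    public void onRemoval(ShardId shardId, Accountable accountable) {
        // invoked when a cached bitset for a shard is evicted
    }
});
BitSetProducer producer = cache.getBitSetProducer(new TermQuery(new Term("field", "value")));
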
@@ -39,14 +39,30 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;

import java.io.IOException;
import java.util.Collections;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.instanceOf;

@SuppressCodecs("*") // we test against default codec so never get a random one here!
public class CodecTests extends ESSingleNodeTestCase {
public class CodecTests extends ESTestCase {
public void testResolveDefaultCodecs() throws Exception {
CodecService codecService = createCodecService();
assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));

@@ -90,13 +106,15 @@ public class CodecTests extends ESSingleNodeTestCase {
dir.close();
}

private static CodecService createCodecService() {
return createCodecService(Settings.Builder.EMPTY_SETTINGS);
}

private static CodecService createCodecService(Settings settings) {
IndexService indexService = createIndex("test", settings);
return indexService.getIndexServices().getCodecService();
private static CodecService createCodecService() throws IOException {
Settings nodeSettings = settingsBuilder()
.put("path.home", createTempDir())
.build();
IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings, Collections.emptyList());
SimilarityService similarityService = new SimilarityService(settings, Collections.EMPTY_MAP);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings);
MapperService service = new MapperService(settings, analysisService, similarityService);
return new CodecService(service, ESLoggerFactory.getLogger("test"));
}

}

@@ -148,7 +148,7 @@ public class InternalEngineTests extends ESTestCase {
public void setUp() throws Exception {
super.setUp();

CodecService codecService = new CodecService(INDEX_SETTINGS, null);
CodecService codecService = new CodecService(null, logger);
String name = Codec.getDefault().getName();
if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
// some codecs are read only so we only take the ones that we have in the service and randomly

@@ -264,7 +264,7 @@ public class InternalEngineTests extends ESTestCase {

EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, INDEX_SETTINGS), indexSettings
, null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(INDEX_SETTINGS, null), new Engine.EventListener() {
iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), new Engine.EventListener() {
@Override
public void onFailedEngine(String reason, @Nullable Throwable t) {
// we don't need to notify anybody in this test

@@ -1576,7 +1576,7 @@ public class InternalEngineTests extends ESTestCase {
}

public void testSettings() {
CodecService codecService = new CodecService(INDEX_SETTINGS, null);
CodecService codecService = new CodecService(null, logger);
LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();

assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());

@@ -1972,7 +1972,7 @@ public class InternalEngineTests extends ESTestCase {

EngineConfig brokenConfig = new EngineConfig(shardId, threadPool, config.getIndexingService(), config.getIndexSettings()
, null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getMergeSchedulerConfig(),
config.getAnalyzer(), config.getSimilarity(), new CodecService(INDEX_SETTINGS, null), config.getEventListener()
config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener()
, config.getTranslogRecoveryPerformer(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5));

try {

@@ -111,7 +111,7 @@ public class ShadowEngineTests extends ESTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
CodecService codecService = new CodecService(INDEX_SETTINGS, null);
CodecService codecService = new CodecService(null, logger);
String name = Codec.getDefault().getName();
if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
// some codecs are read only so we only take the ones that we have in the service and randomly

@@ -229,7 +229,7 @@ public class ShadowEngineTests extends ESTestCase {
TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, IndexSettingsModule.newIndexSettings(shardId.index(), indexSettings, Collections.EMPTY_LIST), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, IndexSettingsModule.newIndexSettings(shardId.index(), indexSettings, Collections.EMPTY_LIST)), indexSettings
, null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
iwc.getAnalyzer(), iwc.getSimilarity() , new CodecService(INDEX_SETTINGS, null), new Engine.EventListener() {
iwc.getAnalyzer(), iwc.getSimilarity() , new CodecService(null, logger), new Engine.EventListener() {
@Override
public void onFailedEngine(String reason, @Nullable Throwable t) {
// we don't need to notify anybody in this test

@@ -919,7 +919,7 @@ public class ShadowEngineTests extends ESTestCase {
}

public void testSettings() {
CodecService codecService = new CodecService(INDEX_SETTINGS, null);
CodecService codecService = new CodecService(null, logger);
assertEquals(replicaEngine.config().getCodec().getName(), codecService.codec(codecName).getName());
}

@@ -135,7 +135,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
}

protected Nested createNested(IndexSearcher searcher, Query parentFilter, Query childFilter) throws IOException {
BitsetFilterCache s = indexService.bitsetFilterCache();
BitsetFilterCache s = indexService.cache().bitsetFilterCache();
return new Nested(s.getBitSetProducer(parentFilter), searcher.createNormalizedWeight(childFilter, false));
}

@@ -1,33 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.externalvalues;

import org.elasticsearch.common.inject.AbstractModule;

/**
*
*/
public class ExternalIndexModule extends AbstractModule {

@Override
protected void configure() {
bind(RegisterExternalTypes.class).asEagerSingleton();
}
}

@@ -19,14 +19,19 @@

package org.elasticsearch.index.mapper.externalvalues;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
import java.util.Collections;
import java.io.Closeable;
import java.util.List;

public class ExternalMapperPlugin extends Plugin {

public static final String EXTERNAL = "external";
public static final String EXTERNAL_BIS = "external_bis";
public static final String EXTERNAL_UPPER = "external_upper";

@Override
public String name() {
return "external-mappers";

@@ -38,7 +43,11 @@ public class ExternalMapperPlugin extends Plugin {
}

@Override
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new ExternalIndexModule());
public void onIndexService(IndexService indexService) {
final MapperService mapperService = indexService.mapperService();
mapperService.documentMapperParser().putRootTypeParser(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR"));
}
}
}

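The plugin above now registers its mappers directly against the MapperService once the index service exists, instead of binding a registration component through a Guice index module. A minimal sketch of that pattern; MyMapperPlugin and MyMapper.TypeParser are hypothetical names used only for illustration, while the putTypeParser call mirrors what ExternalMapperPlugin does above:

import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;

public class MyMapperPlugin extends Plugin { // hypothetical plugin

    @Override
    public String name() {
        return "my-mappers";
    }

    @Override
    public String description() {
        return "registers a custom field mapper without a Guice index module";
    }

    public void onIndexService(IndexService indexService) {
        // MyMapper.TypeParser is hypothetical; the registration call itself is the
        // same one the updated plugins in this commit use.
        indexService.mapperService().documentMapperParser()
                .putTypeParser("my_type", new MyMapper.TypeParser());
    }
}
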
@@ -43,7 +43,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
.startObject(ExternalMetadataMapper.CONTENT_TYPE)
.endObject()
.startObject("properties")
.startObject("field").field("type", RegisterExternalTypes.EXTERNAL).endObject()
.startObject("field").field("type", ExternalMapperPlugin.EXTERNAL).endObject()
.endObject()
.endObject().endObject()).execute().get();
ensureYellow("test-idx");

@@ -85,7 +85,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
prepareCreate("test-idx").addMapping("doc",
XContentFactory.jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("f")
.field("type", RegisterExternalTypes.EXTERNAL_UPPER)
.field("type", ExternalMapperPlugin.EXTERNAL_UPPER)
.startObject("fields")
.startObject("f")
.field("type", "string")

@@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.externalvalues;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;

public class RegisterExternalTypes extends AbstractIndexComponent {
public static final String EXTERNAL = "external";
public static final String EXTERNAL_BIS = "external_bis";
public static final String EXTERNAL_UPPER = "external_upper";

@Inject
public RegisterExternalTypes(IndexSettings indexSettings, MapperService mapperService) {
super(indexSettings);

mapperService.documentMapperParser().putRootTypeParser(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar"));
mapperService.documentMapperParser().putTypeParser(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR"));
}
}

@@ -35,8 +35,8 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
MapperService mapperService = createIndex("test").mapperService();
mapperService.documentMapperParser().putRootTypeParser(ExternalMetadataMapper.CONTENT_TYPE,
new ExternalMetadataMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(RegisterExternalTypes.EXTERNAL,
new ExternalMapper.TypeParser(RegisterExternalTypes.EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));

DocumentMapper documentMapper = mapperService.documentMapperParser().parse(
XContentFactory.jsonBuilder().startObject().startObject("type")

@@ -71,13 +71,13 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {

public void testExternalValuesWithMultifield() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
mapperService.documentMapperParser().putTypeParser(RegisterExternalTypes.EXTERNAL,
new ExternalMapper.TypeParser(RegisterExternalTypes.EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));

DocumentMapper documentMapper = mapperService.documentMapperParser().parse(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field")
.field("type", RegisterExternalTypes.EXTERNAL)
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.field("type", "string")

@@ -119,21 +119,21 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
MapperService mapperService = createIndex("test").mapperService();

mapperService.documentMapperParser().putTypeParser(RegisterExternalTypes.EXTERNAL,
new ExternalMapper.TypeParser(RegisterExternalTypes.EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(RegisterExternalTypes.EXTERNAL_BIS,
new ExternalMapper.TypeParser(RegisterExternalTypes.EXTERNAL_BIS, "bar"));
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL_BIS,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL_BIS, "bar"));

DocumentMapper documentMapper = mapperService.documentMapperParser().parse(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field")
.field("type", RegisterExternalTypes.EXTERNAL)
.field("type", ExternalMapperPlugin.EXTERNAL)
.startObject("fields")
.startObject("field")
.field("type", "string")
.startObject("fields")
.startObject("generated")
.field("type", RegisterExternalTypes.EXTERNAL_BIS)
.field("type", ExternalMapperPlugin.EXTERNAL_BIS)
.endObject()
.startObject("raw")
.field("type", "string")

@@ -24,7 +24,7 @@ import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.io.JsonStringEncoder;

import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;

@@ -65,19 +65,19 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.*;
import org.elasticsearch.script.Script.ScriptParseException;

@@ -167,6 +167,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
Settings settings = Settings.settingsBuilder()
.put("name", AbstractQueryTestCase.class.toString())
.put("path.home", createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
.build();
Settings indexSettings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version).build();

@@ -226,16 +227,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
new AbstractModule() {
@Override
protected void configure() {
try {
bind(AnalysisService.class).toInstance(new AnalysisRegistry(null, new Environment(settings)).build(idxSettings));
} catch (IOException e) {
throw new ElasticsearchException(e);
}
SimilarityService service = new SimilarityService(idxSettings, Collections.emptyMap());
bind(SimilarityService.class).toInstance(service);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexCache.class).toInstance(new IndexCache(idxSettings, new NoneQueryCache(idxSettings), bitsetFilterCache));
bind(Client.class).toInstance(proxy);
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
bind(ScoreFunctionParserMapper.class).asEagerSingleton();

@@ -245,11 +236,22 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}
}
).createInjector();
SimilarityService similarityService = injector.getInstance(SimilarityService.class);
indexFieldDataService = injector.getInstance(IndexFieldDataService.class);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
SimilarityService similarityService = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService);
indexFieldDataService = new IndexFieldDataService(idxSettings, injector.getInstance(IndicesFieldDataCache.class), injector.getInstance(CircuitBreakerService.class), mapperService);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null), new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {

}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

}
});
indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
queryShardContext = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
//create some random type with some default field, those types will stick around for all of the subclasses

@@ -20,7 +20,7 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;

@@ -41,17 +41,17 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;

@@ -108,16 +108,6 @@ public class TemplateQueryParserTests extends ESTestCase {
new AbstractModule() {
@Override
protected void configure() {
try {
bind(AnalysisService.class).toInstance(new AnalysisRegistry(null, new Environment(settings)).build(idxSettings));
} catch (IOException e) {
throw new ElasticsearchException(e);
}
SimilarityService service = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexCache.class).toInstance(new IndexCache(idxSettings, new NoneQueryCache(idxSettings), bitsetFilterCache));
bind(SimilarityService.class).toInstance(service);
bind(Client.class).toInstance(proxy); // not needed here
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));

@@ -125,11 +115,23 @@ public class TemplateQueryParserTests extends ESTestCase {
}
}
).createInjector();
SimilarityService similarityService = injector.getInstance(SimilarityService.class);
IndexFieldDataService indexFieldDataService = injector.getInstance(IndexFieldDataService.class);

AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
SimilarityService similarityService = new SimilarityService(idxSettings, Collections.EMPTY_MAP);
MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService);
IndexFieldDataService indexFieldDataService =new IndexFieldDataService(idxSettings, injector.getInstance(IndicesFieldDataCache.class), injector.getInstance(CircuitBreakerService.class), mapperService);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null), new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {

}

@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

}
});
IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
context = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
}

@@ -60,7 +60,7 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexServicesProvider;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineException;

@@ -331,18 +331,15 @@ public class IndexShardTests extends ESSingleNodeTestCase {

public void testMarkAsInactiveTriggersSyncedFlush() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0));
.setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0, IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "0s"));
client().prepareIndex("test", "test").setSource("{}").get();
ensureGreen("test");
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
Boolean result = indicesService.indexService("test").getShardOrNull(0).checkIdle(0);
Boolean result = indicesService.indexService("test").getShardOrNull(0).checkIdle();
assertEquals(Boolean.TRUE, result);
assertBusy(new Runnable() { // should be very very quick
@Override
public void run() {
IndexStats indexStats = client().admin().indices().prepareStats("test").clear().get().getIndex("test");
assertNotNull(indexStats.getShards()[0].getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
}
assertBusy(() -> {
IndexStats indexStats = client().admin().indices().prepareStats("test").clear().get().getIndex("test");
assertNotNull(indexStats.getShards()[0].getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
});
IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
assertNotNull(indexStats.getShards()[0].getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));

@@ -1014,8 +1011,8 @@ public class IndexShardTests extends ESSingleNodeTestCase {
private final IndexShard reinitWithWrapper(IndexService indexService, IndexShard shard, IndexSearcherWrapper wrapper) throws IOException {
ShardRouting routing = new ShardRouting(shard.routingEntry());
shard.close("simon says", true);
IndexServicesProvider indexServices = indexService.getIndexServices();
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), wrapper, indexServices);
NodeServicesProvider indexServices = indexService.getIndexServices();
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), indexService.cache(), indexService.mapperService(), indexService.similarityService(), indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper, indexServices);
ShardRoutingHelper.reinit(routing);
newShard.updateRoutingEntry(routing, false);
DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);

@@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;

@@ -90,7 +91,8 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas
};
indicesService.deleteIndex("test", "simon says");
try {
IndexService index = indicesService.createIndex(metaData, Arrays.asList(countingListener));
NodeServicesProvider nodeServicesProvider = getInstanceFromNode(NodeServicesProvider.class);
IndexService index = indicesService.createIndex(nodeServicesProvider, metaData, Arrays.asList(countingListener));
ShardRouting newRouting = new ShardRouting(shardRouting);
String nodeId = newRouting.currentNodeId();
ShardRoutingHelper.moveToUnassigned(newRouting, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "boom"));

@@ -36,7 +36,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
}

public void testIndexBufferPushedToEngine() throws InterruptedException {
createNode(Settings.builder().put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "100000h",
createNode(Settings.builder().put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "100000h",
IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "32mb",
IndexShard.INDEX_REFRESH_INTERVAL, "-1").build());

@@ -65,7 +65,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
}

public void testInactivePushedToShard() throws InterruptedException {
createNode(Settings.builder().put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "100ms",
createNode(Settings.builder().put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "100ms",
IndexingMemoryController.SHARD_INACTIVE_INTERVAL_TIME_SETTING, "100ms",
IndexShard.INDEX_REFRESH_INTERVAL, "-1").build());

@@ -22,9 +22,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;

@@ -54,7 +53,7 @@ public class IndexingMemoryControllerTests extends ESTestCase {
public MockController(Settings settings) {
super(Settings.builder()
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
.put(SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(settings)
.build(),
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb

@@ -102,11 +101,12 @@ public class IndexingMemoryControllerTests extends ESTestCase {
}

@Override
protected Boolean checkIdle(ShardId shardId, long inactiveTimeNS) {
protected Boolean checkIdle(ShardId shardId) {
final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
Long ns = lastIndexTimeNanos.get(shardId);
if (ns == null) {
return null;
} else if (currentTimeInNanos() - ns >= inactiveTimeNS) {
} else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
indexingBuffers.put(shardId, INACTIVE);
translogBuffers.put(shardId, INACTIVE);
activeShards.remove(shardId);

@@ -165,7 +165,7 @@ public class IndexingMemoryControllerTests extends ESTestCase {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
.put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "5s")
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
.build());

final ShardId shard1 = new ShardId("test", 1);

@@ -33,13 +33,13 @@ import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;

import java.io.IOException;

@@ -212,8 +212,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
public String description() {
return "a mock reader wrapper that throws random exceptions for testing";
}
public void onModule(MockEngineSupportModule module) {
module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;

public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class);
}
}

@@ -32,12 +32,12 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.engine.MockEngineSupportModule;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;

import java.io.IOException;

@@ -159,8 +159,8 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
public String description() {
return "a mock reader wrapper that throws random exceptions for testing";
}
public void onModule(MockEngineSupportModule module) {
module.wrapperImpl = RandomExceptionDirectoryReaderWrapper.class;
public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class);
}
}

@@ -65,12 +65,6 @@ public class JvmExamplePlugin extends Plugin {
return services;
}

@Override
public Collection<Module> indexModules(Settings indexSettings) { return Collections.emptyList();}

@Override
public Collection<Class<? extends Closeable>> indexServices() { return Collections.emptyList();}

@Override
public Settings additionalSettings() {
return Settings.EMPTY;

@@ -1,35 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.murmur3;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;

public class RegisterMurmur3FieldMapper extends AbstractIndexComponent {

@Inject
public RegisterMurmur3FieldMapper(IndexSettings indexSettings, MapperService mapperService) {
super(indexSettings);
mapperService.documentMapperParser().putTypeParser(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
}

}

@@ -1,31 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.plugin.mapper;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.mapper.murmur3.RegisterMurmur3FieldMapper;

public class MapperMurmur3IndexModule extends AbstractModule {

@Override
protected void configure() {
bind(RegisterMurmur3FieldMapper.class).asEagerSingleton();
}
}

@@ -19,12 +19,12 @@

package org.elasticsearch.plugin.mapper;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.murmur3.Murmur3FieldMapper;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
import java.util.Collections;
import java.io.Closeable;
import java.util.List;

public class MapperMurmur3Plugin extends Plugin {

@@ -39,8 +39,8 @@ public class MapperMurmur3Plugin extends Plugin {
}

@Override
public Collection<Module> indexModules(Settings settings) {
return Collections.<Module>singletonList(new MapperMurmur3IndexModule());
public void onIndexService(IndexService indexService) {
indexService.mapperService().documentMapperParser().putTypeParser(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
}

}

@@ -1,35 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.size;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;

public class RegisterSizeFieldMapper extends AbstractIndexComponent {

@Inject
public RegisterSizeFieldMapper(IndexSettings indexSettings, MapperService mapperService) {
super(indexSettings);
mapperService.documentMapperParser().putRootTypeParser(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
}

}

@@ -1,32 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.plugin.mapper;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.mapper.size.RegisterSizeFieldMapper;

public class MapperSizeIndexModule extends AbstractModule {

@Override
protected void configure() {
bind(RegisterSizeFieldMapper.class).asEagerSingleton();
}

}

@@ -19,12 +19,12 @@

package org.elasticsearch.plugin.mapper;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.size.SizeFieldMapper;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
import java.util.Collections;
import java.io.Closeable;
import java.util.List;

public class MapperSizePlugin extends Plugin {

@@ -39,8 +39,9 @@ public class MapperSizePlugin extends Plugin {
}

@Override
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new MapperSizeIndexModule());
public void onIndexService(IndexService indexService) {
indexService.mapperService().documentMapperParser().putRootTypeParser(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
}

}

@@ -36,8 +36,9 @@ public class SMBStorePlugin extends Plugin {
return "SMB Store Plugin";
}

public void onModule(IndexModule storeModule) {
storeModule.addIndexStore("smb_mmap_fs", SmbMmapFsIndexStore::new);
storeModule.addIndexStore("smb_simple_fs", SmbSimpleFsIndexStore::new);
@Override
public void onIndexModule(IndexModule indexModule) {
indexModule.addIndexStore("smb_mmap_fs", SmbMmapFsIndexStore::new);
indexModule.addIndexStore("smb_simple_fs", SmbSimpleFsIndexStore::new);
}
}

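Plugins that contribute index-level pieces (index stores, event listeners, settings listeners) now override onIndexModule(IndexModule) instead of onModule(IndexModule) or indexModules(Settings), as SMBStorePlugin above and MockFSIndexStore below do. A minimal sketch of the pattern; MyStorePlugin and MyIndexStore are hypothetical names used only for illustration, while addIndexStore is the same call the updated plugins use:

import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.Plugin;

public class MyStorePlugin extends Plugin { // hypothetical plugin

    @Override
    public String name() {
        return "my-store";
    }

    @Override
    public String description() {
        return "registers a custom IndexStore through onIndexModule";
    }

    @Override
    public void onIndexModule(IndexModule indexModule) {
        // MyIndexStore::new is hypothetical; it stands in for a concrete IndexStore
        // constructor reference such as SmbMmapFsIndexStore::new above.
        indexModule.addIndexStore("my_fs", MyIndexStore::new);
    }
}
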
@@ -18,12 +18,12 @@
*/
package org.elasticsearch.index;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.test.engine.MockEngineSupportModule;

import java.util.Collection;
import java.util.Collections;

@@ -38,11 +38,27 @@ public class MockEngineFactoryPlugin extends Plugin {
public String description() {
return "a mock engine factory for testing";
}

private Class<? extends FilterDirectoryReader> readerWrapper = AssertingDirectoryReader.class;

@Override
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new MockEngineSupportModule());
public void onIndexModule(IndexModule module) {
module.engineFactory.set(new MockEngineFactory(readerWrapper));
}
public void onModule(IndexModule module) {
module.engineFactoryImpl = MockEngineFactory.class;

@Override
public Collection<Module> nodeModules() {
return Collections.singleton(new MockEngineReaderModule());
}

public class MockEngineReaderModule extends AbstractModule {

public void setReaderClass(Class<? extends FilterDirectoryReader> readerWrapper) {
MockEngineFactoryPlugin.this.readerWrapper = readerWrapper;
}

@Override
protected void configure() {
}
}
}

@@ -19,14 +19,32 @@
package org.elasticsearch.test;

import org.elasticsearch.Version;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;

import java.util.Collection;
import java.util.Collections;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;

public class IndexSettingsModule extends AbstractModule {

@@ -58,7 +58,8 @@ public final class MockIndexEventListener {
return "a mock index listener for testing only";
}

public void onModule(IndexModule module) {
@Override
public void onIndexModule(IndexModule module) {
module.addIndexEventListener(listener);
}

@@ -104,7 +104,7 @@ public class TestSearchContext extends SearchContext {
this.bigArrays = bigArrays.withCircuitBreaking();
this.indexService = indexService;
this.indexFieldDataService = indexService.fieldData();
this.fixedBitSetFilterCache = indexService.bitsetFilterCache();
this.fixedBitSetFilterCache = indexService.cache().bitsetFilterCache();
this.threadPool = threadPool;
this.indexShard = indexService.getShardOrNull(0);
this.scriptService = scriptService;

@@ -33,16 +33,10 @@ import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

public final class MockEngineFactory implements EngineFactory {
@BindingAnnotation
@Target({FIELD, PARAMETER})
@Retention(RUNTIME)
public @interface MockReaderType {
}

private Class<? extends FilterDirectoryReader> wrapper;
private final Class<? extends FilterDirectoryReader> wrapper;

@Inject
public MockEngineFactory(@MockReaderType Class wrapper) {
public MockEngineFactory(Class<? extends FilterDirectoryReader> wrapper) {
this.wrapper = wrapper;
}

@@ -1,32 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.engine;

import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.elasticsearch.common.inject.AbstractModule;

public class MockEngineSupportModule extends AbstractModule {
public Class<? extends FilterDirectoryReader> wrapperImpl = AssertingDirectoryReader.class;

@Override
protected void configure() {
bind(Class.class).annotatedWith(MockEngineFactory.MockReaderType.class).toInstance(wrapperImpl);
}
}

@@ -52,13 +52,14 @@ public class MockFSIndexStore extends IndexStore {
return Settings.builder().put(IndexModule.STORE_TYPE, "mock").build();
}

public void onModule(IndexModule module) {
Settings indexSettings = module.getSettings();
@Override
public void onIndexModule(IndexModule indexModule) {
Settings indexSettings = indexModule.getSettings();
if ("mock".equals(indexSettings.get(IndexModule.STORE_TYPE))) {
if (indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, true)) {
module.addIndexEventListener(new Listener());
indexModule.addIndexEventListener(new Listener());
}
module.addIndexStore("mock", MockFSIndexStore::new);
indexModule.addIndexStore("mock", MockFSIndexStore::new);
}
}
}