First steps of integrating converted settings into the cluster/index settings infrastructure
The old infrastructure has been removed in this commit, so nothing uses `DynamicSettings` anymore and all index-scoped settings must be registered before the node has fully started up.
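To illustrate the registration-and-validation model this moves towards, here is a minimal self-contained sketch in plain Java. It is not the Elasticsearch API: the names below (`ScopedSettings`, `register`, `validate`) are invented for illustration; only the behaviour mirrors the `AbstractScopedSettings.validate(key, value)` / `IndexScopeSettings` registry introduced in the diff — every setting must be registered up front, and any later value is validated against that registry instead of the removed `DynamicSettings`/`Validator` pair.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Minimal analogue of a scoped-settings registry: settings are registered
// with a parser up front, and later updates are validated by parsing them.
final class ScopedSettings {
    private final Map<String, Function<String, ?>> registered = new HashMap<>();

    <T> void register(String key, Function<String, T> parser) {
        if (registered.putIfAbsent(key, parser) != null) {
            throw new IllegalArgumentException("setting [" + key + "] registered twice");
        }
    }

    // Mirrors the validate(key, value) added to AbstractScopedSettings below:
    // unknown keys are rejected, known keys must parse cleanly.
    void validate(String key, String value) {
        Function<String, ?> parser = registered.get(key);
        if (parser == null) {
            throw new IllegalArgumentException("unknown setting [" + key + "]");
        }
        parser.apply(value); // throws if the value is invalid
    }

    public static void main(String[] args) {
        ScopedSettings settings = new ScopedSettings();
        settings.register("index.priority", Integer::parseInt);
        settings.validate("index.priority", "5");              // passes
        try {
            settings.validate("index.number_of_shards", "3");  // unknown key -> rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```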
parent 211f80fcad
commit 04045a072f
@@ -22,14 +22,9 @@ package org.elasticsearch.action.admin.cluster.settings;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import static org.elasticsearch.cluster.ClusterState.builder;
 
 /**
@@ -57,11 +52,11 @@ final class SettingsUpdater {
         boolean changed = false;
         Settings.Builder transientSettings = Settings.settingsBuilder();
         transientSettings.put(currentState.metaData().transientSettings());
-        changed |= apply(transientToApply, transientSettings, transientUpdates, "transient");
+        changed |= clusterSettings.applyDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient");
 
         Settings.Builder persistentSettings = Settings.settingsBuilder();
         persistentSettings.put(currentState.metaData().persistentSettings());
-        changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent");
+        changed |= clusterSettings.applyDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent");
 
         if (!changed) {
             return currentState;
@@ -86,42 +81,5 @@ final class SettingsUpdater {
         return build;
     }
 
-    private boolean apply(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
-        boolean changed = false;
-        final Set<String> toRemove = new HashSet<>();
-        Settings.Builder settingsBuilder = Settings.settingsBuilder();
-        for (Map.Entry<String, String> entry : toApply.getAsMap().entrySet()) {
-            if (entry.getValue() == null) {
-                toRemove.add(entry.getKey());
-            } else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) {
-                settingsBuilder.put(entry.getKey(), entry.getValue());
-                updates.put(entry.getKey(), entry.getValue());
-                changed = true;
-            } else {
-                throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable");
-            }
-
-        }
-        changed |= applyDeletes(toRemove, target);
-        target.put(settingsBuilder.build());
-        return changed;
-    }
-
-    private final boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
-        boolean changed = false;
-        for (String entry : deletes) {
-            Set<String> keysToRemove = new HashSet<>();
-            Set<String> keySet = builder.internalMap().keySet();
-            for (String key : keySet) {
-                if (Regex.simpleMatch(entry, key)) {
-                    keysToRemove.add(key);
-                }
-            }
-            for (String key : keysToRemove) {
-                builder.remove(key);
-                changed = true;
-            }
-        }
-        return changed;
-    }
 }
@@ -23,7 +23,6 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
 import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
 import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexTemplateFilter;
 import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
@@ -55,19 +54,12 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio
 import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.cluster.service.InternalClusterService;
-import org.elasticsearch.cluster.settings.DynamicSettings;
-import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.ExtensionPoint;
 import org.elasticsearch.gateway.GatewayAllocator;
-import org.elasticsearch.gateway.PrimaryShardAllocator;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.SearchSlowLog;
-import org.elasticsearch.index.settings.IndexDynamicSettings;
-import org.elasticsearch.index.MergePolicyConfig;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -98,7 +90,6 @@ public class ClusterModule extends AbstractModule {
             SnapshotInProgressAllocationDecider.class));
 
     private final Settings settings;
-    private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder();
     private final ExtensionPoint.SelectedType<ShardsAllocator> shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class);
     private final ExtensionPoint.ClassSet<AllocationDecider> allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class);
     private final ExtensionPoint.ClassSet<IndexTemplateFilter> indexTemplateFilters = new ExtensionPoint.ClassSet<>("index_template_filter", IndexTemplateFilter.class);
@@ -115,10 +106,6 @@ public class ClusterModule extends AbstractModule {
         registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class);
     }
 
-    public void registerIndexDynamicSetting(String setting, Validator validator) {
-        indexDynamicSettings.addSetting(setting, validator);
-    }
-
     public void registerAllocationDecider(Class<? extends AllocationDecider> allocationDecider) {
         allocationDeciders.registerExtension(allocationDecider);
     }
@@ -133,8 +120,6 @@ public class ClusterModule extends AbstractModule {
 
     @Override
     protected void configure() {
-        bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings.build());
-
         // bind ShardsAllocator
         String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR);
         if (shardsAllocatorType.equals(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR)) {
@@ -181,6 +181,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
     public static final String SETTING_VERSION_UPGRADED_STRING = "index.version.upgraded_string";
     public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible";
     public static final String SETTING_CREATION_DATE = "index.creation_date";
+    public static final Setting<Long> INDEX_CREATION_DATE_SETTING = Setting.longSetting(SETTING_CREATION_DATE, -1, -1, false, Setting.Scope.INDEX);
     public static final String SETTING_PRIORITY = "index.priority";
     public static final Setting<Integer> INDEX_PRIORITY_SETTING = Setting.intSetting("index.priority", 1, 0, true, Setting.Scope.INDEX);
     public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string";
@@ -653,10 +654,6 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
             return this;
         }
 
-        public long creationDate() {
-            return settings.getAsLong(SETTING_CREATION_DATE, -1l);
-        }
-
         public Builder settings(Settings.Builder settings) {
             this.settings = settings.build();
             return this;
@@ -671,11 +668,6 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
             return mappings.get(type);
         }
 
-        public Builder removeMapping(String mappingType) {
-            mappings.remove(mappingType);
-            return this;
-        }
-
         public Builder putMapping(String type, String source) throws IOException {
             try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
                 putMapping(new MappingMetaData(type, parser.mapOrdered()));
@@ -718,24 +710,11 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
             return this;
         }
 
-        public Builder removeCustom(String type) {
-            this.customs.remove(type);
-            return this;
-        }
-
-        public Custom getCustom(String type) {
-            return this.customs.get(type);
-        }
-
         public Builder putActiveAllocationIds(int shardId, Set<String> allocationIds) {
             activeAllocationIds.put(shardId, new HashSet(allocationIds));
             return this;
         }
 
-        public Set<String> getActiveAllocationIds(int shardId) {
-            return activeAllocationIds.get(shardId);
-        }
-
         public long version() {
             return this.version;
         }
@@ -177,6 +177,9 @@ public class MetaDataCreateIndexService extends AbstractComponent {
     public void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
         Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder();
         updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
+        for (Map.Entry<String, String> entry : updatedSettingsBuilder.internalMap().entrySet()) {
+            indexScopeSettings.validate(entry.getKey(), entry.getValue());
+        }
         request.settings(updatedSettingsBuilder.build());
 
         clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]",
@@ -473,7 +476,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 validationErrors.add("custom path [" + customPath + "] is not a sub-path of path.shared_data [" + env.sharedDataFile() + "]");
             }
         }
-        //nocommit - this can be removed?
+        //norelease - this can be removed?
         Integer number_of_primaries = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
         Integer number_of_replicas = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null);
         if (number_of_primaries != null && number_of_primaries <= 0) {
@@ -34,15 +34,14 @@ import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
-import org.elasticsearch.cluster.settings.DynamicSettings;
-import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.IndexScopeSettings;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.settings.IndexDynamicSettings;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -63,18 +62,17 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
 
     private final AllocationService allocationService;
 
-    private final DynamicSettings dynamicSettings;
-
     private final IndexNameExpressionResolver indexNameExpressionResolver;
+    private final IndexScopeSettings indexScopeSettings;
 
     @Inject
-    public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, @IndexDynamicSettings DynamicSettings dynamicSettings, IndexNameExpressionResolver indexNameExpressionResolver) {
+    public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, IndexScopeSettings indexScopeSettings, IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings);
         this.clusterService = clusterService;
         this.indexNameExpressionResolver = indexNameExpressionResolver;
         this.clusterService.add(this);
         this.allocationService = allocationService;
-        this.dynamicSettings = dynamicSettings;
+        this.indexScopeSettings = indexScopeSettings;
     }
 
     @Override
@@ -147,40 +145,32 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
     public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
         Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder();
         updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
+        Settings.Builder settingsForClosedIndices = Settings.builder();
+        Settings.Builder settingsForOpenIndices = Settings.builder();
+        Settings.Builder skipppedSettings = Settings.builder();
 
 
         // never allow to change the number of shards
-        for (String key : updatedSettingsBuilder.internalMap().keySet()) {
-            if (key.equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) {
+        for (Map.Entry<String, String> entry : updatedSettingsBuilder.internalMap().entrySet()) {
+            if (entry.getKey().equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) {
                 listener.onFailure(new IllegalArgumentException("can't change the number of shards for an index"));
                 return;
             }
-        }
-
-        final Settings closeSettings = updatedSettingsBuilder.build();
-
-        final Set<String> removedSettings = new HashSet<>();
-        final Set<String> errors = new HashSet<>();
-        for (Map.Entry<String, String> setting : updatedSettingsBuilder.internalMap().entrySet()) {
-            if (!dynamicSettings.hasDynamicSetting(setting.getKey())) {
-                removedSettings.add(setting.getKey());
-            } else {
-                String error = dynamicSettings.validateDynamicSetting(setting.getKey(), setting.getValue(), clusterService.state());
-                if (error != null) {
-                    errors.add("[" + setting.getKey() + "] - " + error);
-                }
-            }
-        }
-
-        if (!errors.isEmpty()) {
-            listener.onFailure(new IllegalArgumentException("can't process the settings: " + errors.toString()));
-            return;
-        }
-
-        if (!removedSettings.isEmpty()) {
-            for (String removedSetting : removedSettings) {
-                updatedSettingsBuilder.remove(removedSetting);
-            }
-        }
-        final Settings openSettings = updatedSettingsBuilder.build();
+            Setting setting = indexScopeSettings.get(entry.getKey());
+            if (setting == null) {
+                throw new IllegalArgumentException("setting [" + entry.getKey() + "] is unknown");
+            }
+            indexScopeSettings.validate(entry.getKey(), entry.getValue());
+            settingsForClosedIndices.put(entry.getKey(), entry.getValue());
+            if (setting.isDynamic()) {
+                settingsForOpenIndices.put(entry.getKey(), entry.getValue());
+            } else {
+                skipppedSettings.put(entry.getKey(), entry.getValue());
+            }
+        }
+        final Settings skippedSettigns = skipppedSettings.build();
+        final Settings closedSettings = settingsForClosedIndices.build();
+        final Settings openSettings = settingsForOpenIndices.build();
 
         clusterService.submitStateUpdateTask("update-settings",
             new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(Priority.URGENT, request, listener) {
@@ -208,16 +198,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                     }
                 }
 
-                if (closeIndices.size() > 0 && closeSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) {
+                if (closeIndices.size() > 0 && closedSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) {
                     throw new IllegalArgumentException(String.format(Locale.ROOT,
                             "Can't update [%s] on closed indices [%s] - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS,
                             closeIndices
                     ));
                 }
-                if (!removedSettings.isEmpty() && !openIndices.isEmpty()) {
+                if (!skippedSettigns.getAsMap().isEmpty() && !openIndices.isEmpty()) {
                     throw new IllegalArgumentException(String.format(Locale.ROOT,
                             "Can't update non dynamic settings[%s] for open indices [%s]",
-                            removedSettings,
+                            skippedSettigns.getAsMap().keySet(),
                             openIndices
                     ));
                 }
@@ -272,7 +262,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
 
                 if (!closeIndices.isEmpty()) {
                     String[] indices = closeIndices.toArray(new String[closeIndices.size()]);
-                    metaDataBuilder.updateSettings(closeSettings, indices);
+                    metaDataBuilder.updateSettings(closedSettings, indices);
                 }
 
 
@@ -281,12 +271,18 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                 // now, reroute in case things change that require it (like number of replicas)
                 RoutingAllocation.Result routingResult = allocationService.reroute(updatedState, "settings update");
                 updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build();
+                for (String index : openIndices) {
+                    indexScopeSettings.dryRun(updatedState.metaData().index(index).getSettings());
+                }
+                for (String index : closeIndices) {
+                    indexScopeSettings.dryRun(updatedState.metaData().index(index).getSettings());
+                }
                 return updatedState;
             }
         });
     }
 
 
     public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
 
@@ -1,74 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cluster.settings;
-
-import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.regex.Regex;
-
-/**
- * A container for setting names and validation methods for those settings.
- */
-public class DynamicSettings {
-    private final ImmutableOpenMap<String, Validator> dynamicSettings;
-
-    public static class Builder {
-        private ImmutableOpenMap.Builder<String, Validator> settings = ImmutableOpenMap.builder();
-
-        public void addSetting(String setting, Validator validator) {
-            Validator old = settings.put(setting, validator);
-            if (old != null) {
-                throw new IllegalArgumentException("Cannot register setting [" + setting + "] twice");
-            }
-        }
-
-        public DynamicSettings build() {
-            return new DynamicSettings(settings.build());
-        }
-    }
-
-    private DynamicSettings(ImmutableOpenMap<String, Validator> settings) {
-        this.dynamicSettings = settings;
-    }
-
-    public boolean isDynamicOrLoggingSetting(String key) {
-        return hasDynamicSetting(key) || key.startsWith("logger.");
-    }
-
-    public boolean hasDynamicSetting(String key) {
-        for (ObjectCursor<String> dynamicSetting : dynamicSettings.keys()) {
-            if (Regex.simpleMatch(dynamicSetting.value, key)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public String validateDynamicSetting(String dynamicSetting, String value, ClusterState clusterState) {
-        for (ObjectObjectCursor<String, Validator> setting : dynamicSettings) {
-            if (Regex.simpleMatch(setting.key, dynamicSetting)) {
-                return setting.value.validate(dynamicSetting, value, clusterState);
-            }
-        }
-        return null;
-    }
-}
@@ -1,307 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cluster.settings;
-
-import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.common.Booleans;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.unit.TimeValue;
-
-import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue;
-import static org.elasticsearch.common.unit.MemorySizeValue.parseBytesSizeValueOrHeapRatio;
-
-
-/**
- * Validates a setting, returning a failure message if applicable.
- */
-public interface Validator {
-
-    String validate(String setting, String value, ClusterState clusterState);
-
-    Validator EMPTY = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            return null;
-        }
-    };
-
-    Validator TIME = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            if (value == null) {
-                throw new NullPointerException("value must not be null");
-            }
-            try {
-                // This never returns null:
-                TimeValue.parseTimeValue(value, null, setting);
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    Validator TIMEOUT = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (value == null) {
-                    throw new NullPointerException("value must not be null");
-                }
-                TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting);
-                assert timeValue != null;
-                if (timeValue.millis() < 0 && timeValue.millis() != -1) {
-                    return "cannot parse value [" + value + "] as a timeout";
-                }
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    Validator TIME_NON_NEGATIVE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (value == null) {
-                    throw new NullPointerException("value must not be null");
-                }
-                TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting);
-                assert timeValue != null;
-                if (timeValue.millis() < 0) {
-                    return "cannot parse value [" + value + "] as non negative time";
-                }
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    Validator FLOAT = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                Float.parseFloat(value);
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as a float";
-            }
-            return null;
-        }
-    };
-
-    Validator NON_NEGATIVE_FLOAT = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Float.parseFloat(value) < 0.0) {
-                    return "the value of the setting " + setting + " must be a non negative float";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as a double";
-            }
-            return null;
-        }
-    };
-
-    Validator DOUBLE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                Double.parseDouble(value);
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as a double";
-            }
-            return null;
-        }
-    };
-
-    Validator NON_NEGATIVE_DOUBLE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Double.parseDouble(value) < 0.0) {
-                    return "the value of the setting " + setting + " must be a non negative double";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as a double";
-            }
-            return null;
-        }
-    };
-
-    Validator DOUBLE_GTE_2 = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Double.parseDouble(value) < 2.0) {
-                    return "the value of the setting " + setting + " must be >= 2.0";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as a double";
-            }
-            return null;
-        }
-    };
-
-    Validator INTEGER = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                Integer.parseInt(value);
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as an integer";
-            }
-            return null;
-        }
-    };
-
-    Validator POSITIVE_INTEGER = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Integer.parseInt(value) <= 0) {
-                    return "the value of the setting " + setting + " must be a positive integer";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as an integer";
-            }
-            return null;
-        }
-    };
-
-    Validator NON_NEGATIVE_INTEGER = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Integer.parseInt(value) < 0) {
-                    return "the value of the setting " + setting + " must be a non negative integer";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as an integer";
-            }
-            return null;
-        }
-    };
-
-    Validator INTEGER_GTE_2 = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (Integer.parseInt(value) < 2) {
-                    return "the value of the setting " + setting + " must be >= 2";
-                }
-            } catch (NumberFormatException ex) {
-                return "cannot parse value [" + value + "] as an integer";
-            }
-            return null;
-        }
-    };
-
-    Validator BYTES_SIZE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                parseBytesSizeValue(value, setting);
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    Validator POSITIVE_BYTES_SIZE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState state) {
-            try {
-                ByteSizeValue byteSizeValue = parseBytesSizeValue(value, setting);
-                if (byteSizeValue.getBytes() <= 0) {
-                    return setting + " must be a positive byte size value";
-                }
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    Validator PERCENTAGE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                if (value == null) {
-                    return "the value of " + setting + " can not be null";
-                }
-                if (!value.endsWith("%")) {
-                    return "the value [" + value + "] for " + setting + " must end with %";
-                }
-                final double asDouble = Double.parseDouble(value.substring(0, value.length() - 1));
-                if (asDouble < 0.0 || asDouble > 100.0) {
-                    return "the value [" + value + "] for " + setting + " must be a percentage between 0% and 100%";
-                }
-            } catch (NumberFormatException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-
-    Validator BYTES_SIZE_OR_PERCENTAGE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            String byteSize = BYTES_SIZE.validate(setting, value, clusterState);
-            if (byteSize != null) {
-                String percentage = PERCENTAGE.validate(setting, value, clusterState);
-                if (percentage == null) {
-                    return null;
-                }
-                return percentage + " or be a valid bytes size value, like [16mb]";
-            }
-            return null;
-        }
-    };
-
-
-    Validator MEMORY_SIZE = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-            try {
-                parseBytesSizeValueOrHeapRatio(value, setting);
-            } catch (ElasticsearchParseException ex) {
-                return ex.getMessage();
-            }
-            return null;
-        }
-    };
-
-    public static final Validator BOOLEAN = new Validator() {
-        @Override
-        public String validate(String setting, String value, ClusterState clusterState) {
-
-            if (value != null && (Booleans.isExplicitFalse(value) || Booleans.isExplicitTrue(value))) {
-                return null;
-            }
-            return "cannot parse value [" + value + "] as a boolean";
-        }
-    };
-}
@@ -21,6 +21,7 @@ package org.elasticsearch.common.settings;
 
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.util.set.Sets;
 
 import java.util.ArrayList;
@@ -178,6 +179,18 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
         addSettingsUpdateConsumer(setting, consumer, (s) -> {});
     }
 
+    /**
+     * Validates that the setting is valid
+     */
+    public final void validate(String key, String value) {
+        Settings.Builder builder = Settings.builder().put(key, value);
+        Setting setting = get(key);
+        if (setting == null) {
+            throw new IllegalArgumentException("unknown setting [" + key + "]");
+        }
+        setting.get(builder.build());
+    }
+
     /**
      * Transactional interface to update settings.
      * @see Setting
@@ -283,4 +296,44 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
         return setting.get(this.lastSettingsApplied);
     }
 
+    public boolean applyDynamicSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
+        boolean changed = false;
+        final Set<String> toRemove = new HashSet<>();
+        Settings.Builder settingsBuilder = Settings.settingsBuilder();
+        for (Map.Entry<String, String> entry : toApply.getAsMap().entrySet()) {
+            if (entry.getValue() == null) {
+                toRemove.add(entry.getKey());
+            } else if (hasDynamicSetting(entry.getKey())) {
+                validate(entry.getKey(), entry.getValue());
+                settingsBuilder.put(entry.getKey(), entry.getValue());
+                updates.put(entry.getKey(), entry.getValue());
+                changed = true;
+            } else {
+                throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable");
+            }
+
+        }
+        changed |= applyDeletes(toRemove, target);
+        target.put(settingsBuilder.build());
+        return changed;
+    }
+
+    private static final boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
+        boolean changed = false;
+        for (String entry : deletes) {
+            Set<String> keysToRemove = new HashSet<>();
+            Set<String> keySet = builder.internalMap().keySet();
+            for (String key : keySet) {
+                if (Regex.simpleMatch(entry, key)) {
+                    keysToRemove.add(key);
+                }
+            }
+            for (String key : keysToRemove) {
+                builder.remove(key);
+                changed = true;
+            }
+        }
+        return changed;
+    }
+
 }
@@ -84,6 +84,11 @@ public final class ClusterSettings extends AbstractScopedSettings {
         return settings;
     }
 
+    @Override
+    public boolean hasDynamicSetting(String key) {
+        return isLoggerSetting(key) || super.hasDynamicSetting(key);
+    }
+
     /**
      * Returns <code>true</code> if the settings is a logger setting.
      */
@@ -46,6 +46,8 @@ import org.elasticsearch.index.IndexingSlowLog;
 import org.elasticsearch.index.MergePolicyConfig;
 import org.elasticsearch.index.MergeSchedulerConfig;
 import org.elasticsearch.index.SearchSlowLog;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.IndexStoreConfig;
 import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
@@ -68,14 +70,6 @@ import java.util.Set;
 */
 public final class IndexScopeSettings extends AbstractScopedSettings {
 
-    public IndexScopeSettings(Settings settings, Set<Setting<?>> settingsSet) {
-        super(settings, settingsSet, Setting.Scope.INDEX);
-    }
-
-    private IndexScopeSettings(Settings settings, IndexScopeSettings other, IndexMetaData metaData) {
-        super(settings, metaData.getSettings(), other);
-    }
-
     public static Set<Setting<?>> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
         IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING,
         IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING,
@@ -98,6 +92,7 @@ public final class IndexScopeSettings extends AbstractScopedSettings {
         IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING,
         IndexMetaData.INDEX_PRIORITY_SETTING,
         IndexMetaData.INDEX_DATA_PATH_SETTING,
+        IndexMetaData.INDEX_CREATION_DATE_SETTING,
         SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING,
         SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING,
         SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING,
@@ -133,9 +128,23 @@ public final class IndexScopeSettings extends AbstractScopedSettings {
         UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
         EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
         EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING,
-        IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING
+        IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING,
+        IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY,
+        FieldMapper.IGNORE_MALFORMED_SETTING,
+        FieldMapper.COERCE_SETTING,
+        Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this sucks but we can't really validate all the analyzers
     )));
 
+    public static final IndexScopeSettings DEFAULT_SCOPED_SETTINGS = new IndexScopeSettings(Settings.EMPTY, IndexScopeSettings.BUILT_IN_INDEX_SETTINGS);
+
+    public IndexScopeSettings(Settings settings, Set<Setting<?>> settingsSet) {
+        super(settings, settingsSet, Setting.Scope.INDEX);
+    }
+
+    private IndexScopeSettings(Settings settings, IndexScopeSettings other, IndexMetaData metaData) {
+        super(settings, metaData.getSettings(), other);
+    }
+
     public IndexScopeSettings copy(Settings settings, IndexMetaData metaData) {
         return new IndexScopeSettings(settings, this, metaData);
     }
@@ -296,6 +296,10 @@ public class Setting<T> extends ToXContentToBytes {
         return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope);
     }
 
+    public static Setting<Long> longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope) {
+        return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope);
+    }
+
     public static int parseInt(String s, int minValue, String key) {
         int value = Integer.parseInt(s);
         if (value < minValue) {
@@ -304,6 +308,14 @@ public class Setting<T> extends ToXContentToBytes {
         return value;
     }
 
+    public static long parseLong(String s, long minValue, String key) {
+        long value = Long.parseLong(s);
+        if (value < minValue) {
+            throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+        }
+        return value;
+    }
+
     public static Setting<Integer> intSetting(String key, int defaultValue, boolean dynamic, Scope scope) {
         return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope);
     }
@@ -60,7 +60,7 @@ public abstract class PriorityComparator implements Comparator<ShardRouting> {
     }
 
     private long timeCreated(Settings settings) {
-        return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1l);
+        return IndexMetaData.INDEX_CREATION_DATE_SETTING.get(settings);
     }
 
     protected abstract Settings getIndexSettings(String index);
@@ -89,8 +89,6 @@ public final class IndexSettings {
     public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60);
     public static final Setting<TimeValue> INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MICROSECONDS), true, Setting.Scope.INDEX);
 
-    private static final IndexScopeSettings DEFAULT_SCOPED_SETTINGS = new IndexScopeSettings(Settings.EMPTY, IndexScopeSettings.BUILT_IN_INDEX_SETTINGS);
-
     private final String uuid;
     private final Index index;
     private final Version version;
@@ -164,7 +162,7 @@ public final class IndexSettings {
     * @param nodeSettings the nodes settings this index is allocated on.
     */
    public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) {
-        this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), DEFAULT_SCOPED_SETTINGS);
+        this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), IndexScopeSettings.DEFAULT_SCOPED_SETTINGS);
    }
 
    IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, IndexScopeSettings indexScopedSettings) {
@@ -23,6 +23,7 @@ import org.apache.lucene.util.Accountable;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;

@@ -55,9 +56,17 @@ import static java.util.Collections.unmodifiableMap;
 /**
  */
 public class IndexFieldDataService extends AbstractIndexComponent implements Closeable {

-    public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
     public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
+    public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
+    public static final Setting<String> INDEX_FIELDDATA_CACHE_KEY = new Setting<>(FIELDDATA_CACHE_KEY, (s) -> FIELDDATA_CACHE_VALUE_NODE, (s) -> {
+        switch (s) {
+            case "node":
+            case "none":
+                return s;
+            default:
+                throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,none]");
+        }
+    }, false, Setting.Scope.INDEX);

     private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> {
         throw new IllegalStateException("Can't load fielddata on [" + fieldType.name()

@@ -228,7 +237,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
         if (cache == null) {
             // we default to node level cache, which in turn defaults to be unbounded
             // this means changing the node level settings is simple, just set the bounds there
-            String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE));
+            String cacheType = type.getSettings().get("cache", indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY));
             if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
                 cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type);
             } else if ("none".equals(cacheType)){
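The last hunk above is the shape most of this commit takes: a raw string lookup with an inline default becomes a typed read through a registered Setting. A minimal sketch of that pattern, reusing only the Setting/Settings API visible in this diff (the class and main method below are illustrative, not part of the commit):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class FieldDataCacheSettingSketch {
    // same shape as INDEX_FIELDDATA_CACHE_KEY above: default "node", only "node"/"none" accepted
    static final Setting<String> CACHE_TYPE = new Setting<>("index.fielddata.cache", (s) -> "node", (s) -> {
        switch (s) {
            case "node":
            case "none":
                return s;
            default:
                throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,none]");
        }
    }, false, Setting.Scope.INDEX);

    public static void main(String[] args) {
        Settings indexLevel = Settings.builder().put(CACHE_TYPE.getKey(), "none").build();
        // the setting parses and validates the value; callers no longer pass the default themselves
        String cacheType = CACHE_TYPE.get(indexLevel);     // "none"
        String fallback = CACHE_TYPE.get(Settings.EMPTY);  // "node" (the default)
        System.out.println(cacheType + " / " + fallback);
    }
}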
@@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;

@@ -47,7 +48,8 @@ import java.util.Map;
 import java.util.stream.StreamSupport;

 public abstract class FieldMapper extends Mapper implements Cloneable {
+    public static final Setting<Boolean> IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", false, false, Setting.Scope.INDEX);
+    public static final Setting<Boolean> COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", false, false, Setting.Scope.INDEX);
     public abstract static class Builder<T extends Builder, Y extends FieldMapper> extends Mapper.Builder<T, Y> {

         protected final MappedFieldType fieldType;
@@ -32,6 +32,7 @@ import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Explicit;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -51,6 +52,7 @@ import java.util.List;
 *
 */
 public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
+    private static final Setting<Boolean> COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, false, Setting.Scope.INDEX); // this is private since it has a different default

     public static class Defaults {

@@ -89,7 +91,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
             return new Explicit<>(ignoreMalformed, true);
         }
         if (context.indexSettings() != null) {
-            return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
+            return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
         }
         return Defaults.IGNORE_MALFORMED;
     }

@@ -104,7 +106,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
             return new Explicit<>(coerce, true);
         }
         if (context.indexSettings() != null) {
-            return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
+            return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
         }
         return Defaults.COERCE;
     }
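The mapper hunks all make the same conversion: the default that each caller used to pass to getAsBoolean now lives on the Setting itself. A small sketch of the before/after, assuming only the Setting.boolSetting factory and Settings builder shown above (the class name is illustrative):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class CoerceSettingSketch {
    // boolSetting(key, defaultValue, dynamic, scope) — mirrors COERCE_SETTING above
    static final Setting<Boolean> COERCE = Setting.boolSetting("index.mapping.coerce", false, false, Setting.Scope.INDEX);

    public static void main(String[] args) {
        Settings indexSettings = Settings.builder().put("index.mapping.coerce", true).build();

        // old style: every call site restates the default
        boolean oldStyle = indexSettings.getAsBoolean("index.mapping.coerce", false);

        // new style: the default is part of the Setting, callers just read it
        boolean newStyle = COERCE.get(indexSettings);
        boolean fromDefault = COERCE.get(Settings.EMPTY); // false

        System.out.println(oldStyle + " " + newStyle + " " + fromDefault);
    }
}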
@@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -59,7 +60,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
 */
 public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
     public static final String CONTENT_TYPE = "geo_point";
-
     public static class Names {
         public static final String LAT = "lat";
         public static final String LAT_SUFFIX = "." + LAT;

@@ -142,7 +142,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
             return new Explicit<>(ignoreMalformed, true);
         }
         if (context.indexSettings() != null) {
-            return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
+            return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
         }
         return Defaults.IGNORE_MALFORMED;
     }
@@ -102,7 +102,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
             return new Explicit<>(coerce, true);
         }
         if (context.indexSettings() != null) {
-            return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
+            return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
         }
         return Defaults.COERCE;
     }
@@ -137,7 +137,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
             return new Explicit<>(coerce, true);
         }
         if (context.indexSettings() != null) {
-            return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
+            return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
         }
         return Defaults.COERCE;
     }
@@ -112,41 +112,27 @@ public class CreateIndexIT extends ESIntegTestCase {
     }

     public void testInvalidShardCountSettings() throws Exception {
+        int value = randomIntBetween(-10, 0);
         try {
             prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0))
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, value)
                 .build())
                 .get();
             fail("should have thrown an exception about the primary shard count");
         } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true));
+            assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage());
         }
+        value = randomIntBetween(-10, -1);
         try {
             prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1))
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, value)
                 .build())
                 .get();
             fail("should have thrown an exception about the replica shard count");
         } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true));
+            assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage());
         }

-        try {
-            prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0))
-                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1))
-                .build())
-                .get();
-            fail("should have thrown an exception about the shard count");
-        } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true));
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true));
-        }
     }

     public void testCreateIndexWithBlocks() {

@@ -164,39 +150,38 @@ public class CreateIndexIT extends ESIntegTestCase {
         disableIndexBlock("test", IndexMetaData.SETTING_BLOCKS_METADATA);
     }

+    public void testUnknownSettingFails() {
+        try {
+            prepareCreate("test").setSettings(Settings.builder()
+                .put("index.unknown.value", "this must fail")
+                .build())
+                .get();
+            fail("should have thrown an exception about the shard count");
+        } catch (IllegalArgumentException e) {
+            assertEquals("unknown setting [index.unknown.value]", e.getMessage());
+        }
+    }
+
     public void testInvalidShardCountSettingsWithoutPrefix() throws Exception {
+        int value = randomIntBetween(-10, 0);
         try {
             prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0))
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value)
                 .build())
                 .get();
             fail("should have thrown an exception about the shard count");
         } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true));
+            assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage());
         }
+        value = randomIntBetween(-10, -1);
         try {
             prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1))
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value)
                 .build())
                 .get();
             fail("should have thrown an exception about the shard count");
         } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true));
-        }
-        try {
-            prepareCreate("test").setSettings(Settings.builder()
-                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0))
-                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1))
-                .build())
-                .get();
-            fail("should have thrown an exception about the shard count");
-        } catch (IllegalArgumentException e) {
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true));
-            assertThat("message contains error about shard count: " + e.getMessage(),
-                e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true));
+            assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage());
         }
     }

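The updated assertions rely on settings that carry their own lower bound, so out-of-range values now fail at parse time with a uniform message. A hedged sketch using the intSetting(key, default, min, dynamic, scope) factory that appears later in this diff; the real number_of_shards setting is defined elsewhere in the codebase, so the constant below only stands in for it:

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class BoundedIntSettingSketch {
    // stand-in for index.number_of_shards: default 5, minimum 1, not dynamic
    static final Setting<Integer> SHARDS = Setting.intSetting("index.number_of_shards", 5, 1, false, Setting.Scope.INDEX);

    public static void main(String[] args) {
        Settings bad = Settings.builder().put(SHARDS.getKey(), -3).build();
        try {
            SHARDS.get(bad); // parsing enforces the minimum
        } catch (IllegalArgumentException e) {
            // same shape of message the test asserts on:
            // Failed to parse value [-3] for setting [index.number_of_shards] must be >= 1
            System.out.println(e.getMessage());
        }
    }
}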
@@ -24,15 +24,23 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.engine.Segment;
 import org.elasticsearch.index.MergePolicyConfig;
 import org.elasticsearch.indices.IndexClosedException;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;
 import org.junit.Before;

+import java.util.Collection;
 import java.util.List;

 import static org.hamcrest.Matchers.is;

 public class IndicesSegmentsRequestTests extends ESSingleNodeTestCase {

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     @Before
     public void setupIndex() {
         Settings settings = Settings.builder()
@@ -93,7 +93,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase {
                 null,
                 new HashSet<>(),
                 null,
-                null);
+                null, null);
         MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY));

         final List<Throwable> throwables = new ArrayList<>();

@@ -105,6 +105,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase {

             @Override
             public void onFailure(Throwable t) {
+                t.printStackTrace();
                 throwables.add(t);
             }
         });
@@ -31,15 +31,13 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.elasticsearch.cluster.settings.DynamicSettings;
-import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.inject.ModuleTestCase;
 import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.IndexScopeSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.common.settings.SettingsModule;
-import org.elasticsearch.index.settings.IndexDynamicSettings;

 public class ClusterModuleTests extends ModuleTestCase {

@@ -93,18 +91,19 @@ public class ClusterModuleTests extends ModuleTestCase {
     }

     public void testRegisterIndexDynamicSettingDuplicate() {
-        ClusterModule module = new ClusterModule(Settings.EMPTY);
+        SettingsModule module = new SettingsModule(Settings.EMPTY, new SettingsFilter(Settings.EMPTY));
         try {
-            module.registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Validator.EMPTY);
+            module.registerSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING);
         } catch (IllegalArgumentException e) {
             assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice");
         }
     }

     public void testRegisterIndexDynamicSetting() {
-        ClusterModule module = new ClusterModule(Settings.EMPTY);
-        module.registerIndexDynamicSetting("foo.bar", Validator.EMPTY);
-        assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), IndexDynamicSettings.class);
+        final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY);
+        SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter);
+        module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX));
+        assertInstanceBinding(module, IndexScopeSettings.class, service -> service.hasDynamicSetting("foo.bar"));
     }

     public void testRegisterAllocationDeciderDuplicate() {
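With DynamicSettings and Validator gone, index-scoped settings are registered up front through the SettingsModule, exactly as the rewritten tests above do. A sketch of that registration flow under the same assumptions (the key used below is illustrative):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;

public class RegisterIndexSettingSketch {
    public static void main(String[] args) {
        SettingsModule module = new SettingsModule(Settings.EMPTY, new SettingsFilter(Settings.EMPTY));

        // boolSetting(key, default, dynamic, scope): dynamic=true keeps it updatable at runtime
        Setting<Boolean> myFlag = Setting.boolSetting("index.my_plugin.flag", false, true, Setting.Scope.INDEX);
        module.registerSetting(myFlag);

        // registering the same key twice fails fast, mirroring the duplicate test above
        try {
            module.registerSetting(Setting.boolSetting("index.my_plugin.flag", true, true, Setting.Scope.INDEX));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Cannot register setting [index.my_plugin.flag] twice
        }
    }
}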
@@ -47,7 +47,7 @@ public class DelayedAllocationIT extends ESIntegTestCase {
         prepareCreate("test").setSettings(Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0)).get();
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0)).get();
         ensureGreen("test");
         indexRandomData();
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));

@@ -66,7 +66,7 @@ public class DelayedAllocationIT extends ESIntegTestCase {
         prepareCreate("test").setSettings(Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get();
         ensureGreen("test");
         indexRandomData();
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));

@@ -90,14 +90,14 @@ public class DelayedAllocationIT extends ESIntegTestCase {
         prepareCreate("test").setSettings(Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get();
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get();
         ensureGreen("test");
         indexRandomData();
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
         ensureGreen("test");
         internalCluster().startNode();
         // do a second round with longer delay to make sure it happens
-        assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get());
+        assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get());
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
         ensureGreen("test");
     }

@@ -112,7 +112,7 @@ public class DelayedAllocationIT extends ESIntegTestCase {
         prepareCreate("test").setSettings(Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get();
         ensureGreen("test");
         indexRandomData();
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));

@@ -138,7 +138,7 @@ public class DelayedAllocationIT extends ESIntegTestCase {
         prepareCreate("test").setSettings(Settings.builder()
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get();
         ensureGreen("test");
         indexRandomData();
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));

@@ -149,7 +149,7 @@ public class DelayedAllocationIT extends ESIntegTestCase {
             }
         });
         assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1));
-        assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(0))).get());
+        assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(0))).get());
         ensureGreen("test");
         assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0));
     }
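Every test change in this block is the same mechanical fix: INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING is now a Setting<TimeValue> rather than a String constant, so a Settings builder needs the key pulled out explicitly. A short sketch of the difference, with an illustrative time setting declared the way this commit declares them (the key below is made up; the real constant lives in UnassignedInfo):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

public class GetKeySketch {
    // timeSetting(key, default, min, dynamic, scope), matching the INDEX_GC_DELETES_SETTING declaration above
    static final Setting<TimeValue> DELAY = Setting.timeSetting("index.example.delayed_timeout",
            TimeValue.timeValueMinutes(1), TimeValue.timeValueMillis(0), true, Setting.Scope.INDEX);

    public static void main(String[] args) {
        // the old .put(SETTING, value) calls compiled because the constant was a plain String;
        // with a typed Setting the key has to come from getKey()
        Settings settings = Settings.builder()
                .put(DELAY.getKey(), TimeValue.timeValueHours(1))
                .build();
        System.out.println(DELAY.get(settings)); // 1h
    }
}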
@@ -69,7 +69,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
     public void testNoDelayedUnassigned() throws Exception {
         AllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
         MetaData metaData = MetaData.builder()
-                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0"))
+                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0"))
                 .numberOfShards(1).numberOfReplicas(1))
                 .build();
         ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)

@@ -97,7 +97,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
     public void testDelayedUnassignedScheduleReroute() throws Exception {
         MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
         MetaData metaData = MetaData.builder()
-                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms"))
+                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"))
                 .numberOfShards(1).numberOfReplicas(1))
                 .build();
         ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)

@@ -144,9 +144,9 @@ public class RoutingServiceTests extends ESAllocationTestCase {
         try {
             MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
             MetaData metaData = MetaData.builder()
-                    .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms"))
+                    .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"))
                     .numberOfShards(1).numberOfReplicas(1))
-                    .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10s"))
+                    .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10s"))
                     .numberOfShards(1).numberOfReplicas(1))
                     .build();
             ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData)
@@ -259,7 +259,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
     public void testUnassignedDelayedOnlyOnNodeLeft() throws Exception {
         final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, null);
         long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
-                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
+                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY);
         long cachedDelay = unassignedInfo.getLastComputedLeftDelayNanos();
         assertThat(delay, equalTo(cachedDelay));
         assertThat(delay, equalTo(TimeValue.timeValueHours(10).nanos() - 1));

@@ -273,7 +273,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
         reasons.remove(UnassignedInfo.Reason.NODE_LEFT);
         UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
         long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
-                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
+                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY);
         assertThat(delay, equalTo(0l));
         delay = unassignedInfo.getLastComputedLeftDelayNanos();
         assertThat(delay, equalTo(0l));

@@ -286,7 +286,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
         final long baseTime = System.nanoTime();
         final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, baseTime, System.currentTimeMillis());
         final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos();
-        final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueNanos(totalDelayNanos)).build();
+        final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build();
         long delay = unassignedInfo.updateDelay(baseTime, settings, Settings.EMPTY);
         assertThat(delay, equalTo(totalDelayNanos));
         assertThat(delay, equalTo(unassignedInfo.getLastComputedLeftDelayNanos()));

@@ -336,8 +336,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
         final long expectMinDelaySettingsNanos = Math.min(delayTest1.nanos(), delayTest2.nanos());

         MetaData metaData = MetaData.builder()
-                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest1)).numberOfShards(1).numberOfReplicas(1))
-                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest2)).numberOfShards(1).numberOfReplicas(1))
+                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest1)).numberOfShards(1).numberOfReplicas(1))
+                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest2)).numberOfShards(1).numberOfReplicas(1))
                 .build();
         ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
                 .metaData(metaData)
@@ -1,106 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cluster.settings;
-
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-
-public class SettingsValidatorTests extends ESTestCase {
-    public void testValidators() throws Exception {
-        assertThat(Validator.EMPTY.validate("", "anything goes", null), nullValue());
-
-        assertThat(Validator.TIME.validate("", "10m", null), nullValue());
-        assertThat(Validator.TIME.validate("", "10g", null), notNullValue());
-        assertThat(Validator.TIME.validate("", "bad timing", null), notNullValue());
-
-        assertThat(Validator.BYTES_SIZE.validate("", "10m", null), nullValue());
-        assertThat(Validator.BYTES_SIZE.validate("", "10g", null), nullValue());
-        assertThat(Validator.BYTES_SIZE.validate("", "bad", null), notNullValue());
-
-        assertThat(Validator.FLOAT.validate("", "10.2", null), nullValue());
-        assertThat(Validator.FLOAT.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "0.0", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "-1.0", null), notNullValue());
-        assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.DOUBLE.validate("", "10.2", null), nullValue());
-        assertThat(Validator.DOUBLE.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2", null), nullValue());
-        assertThat(Validator.DOUBLE_GTE_2.validate("", "2.0", null), nullValue());
-        assertThat(Validator.DOUBLE_GTE_2.validate("", "1.0", null), notNullValue());
-        assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "0.0", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "-1.0", null), notNullValue());
-        assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.INTEGER.validate("", "10", null), nullValue());
-        assertThat(Validator.INTEGER.validate("", "10.2", null), notNullValue());
-
-        assertThat(Validator.INTEGER_GTE_2.validate("", "2", null), nullValue());
-        assertThat(Validator.INTEGER_GTE_2.validate("", "1", null), notNullValue());
-        assertThat(Validator.INTEGER_GTE_2.validate("", "0", null), notNullValue());
-        assertThat(Validator.INTEGER_GTE_2.validate("", "10.2.3", null), notNullValue());
-
-        assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "2", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "1", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "0", null), nullValue());
-        assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "-1", null), notNullValue());
-        assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "10.2", null), notNullValue());
-
-        assertThat(Validator.POSITIVE_INTEGER.validate("", "2", null), nullValue());
-        assertThat(Validator.POSITIVE_INTEGER.validate("", "1", null), nullValue());
-        assertThat(Validator.POSITIVE_INTEGER.validate("", "0", null), notNullValue());
-        assertThat(Validator.POSITIVE_INTEGER.validate("", "-1", null), notNullValue());
-        assertThat(Validator.POSITIVE_INTEGER.validate("", "10.2", null), notNullValue());
-
-        assertThat(Validator.PERCENTAGE.validate("", "asdasd", null), notNullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "-1", null), notNullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "20", null), notNullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "-1%", null), notNullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "101%", null), notNullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "100%", null), nullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "99%", null), nullValue());
-        assertThat(Validator.PERCENTAGE.validate("", "0%", null), nullValue());
-
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "asdasd", null), notNullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20", null), notNullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20mb", null), nullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "-1%", null), notNullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "101%", null), notNullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "100%", null), nullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "99%", null), nullValue());
-        assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "0%", null), nullValue());
-    }
-
-    public void testDynamicValidators() throws Exception {
-        DynamicSettings.Builder ds = new DynamicSettings.Builder();
-        ds.addSetting("my.test.*", Validator.POSITIVE_INTEGER);
-        String valid = ds.build().validateDynamicSetting("my.test.setting", "-1", null);
-        assertThat(valid, equalTo("the value of the setting my.test.setting must be a positive integer"));
-    }
-}
@@ -228,7 +228,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {

     public void testDelayedAllocation() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(),
-                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
+                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
         testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         if (randomBoolean()) {
             // we sometime return empty list of files, make sure we test this as well

@@ -241,7 +241,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
         assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));

         allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(),
-                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
+                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
         testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         AllocationService.updateLeftDelayOfUnassignedShards(allocation, Settings.EMPTY);
         changed = testAllocator.allocateUnassigned(allocation);
@@ -18,20 +18,17 @@
  */
 package org.elasticsearch.index;

-import org.elasticsearch.cluster.ClusterModule;
-import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

 import java.util.Collection;
 import java.util.Collections;
-import java.util.Set;
-import java.util.function.Consumer;

 import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

@@ -46,7 +43,7 @@ public class SettingsListenerIT extends ESIntegTestCase {

     public static class SettingsListenerPlugin extends Plugin {
         private final SettingsTestingService service = new SettingsTestingService();
+        private static final Setting<Integer> SETTING = Setting.intSetting("index.test.new.setting", 0, true, Setting.Scope.INDEX);
         /**
          * The name of the plugin.
          */

@@ -63,8 +60,8 @@ public class SettingsListenerIT extends ESIntegTestCase {
             return "Settings Listenern Plugin";
         }

-        public void onModule(ClusterModule clusterModule) {
-            clusterModule.registerIndexDynamicSetting("index.test.new.setting", Validator.INTEGER);
+        public void onModule(SettingsModule settingsModule) {
+            settingsModule.registerSetting(SettingsTestingService.VALUE);
         }

         @Override

@@ -96,7 +93,7 @@ public class SettingsListenerIT extends ESIntegTestCase {

     public static class SettingsTestingService {
         public volatile int value;
-        public static Setting<Integer> VALUE = Setting.intSetting("index.test.new.setting", -1, true, Setting.Scope.INDEX);
+        public static Setting<Integer> VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX);

         public void setValue(int value) {
             this.value = value;

@@ -133,5 +130,13 @@ public class SettingsListenerIT extends ESIntegTestCase {
         for (SettingsTestingService instance : internalCluster().getInstances(SettingsTestingService.class)) {
             assertEquals(42, instance.value);
         }

+        try {
+            client().admin().indices().prepareUpdateSettings("other").setSettings(Settings.builder()
+                    .put("index.test.new.setting", -5)).get();
+            fail();
+        } catch (IllegalArgumentException ex) {
+            assertEquals("Failed to parse value [-5] for setting [index.test.new.setting] must be >= -1", ex.getMessage());
+        }
     }
 }
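Taken together, the rewritten SettingsListenerIT covers the whole lifecycle under the new infrastructure: a plugin declares a typed, dynamic, index-scoped setting, registers it before the node is fully started, and out-of-range updates are rejected with a parse error instead of being silently accepted. A condensed sketch of that lifecycle, using only the Setting and SettingsModule calls present in this diff (cluster and plugin wiring omitted):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;

public class DynamicIndexSettingSketch {
    // intSetting(key, default, min, dynamic, scope): dynamic, index-scoped, minimum -1
    static final Setting<Integer> VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX);

    public static void main(String[] args) {
        // 1) registration happens up front, e.g. from a plugin's onModule(SettingsModule)
        SettingsModule module = new SettingsModule(Settings.EMPTY, new SettingsFilter(Settings.EMPTY));
        module.registerSetting(VALUE);

        // 2) a valid update parses cleanly ...
        System.out.println(VALUE.get(Settings.builder().put(VALUE.getKey(), 42).build())); // 42

        // 3) ... while a value below the minimum fails with the message the test asserts on
        try {
            VALUE.get(Settings.builder().put(VALUE.getKey(), -5).build());
        } catch (IllegalArgumentException ex) {
            System.out.println(ex.getMessage()); // Failed to parse value [-5] for setting [index.test.new.setting] must be >= -1
        }
    }
}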
@@ -27,9 +27,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;

 import java.io.IOException;
+import java.util.Collection;
 import java.util.Locale;

 import static org.elasticsearch.test.VersionUtils.randomVersion;

@@ -40,6 +43,12 @@ import static org.hamcrest.Matchers.is;
 *
 */
 public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() {
         Analyzer currentStandardAnalyzer = PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT);
         Analyzer currentDefaultAnalyzer = PreBuiltAnalyzers.DEFAULT.getAnalyzer(Version.CURRENT);
@@ -43,7 +43,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase {
         long gcDeletes = random().nextLong() & (Long.MAX_VALUE >> 11);

         Settings build = Settings.builder()
-                .put(IndexSettings.INDEX_GC_DELETES_SETTING, gcDeletes, TimeUnit.MILLISECONDS)
+                .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeletes, TimeUnit.MILLISECONDS)
                 .build();
         assertEquals(gcDeletes, build.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), null).millis());

@@ -58,7 +58,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase {
         }

         Settings settings = Settings.builder()
-                .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS)
+                .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS)
                 .build();
         client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get();
         assertEquals(engine.getGcDeletesInMillis(), 1000);

@@ -66,7 +66,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase {


         settings = Settings.builder()
-                .put(IndexSettings.INDEX_GC_DELETES_SETTING, "0ms")
+                .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "0ms")
                 .build();

         client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get();

@@ -74,7 +74,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase {
         assertTrue(engine.config().isEnableGcDeletes());

         settings = Settings.builder()
-                .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS)
+                .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS)
                 .build();
         client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get();
         assertEquals(engine.getGcDeletesInMillis(), 1000);
@@ -49,12 +49,15 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;
 import org.elasticsearch.test.VersionUtils;
 import org.junit.After;
 import org.junit.Before;

 import java.io.IOException;
+import java.util.Collection;

 import static org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import static org.hamcrest.Matchers.equalTo;

@@ -84,6 +87,11 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
         return getForField(type, fieldName, hasDocValues());
     }

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public <IFD extends IndexFieldData<?>> IFD getForField(FieldDataType type, String fieldName, boolean docValues) {
         final MappedFieldType fieldType;
         final BuilderContext context = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
@@ -26,7 +26,11 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;
+
+import java.util.Collection;

 import static org.hamcrest.Matchers.equalTo;

@@ -34,6 +38,11 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {

     private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build();

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public void testBackCompatCustomBoostValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                 .startObject("s_field").field("type", "string").endObject()

@@ -29,7 +29,11 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;
+
+import java.util.Collection;

 import static org.hamcrest.Matchers.closeTo;

@@ -39,6 +43,11 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {

     private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build();

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public void testBackCompatFieldLevelBoost() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
                 .startObject("str_field").field("type", "string").endObject()

@@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.nullValue;
  *
  */
 public class SimpleIpMappingTests extends ESSingleNodeTestCase {

     public void testSimpleMapping() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject()

@@ -33,10 +33,13 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPluging;
 import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;
+import java.util.Collection;
 import java.util.Map;

 import static org.hamcrest.Matchers.containsString;

@@ -44,6 +47,11 @@ import static org.hamcrest.Matchers.equalTo;

 public class DefaultSourceMappingTests extends ESSingleNodeTestCase {

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public void testNoFormat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_source").endObject()

@@ -88,10 +88,12 @@ import org.elasticsearch.index.store.Store;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.recovery.RecoveryState;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.DummyShardLock;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.FieldMaskingReader;
 import org.elasticsearch.test.IndexSettingsModule;
+import org.elasticsearch.test.InternalSettingsPluging;
 import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;

@@ -100,6 +102,7 @@ import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;

@@ -127,6 +130,11 @@ import static org.hamcrest.Matchers.equalTo;
  */
 public class IndexShardTests extends ESSingleNodeTestCase {

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPluging.class);
+    }
+
     public void testWriteShardState() throws Exception {
         try (NodeEnvironment env = newNodeEnvironment()) {
             ShardId id = new ShardId("foo", 1);

@@ -63,7 +63,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase {
         Settings settings = settingsBuilder()
                 .put(SETTING_NUMBER_OF_SHARDS, 11)
                 .put(SETTING_NUMBER_OF_REPLICAS, 1)
-                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0s")
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0s")
                 .build();

         // start one server

@@ -30,7 +30,9 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.MockEngineFactoryPlugin;

@@ -58,7 +60,7 @@ import static org.hamcrest.Matchers.equalTo;
 public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class);
+        return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class);
     }

     public void testBreakerWithRandomExceptions() throws IOException, InterruptedException, ExecutionException {

@@ -195,6 +197,8 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
     // TODO: Generalize this class and add it as a utility
     public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {

+        public static final Setting<Double> EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX);
+        public static final Setting<Double> EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX);
         public static class TestPlugin extends Plugin {
             @Override
             public String name() {

@@ -205,6 +209,11 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
                 return "a mock reader wrapper that throws random exceptions for testing";
             }

+            public void onModule(SettingsModule module) {
+                module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING);
+                module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING);
+            }
+
             public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
                 module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class);
             }

@@ -219,8 +228,8 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {

         ThrowingSubReaderWrapper(Settings settings) {
             final long seed = settings.getAsLong(SETTING_INDEX_SEED, 0l);
-            this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d);
-            this.lowLevelRatio = settings.getAsDouble(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d);
+            this.topLevelRatio = EXCEPTION_TOP_LEVEL_RATIO_SETTING.get(settings);
+            this.lowLevelRatio = EXCEPTION_LOW_LEVEL_RATIO_SETTING.get(settings);
             this.random = new Random(seed);
         }

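The `RandomExceptionDirectoryReaderWrapper` hunks above show the registration side of the new infrastructure: an index-scoped setting a test relies on is declared as a typed `Setting` constant and registered through the plugin's `onModule(SettingsModule)` hook so it is known before the node finishes starting up. A condensed sketch of that shape, with a hypothetical setting key and plugin name (everything else follows the calls visible in this commit):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.plugins.Plugin;

public class ExampleSettingsPlugin extends Plugin {
    // Hypothetical setting: key, default, minimum, dynamic flag, scope.
    public static final Setting<Double> FAILURE_RATIO_SETTING =
            Setting.doubleSetting("index.example.failure_ratio", 0.1d, 0.0d, false, Setting.Scope.INDEX);

    @Override
    public String name() {
        return "example-settings-plugin";
    }

    @Override
    public String description() {
        return "registers an index-scoped setting used by a test";
    }

    // Invoked during node construction; a setting that is never registered
    // here (or by core) can no longer be set on an index.
    public void onModule(SettingsModule module) {
        module.registerSetting(FAILURE_RATIO_SETTING);
    }
}

This is a sketch of the registration pattern only, not a drop-in replacement for the test plugin in the diff.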
@@ -141,7 +141,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
          * to relocating to the restarting node since all had 2 shards and now one node has nothing allocated.
          * We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen.
          */
-        prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMinutes(1))).get();
+        prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get();

         for (int i = 0; i < 100; i++) {
             client().prepareIndex("test", "type1", Long.toString(i))

@@ -1540,7 +1540,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas

         // Update settings to back to normal
         assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder()
-                .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "node")
+                .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")
         ));

         logger.info("--> wait for snapshot to complete");

@@ -76,7 +76,9 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.ByteSizeUnit;

@@ -478,7 +480,7 @@ public abstract class ESIntegTestCase extends ESTestCase {

         if (randomBoolean()) {
             // keep this low so we don't stall tests
-            builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, RandomInts.randomIntBetween(random, 1, 15) + "ms");
+            builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), RandomInts.randomIntBetween(random, 1, 15) + "ms");
         }

         return builder;

@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test;
+
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.SettingsModule;
+import org.elasticsearch.plugins.Plugin;
+
+public final class InternalSettingsPluging extends Plugin {
+    @Override
+    public String name() {
+        return "internal-settings-plugin";
+    }
+
+    @Override
+    public String description() {
+        return "a plugin that allows setting values for internal index settings that can't be set via the ordinary API without this plugin installed";
+    }
+
+    private static final Setting<Integer> VERSION_CREATED = Setting.intSetting("index.version.created", 0, false, Setting.Scope.INDEX);
+    private static final Setting<Boolean> MERGE_ENABLED = Setting.boolSetting("index.merge.enabled", true, false, Setting.Scope.INDEX);
+
+
+    public void onModule(SettingsModule module) {
+        module.registerSetting(VERSION_CREATED);
+        module.registerSetting(MERGE_ENABLED);
+    }
+}

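With `InternalSettingsPluging` registering `index.version.created` and `index.merge.enabled`, a single-node test can keep using those internal keys at index creation time by pulling the plugin in through `getPlugins()`, exactly as the converted tests above do. A minimal usage sketch, assuming the `createIndex(name, settings)` helper of `ESSingleNodeTestCase` (the test class and index name are illustrative; imports omitted for brevity):

public class MyBackCompatTests extends ESSingleNodeTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        // Makes "index.version.created" and "index.merge.enabled" registered settings for this test.
        return pluginList(InternalSettingsPluging.class);
    }

    public void testCreatesOldIndex() {
        // Without the plugin, "index.version.created" would be rejected as an
        // unregistered index setting under the new infrastructure.
        createIndex("old-index", Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
                .build());
    }
}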
@@ -463,7 +463,7 @@ public final class InternalTestCluster extends TestCluster {
         }

         // always default delayed allocation to 0 to make sure we have tests are not delayed
-        builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0);
+        builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0);

         return builder.build();
     }
