Unify Settings xcontent reading and writing (#26739)
This change adds a fromXContent method to Settings that allows reading the xcontent produced by toXContent. It also replaces the entire settings loader infrastructure and removes the structured map representation. Future PRs will also tackle `getAsMap`, which exposes the internal representation of settings, for better encapsulation.
parent dee2ae1023
commit aab4655e63
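The snippet below is not part of the commit; it is a minimal sketch of the round trip this change enables, assuming an Elasticsearch 6.x-era classpath. The class name and setting keys are illustrative only.

```java
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class SettingsRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Settings original = Settings.builder()
                .put("index.number_of_shards", 3)
                .put("index.refresh_interval", "5s")
                .build();

        // Settings is a ToXContentFragment, so wrap the output in an enclosing object.
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            builder.startObject();
            original.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();

            // Read the produced xcontent back with the new fromXContent method.
            try (XContentParser parser = JsonXContent.jsonXContent
                    .createParser(NamedXContentRegistry.EMPTY, builder.bytes())) {
                Settings roundTripped = Settings.fromXContent(parser);
                assert roundTripped.equals(original);
            }
        }
    }
}
```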
@@ -147,10 +147,10 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
}
/**
* A simplified version of settings that takes key value pairs settings.
* The settings to create the index with.
*/
public CreateIndexRequest settings(Object... settings) {
this.settings = Settings.builder().put(settings).build();
public CreateIndexRequest settings(Settings.Builder settings) {
this.settings = settings.build();
return this;
}
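For callers, the practical effect of dropping the key/value vararg overload is the migration visible in the test changes further down. A hedged illustration, with a made-up index name and class name:

```java
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.common.settings.Settings;

public class CreateIndexSettingsMigration {
    public static CreateIndexRequest buildRequest() {
        // Before this commit: request.settings("index.number_of_shards", 2, "index.number_of_replicas", 1);
        // After it, the key/value vararg overload is gone and a Settings.Builder is passed instead.
        CreateIndexRequest request = new CreateIndexRequest("my-index");
        request.settings(Settings.builder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 1));
        return request;
    }
}
```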
@@ -162,14 +162,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
return this;
}
/**
* The settings to create the index with.
*/
public CreateIndexRequest settings(Settings.Builder settings) {
this.settings = settings.build();
return this;
}
/**
* The settings to create the index with (either json or yaml format)
*/
@@ -76,16 +76,6 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
return this;
}
/**
* The settings to create the index with (either json or yaml format)
* @deprecated use {@link #setSettings(String, XContentType)} to avoid content type detection
*/
@Deprecated
public CreateIndexRequestBuilder setSettings(String source) {
request.settings(source);
return this;
}
/**
* The settings to create the index with (either json or yaml format)
*/
@@ -94,14 +84,6 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
return this;
}
/**
* A simplified version of settings that takes key value pairs settings.
*/
public CreateIndexRequestBuilder setSettings(Object... settings) {
request.settings(settings);
return this;
}
/**
* The settings to create the index with (either json/yaml/properties format)
*/
@@ -44,7 +44,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -1143,7 +1142,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragmen
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (KEY_SETTINGS.equals(currentFieldName)) {
builder.settings(Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
builder.settings(Settings.fromXContent(parser));
} else if (KEY_MAPPINGS.equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -34,7 +34,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -446,9 +445,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
} else if (token == XContentParser.Token.START_OBJECT) {
if ("settings".equals(currentFieldName)) {
Settings.Builder templateSettingsBuilder = Settings.builder();
templateSettingsBuilder.put(
SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered()))
.normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
templateSettingsBuilder.put(Settings.fromXContent(parser));
templateSettingsBuilder.normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
builder.settings(templateSettingsBuilder.build());
} else if ("mappings".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -43,7 +43,6 @@ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -1054,7 +1053,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("settings".equals(currentFieldName)) {
builder.persistentSettings(Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build());
builder.persistentSettings(Settings.fromXContent(parser));
} else if ("indices".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
builder.put(IndexMetaData.Builder.fromXContent(parser), false);
@@ -20,15 +20,12 @@
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.AbstractNamedDiffable;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.NamedDiff;
import org.elasticsearch.cluster.metadata.MetaData.Custom;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -151,7 +148,7 @@ public class RepositoriesMetaData extends AbstractNamedDiffable<Custom> implemen
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse repository [{}], incompatible params", name);
}
settings = Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build();
settings = Settings.fromXContent(parser);
} else {
throw new ElasticsearchParseException("failed to parse repository [{}], unknown field [{}]", name, currentFieldName);
}
@@ -125,13 +125,13 @@ public final class ClusterSettings extends AbstractScopedSettings {
@Override
public Settings getValue(Settings current, Settings previous) {
Settings.Builder builder = Settings.builder();
builder.put(current.filter(loggerPredicate).getAsMap());
builder.put(current.filter(loggerPredicate));
for (String key : previous.getAsMap().keySet()) {
if (loggerPredicate.test(key) && builder.internalMap().containsKey(key) == false) {
if (ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).exists(settings) == false) {
builder.putNull(key);
} else {
builder.put(key, ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings));
builder.put(key, ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings).toString());
}
}
}
@@ -19,33 +19,34 @@
package org.elasticsearch.common.settings;
import org.apache.logging.log4j.Level;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.settings.loader.SettingsLoaderFactory;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.MemorySizeValue;
import org.elasticsearch.common.unit.RatioValue;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
@@ -124,10 +125,7 @@ public final class Settings implements ToXContentFragment {
return this.settings;
}
/**
* The settings as a structured {@link java.util.Map}.
*/
public Map<String, Object> getAsStructuredMap() {
private Map<String, Object> getAsStructuredMap() {
Map<String, Object> map = new HashMap<>(2);
for (Map.Entry<String, String> entry : settings.entrySet()) {
processSetting(map, "", entry.getKey(), entry.getValue());
@@ -622,6 +620,117 @@ public final class Settings implements ToXContentFragment {
return builder;
}
/**
* Parses the generated xcontent from {@link Settings#toXContent(XContentBuilder, Params)} into a new Settings object.
* Note this method requires the parser to either be positioned on a null token or on
* {@link org.elasticsearch.common.xcontent.XContentParser.Token#START_OBJECT}.
*/
public static Settings fromXContent(XContentParser parser) throws IOException {
return fromXContent(parser, true, false);
}
private static Settings fromXContent(XContentParser parser, boolean allowNullValues,
boolean validateEndOfStream)
throws IOException {
if (parser.currentToken() == null) {
parser.nextToken();
}
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
Builder innerBuilder = Settings.builder();
StringBuilder currentKeyBuilder = new StringBuilder();
fromXContent(parser, currentKeyBuilder, innerBuilder, allowNullValues);
if (validateEndOfStream) {
// ensure we reached the end of the stream
XContentParser.Token lastToken = null;
try {
while (!parser.isClosed() && (lastToken = parser.nextToken()) == null) ;
} catch (Exception e) {
throw new ElasticsearchParseException(
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
+ "column number: [{}]",
e, parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber);
}
if (lastToken != null) {
throw new ElasticsearchParseException(
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
+ "column number: [{}]",
parser.getTokenLocation().lineNumber, parser.getTokenLocation().columnNumber);
}
}
return innerBuilder.build();
}
private static void fromXContent(XContentParser parser, StringBuilder keyBuilder, Settings.Builder builder,
boolean allowNullValues) throws IOException {
final int length = keyBuilder.length();
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
keyBuilder.setLength(length);
keyBuilder.append(parser.currentName());
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
keyBuilder.append('.');
fromXContent(parser, keyBuilder, builder, allowNullValues);
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
List<String> list = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
list.add(parser.text());
} else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
list.add(parser.text()); // just use the string representation here
} else if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
list.add(String.valueOf(parser.text()));
} else {
throw new IllegalStateException("only value lists are allowed in serialized settings");
}
}
String key = keyBuilder.toString();
validateValue(key, list, builder, parser, allowNullValues);
builder.putArray(key, list);
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
String key = keyBuilder.toString();
validateValue(key, null, builder, parser, allowNullValues);
builder.putNull(key);
} else if (parser.currentToken() == XContentParser.Token.VALUE_STRING
|| parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
String key = keyBuilder.toString();
String value = parser.text();
validateValue(key, value, builder, parser, allowNullValues);
builder.put(key, value);
} else if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
String key = keyBuilder.toString();
validateValue(key, parser.text(), builder, parser, allowNullValues);
builder.put(key, parser.booleanValue());
} else {
XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation());
}
}
}
private static void validateValue(String key, Object currentValue, Settings.Builder builder, XContentParser parser,
boolean allowNullValues) {
if (builder.map.containsKey(key)) {
throw new ElasticsearchParseException(
"duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]",
key,
parser.getTokenLocation().lineNumber,
parser.getTokenLocation().columnNumber,
builder.map.get(key),
currentValue
);
}
if (currentValue == null && allowNullValues == false) {
throw new ElasticsearchParseException(
"null-valued setting found for key [{}] found at line number [{}], column number [{}]",
key,
parser.getTokenLocation().lineNumber,
parser.getTokenLocation().columnNumber
);
}
}
public static final Set<String> FORMAT_PARAMS =
Collections.unmodifiableSet(new HashSet<>(Arrays.asList("settings_filter", "flat_settings")));
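To make the new parser concrete, here is a hypothetical snippet (not from the commit) showing how nested xcontent is flattened into dot-separated keys; the JSON document and assertions are made up.

```java
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class FromXContentFlatteningSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"index\":{\"number_of_shards\":3,\"tags\":[\"a\",\"b\"]}}";
        try (XContentParser parser = JsonXContent.jsonXContent
                .createParser(NamedXContentRegistry.EMPTY, json)) {
            Settings settings = Settings.fromXContent(parser);
            // Nested objects become dot-separated keys; numbers keep their string representation.
            assert "3".equals(settings.get("index.number_of_shards"));
            // Arrays go through Builder.putArray, which stores indexed keys.
            assert "a".equals(settings.get("index.tags.0"));
            assert "b".equals(settings.get("index.tags.1"));
        }
    }
}
```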
@@ -708,27 +817,69 @@ public final class Settings implements ToXContentFragment {
}
/**
* Puts tuples of key value pairs of settings. Simplified version instead of repeating calling
* put for each one.
* Sets a path setting with the provided setting key and path.
*
* @param key The setting key
* @param path The setting path
* @return The builder
*/
public Builder put(Object... settings) {
if (settings.length == 1) {
// support cases where the actual type gets lost down the road...
if (settings[0] instanceof Map) {
//noinspection unchecked
return put((Map) settings[0]);
} else if (settings[0] instanceof Settings) {
return put((Settings) settings[0]);
}
}
if ((settings.length % 2) != 0) {
throw new IllegalArgumentException(
"array settings of key + value order doesn't hold correct number of arguments (" + settings.length + ")");
}
for (int i = 0; i < settings.length; i++) {
put(settings[i++].toString(), settings[i].toString());
}
return this;
public Builder put(String key, Path path) {
return put(key, path.toString());
}
/**
* Sets a time value setting with the provided setting key and value.
*
* @param key The setting key
* @param timeValue The setting timeValue
* @return The builder
*/
public Builder put(String key, TimeValue timeValue) {
return put(key, timeValue.toString());
}
/**
* Sets a byteSizeValue setting with the provided setting key and byteSizeValue.
*
* @param key The setting key
* @param byteSizeValue The setting value
* @return The builder
*/
public Builder put(String key, ByteSizeValue byteSizeValue) {
return put(key, byteSizeValue.toString());
}
/**
* Sets an enum setting with the provided setting key and enum instance.
*
* @param key The setting key
* @param enumValue The setting value
* @return The builder
*/
public Builder put(String key, Enum<?> enumValue) {
return put(key, enumValue.toString());
}
/**
* Sets a level setting with the provided setting key and level instance.
*
* @param key The setting key
* @param level The setting value
* @return The builder
*/
public Builder put(String key, Level level) {
return put(key, level.toString());
}
/**
* Sets a Lucene version setting with the provided setting key and Lucene version instance.
*
* @param key The setting key
* @param luceneVersion The setting value
* @return The builder
*/
public Builder put(String key, org.apache.lucene.util.Version luceneVersion) {
return put(key, luceneVersion.toString());
}
/**
@@ -743,6 +894,9 @@ public final class Settings implements ToXContentFragment {
return this;
}
/**
* Sets a null value for the given setting key
*/
public Builder putNull(String key) {
return put(key, (String) null);
}
@@ -848,13 +1002,6 @@ public final class Settings implements ToXContentFragment {
return this;
}
/**
* Sets the setting with the provided setting key and an array of values.
*
* @param setting The setting key
* @param values The values
* @return The builder
*/
/**
* Sets the setting with the provided setting key and an array of values.
@@ -889,26 +1036,6 @@ public final class Settings implements ToXContentFragment {
return this;
}
/**
* Sets the setting as an array of values, but keeps existing elements for the key.
*/
public Builder extendArray(String setting, String... values) {
// check for a singular (non array) value
String oldSingle = remove(setting);
// find the highest array index
int counter = 0;
while (map.containsKey(setting + '.' + counter)) {
++counter;
}
if (oldSingle != null) {
put(setting + '.' + counter++, oldSingle);
}
for (String value : values) {
put(setting + '.' + counter++, value);
}
return this;
}
/**
* Sets the setting group.
*/
@@ -926,12 +1053,21 @@ public final class Settings implements ToXContentFragment {
}
/**
* Sets all the provided settings.
* Sets all the provided settings including secure settings
*/
public Builder put(Settings settings) {
return put(settings, true);
}
/**
* Sets all the provided settings.
* @param settings the settings to set
* @param copySecureSettings if <code>true</code> all settings including secure settings are copied.
*/
public Builder put(Settings settings, boolean copySecureSettings) {
removeNonArraysFieldsIfNewSettingsContainsFieldAsArray(settings.getAsMap());
map.putAll(settings.getAsMap());
if (settings.getSecureSettings() != null) {
if (copySecureSettings && settings.getSecureSettings() != null) {
setSecureSettings(settings.getSecureSettings());
}
return this;
@@ -988,31 +1124,11 @@ public final class Settings implements ToXContentFragment {
}
/**
* Loads settings from the actual string content that represents them using the
* {@link SettingsLoaderFactory#loaderFromSource(String)}.
* @deprecated use {@link #loadFromSource(String, XContentType)} to avoid content type detection
*/
@Deprecated
public Builder loadFromSource(String source) {
SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromSource(source);
try {
Map<String, String> loadedSettings = settingsLoader.load(source);
put(loadedSettings);
} catch (Exception e) {
throw new SettingsException("Failed to load settings from [" + source + "]", e);
}
return this;
}
/**
* Loads settings from the actual string content that represents them using the
* {@link SettingsLoaderFactory#loaderFromXContentType(XContentType)} method to obtain a loader
* Loads settings from the actual string content that represents them using {@link #fromXContent(XContentParser)}
*/
public Builder loadFromSource(String source, XContentType xContentType) {
SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromXContentType(xContentType);
try {
Map<String, String> loadedSettings = settingsLoader.load(source);
put(loadedSettings);
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY, source)) {
this.put(fromXContent(parser, true, true));
} catch (Exception e) {
throw new SettingsException("Failed to load settings from [" + source + "]", e);
}
@@ -1020,24 +1136,40 @@ public final class Settings implements ToXContentFragment {
}
/**
* Loads settings from a url that represents them using the
* {@link SettingsLoaderFactory#loaderFromResource(String)}.
* Loads settings from a url that represents them using {@link #fromXContent(XContentParser)}
* Note: Loading from a path doesn't allow <code>null</code> values in the incoming xcontent
*/
public Builder loadFromPath(Path path) throws IOException {
// NOTE: loadFromStream will close the input stream
return loadFromStream(path.getFileName().toString(), Files.newInputStream(path));
return loadFromStream(path.getFileName().toString(), Files.newInputStream(path), false);
}
/**
* Loads settings from a stream that represents them using the
* {@link SettingsLoaderFactory#loaderFromResource(String)}.
* Loads settings from a stream that represents them using {@link #fromXContent(XContentParser)}
*/
public Builder loadFromStream(String resourceName, InputStream is) throws IOException {
SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromResource(resourceName);
// NOTE: copyToString will close the input stream
Map<String, String> loadedSettings =
settingsLoader.load(Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8)));
put(loadedSettings);
public Builder loadFromStream(String resourceName, InputStream is, boolean acceptNullValues) throws IOException {
final XContentType xContentType;
if (resourceName.endsWith(".json")) {
xContentType = XContentType.JSON;
} else if (resourceName.endsWith(".yml") || resourceName.endsWith(".yaml")) {
xContentType = XContentType.YAML;
} else {
throw new IllegalArgumentException("unable to detect content type from resource name [" + resourceName + "]");
}
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY, is)) {
if (parser.currentToken() == null) {
if (parser.nextToken() == null) {
return this; // empty file
}
}
put(fromXContent(parser, acceptNullValues, true));
} catch (ElasticsearchParseException e) {
throw e;
} catch (Exception e) {
throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);
} finally {
IOUtils.close(is);
}
return this;
}
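A hedged usage sketch of the reworked loading path (not part of the diff); the YAML content is made up. loadFromSource with an explicit XContentType now goes through fromXContent(parser, true, true), so content trailing the settings object is rejected.

```java
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;

public class LoadFromSourceSketch {
    public static void main(String[] args) {
        String yaml = "cluster.name: my-cluster\n"
                + "node.attr.rack: r1\n";
        // The explicit content type avoids the deprecated content-type detection.
        Settings settings = Settings.builder()
                .loadFromSource(yaml, XContentType.YAML)
                .build();
        assert "my-cluster".equals(settings.get("cluster.name"));
    }
}
```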
@ -1,38 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.settings.loader;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
/**
|
||||
* Settings loader that loads (parses) the settings in a json format by flattening them
|
||||
* into a map.
|
||||
*/
|
||||
public class JsonSettingsLoader extends XContentSettingsLoader {
|
||||
|
||||
public JsonSettingsLoader(boolean allowNullValues) {
|
||||
super(allowNullValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentType contentType() {
|
||||
return XContentType.JSON;
|
||||
}
|
||||
}
|
|
@ -1,106 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.settings.loader;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Provides the ability to load settings (in the form of a simple Map) from
|
||||
* the actual source content that represents them.
|
||||
*/
|
||||
public interface SettingsLoader {
|
||||
|
||||
class Helper {
|
||||
|
||||
public static Map<String, String> loadNestedFromMap(@Nullable Map map) {
|
||||
Map<String, String> settings = new HashMap<>();
|
||||
if (map == null) {
|
||||
return settings;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder();
|
||||
List<String> path = new ArrayList<>();
|
||||
serializeMap(settings, sb, path, map);
|
||||
return settings;
|
||||
}
|
||||
|
||||
private static void serializeMap(Map<String, String> settings, StringBuilder sb, List<String> path, Map<Object, Object> map) {
|
||||
for (Map.Entry<Object, Object> entry : map.entrySet()) {
|
||||
if (entry.getValue() instanceof Map) {
|
||||
path.add((String) entry.getKey());
|
||||
serializeMap(settings, sb, path, (Map<Object, Object>) entry.getValue());
|
||||
path.remove(path.size() - 1);
|
||||
} else if (entry.getValue() instanceof List) {
|
||||
path.add((String) entry.getKey());
|
||||
serializeList(settings, sb, path, (List) entry.getValue());
|
||||
path.remove(path.size() - 1);
|
||||
} else {
|
||||
serializeValue(settings, sb, path, (String) entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void serializeList(Map<String, String> settings, StringBuilder sb, List<String> path, List list) {
|
||||
int counter = 0;
|
||||
for (Object listEle : list) {
|
||||
if (listEle instanceof Map) {
|
||||
path.add(Integer.toString(counter));
|
||||
serializeMap(settings, sb, path, (Map<Object, Object>) listEle);
|
||||
path.remove(path.size() - 1);
|
||||
} else if (listEle instanceof List) {
|
||||
path.add(Integer.toString(counter));
|
||||
serializeList(settings, sb, path, (List) listEle);
|
||||
path.remove(path.size() - 1);
|
||||
} else {
|
||||
serializeValue(settings, sb, path, Integer.toString(counter), listEle);
|
||||
}
|
||||
counter++;
|
||||
}
|
||||
}
|
||||
|
||||
private static void serializeValue(Map<String, String> settings, StringBuilder sb, List<String> path, String name, Object value) {
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
sb.setLength(0);
|
||||
for (String pathEle : path) {
|
||||
sb.append(pathEle).append('.');
|
||||
}
|
||||
sb.append(name);
|
||||
settings.put(sb.toString(), value.toString());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Loads (parses) the settings from a source string.
|
||||
*/
|
||||
Map<String, String> load(String source) throws IOException;
|
||||
|
||||
/**
|
||||
* Loads (parses) the settings from a source bytes.
|
||||
*/
|
||||
Map<String, String> load(byte[] source) throws IOException;
|
||||
}
|
|
@ -1,97 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.settings.loader;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
/**
|
||||
* A class holding factory methods for settings loaders that attempts
|
||||
* to infer the type of the underlying settings content.
|
||||
*/
|
||||
public final class SettingsLoaderFactory {
|
||||
|
||||
private SettingsLoaderFactory() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link SettingsLoader} based on the source resource
|
||||
* name. This factory method assumes that if the resource name ends
|
||||
* with ".json" then the content should be parsed as JSON, else if
|
||||
* the resource name ends with ".yml" or ".yaml" then the content
|
||||
* should be parsed as YAML, otherwise throws an exception. Note that the
|
||||
* parsers returned by this method will not accept null-valued
|
||||
* keys.
|
||||
*
|
||||
* @param resourceName The resource name containing the settings
|
||||
* content.
|
||||
* @return A settings loader.
|
||||
*/
|
||||
public static SettingsLoader loaderFromResource(String resourceName) {
|
||||
if (resourceName.endsWith(".json")) {
|
||||
return new JsonSettingsLoader(false);
|
||||
} else if (resourceName.endsWith(".yml") || resourceName.endsWith(".yaml")) {
|
||||
return new YamlSettingsLoader(false);
|
||||
} else {
|
||||
throw new IllegalArgumentException("unable to detect content type from resource name [" + resourceName + "]");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link SettingsLoader} based on the source content.
|
||||
* This factory method assumes that if the underlying content
|
||||
* contains an opening and closing brace ('{' and '}') then the
|
||||
* content should be parsed as JSON, else if the underlying content
|
||||
* fails this condition but contains a ':' then the content should
|
||||
* be parsed as YAML, and otherwise throws an exception.
|
||||
* Note that the JSON and YAML parsers returned by this method will
|
||||
* accept null-valued keys.
|
||||
*
|
||||
* @param source The underlying settings content.
|
||||
* @return A settings loader.
|
||||
* @deprecated use {@link #loaderFromXContentType(XContentType)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public static SettingsLoader loaderFromSource(String source) {
|
||||
if (source.indexOf('{') != -1 && source.indexOf('}') != -1) {
|
||||
return new JsonSettingsLoader(true);
|
||||
} else if (source.indexOf(':') != -1) {
|
||||
return new YamlSettingsLoader(true);
|
||||
} else {
|
||||
throw new IllegalArgumentException("unable to detect content type from source [" + source + "]");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link SettingsLoader} based on the {@link XContentType}. Note only {@link XContentType#JSON} and
|
||||
* {@link XContentType#YAML} are supported
|
||||
*
|
||||
* @param xContentType The content type
|
||||
* @return A settings loader.
|
||||
*/
|
||||
public static SettingsLoader loaderFromXContentType(XContentType xContentType) {
|
||||
if (xContentType == XContentType.JSON) {
|
||||
return new JsonSettingsLoader(true);
|
||||
} else if (xContentType == XContentType.YAML) {
|
||||
return new YamlSettingsLoader(true);
|
||||
} else {
|
||||
throw new IllegalArgumentException("unsupported content type [" + xContentType + "]");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,177 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.settings.loader;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Settings loader that loads (parses) the settings in a xcontent format by flattening them
|
||||
* into a map.
|
||||
*/
|
||||
public abstract class XContentSettingsLoader implements SettingsLoader {
|
||||
|
||||
public abstract XContentType contentType();
|
||||
|
||||
private final boolean allowNullValues;
|
||||
|
||||
XContentSettingsLoader(boolean allowNullValues) {
|
||||
this.allowNullValues = allowNullValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> load(String source) throws IOException {
|
||||
// It is safe to use EMPTY here because this never uses namedObject
|
||||
try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(NamedXContentRegistry.EMPTY, source)) {
|
||||
return load(parser);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> load(byte[] source) throws IOException {
|
||||
// It is safe to use EMPTY here because this never uses namedObject
|
||||
try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(NamedXContentRegistry.EMPTY, source)) {
|
||||
return load(parser);
|
||||
}
|
||||
}
|
||||
|
||||
public Map<String, String> load(XContentParser jp) throws IOException {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
Map<String, String> settings = new HashMap<>();
|
||||
List<String> path = new ArrayList<>();
|
||||
XContentParser.Token token = jp.nextToken();
|
||||
if (token == null) {
|
||||
return settings;
|
||||
}
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new ElasticsearchParseException("malformed, expected settings to start with 'object', instead was [{}]", token);
|
||||
}
|
||||
serializeObject(settings, sb, path, jp, null);
|
||||
|
||||
// ensure we reached the end of the stream
|
||||
XContentParser.Token lastToken = null;
|
||||
try {
|
||||
while (!jp.isClosed() && (lastToken = jp.nextToken()) == null);
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchParseException(
|
||||
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
|
||||
+ "column number: [{}]",
|
||||
e, jp.getTokenLocation().lineNumber, jp.getTokenLocation().columnNumber);
|
||||
}
|
||||
if (lastToken != null) {
|
||||
throw new ElasticsearchParseException(
|
||||
"malformed, expected end of settings but encountered additional content starting at line number: [{}], "
|
||||
+ "column number: [{}]",
|
||||
jp.getTokenLocation().lineNumber, jp.getTokenLocation().columnNumber);
|
||||
}
|
||||
|
||||
return settings;
|
||||
}
|
||||
|
||||
private void serializeObject(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser,
|
||||
String objFieldName) throws IOException {
|
||||
if (objFieldName != null) {
|
||||
path.add(objFieldName);
|
||||
}
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
serializeObject(settings, sb, path, parser, currentFieldName);
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
serializeArray(settings, sb, path, parser, currentFieldName);
|
||||
} else if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NULL) {
|
||||
serializeValue(settings, sb, path, parser, currentFieldName, true);
|
||||
} else {
|
||||
serializeValue(settings, sb, path, parser, currentFieldName, false);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (objFieldName != null) {
|
||||
path.remove(path.size() - 1);
|
||||
}
|
||||
}
|
||||
|
||||
private void serializeArray(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName)
|
||||
throws IOException {
|
||||
XContentParser.Token token;
|
||||
int counter = 0;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
serializeObject(settings, sb, path, parser, fieldName + '.' + (counter++));
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
serializeArray(settings, sb, path, parser, fieldName + '.' + (counter++));
|
||||
} else if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.VALUE_NULL) {
|
||||
serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), true);
|
||||
// ignore
|
||||
} else {
|
||||
serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void serializeValue(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName,
|
||||
boolean isNull) throws IOException {
|
||||
sb.setLength(0);
|
||||
for (String pathEle : path) {
|
||||
sb.append(pathEle).append('.');
|
||||
}
|
||||
sb.append(fieldName);
|
||||
String key = sb.toString();
|
||||
String currentValue = isNull ? null : parser.text();
|
||||
|
||||
if (settings.containsKey(key)) {
|
||||
throw new ElasticsearchParseException(
|
||||
"duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]",
|
||||
key,
|
||||
parser.getTokenLocation().lineNumber,
|
||||
parser.getTokenLocation().columnNumber,
|
||||
settings.get(key),
|
||||
currentValue
|
||||
);
|
||||
}
|
||||
|
||||
if (currentValue == null && !allowNullValues) {
|
||||
throw new ElasticsearchParseException(
|
||||
"null-valued setting found for key [{}] found at line number [{}], column number [{}]",
|
||||
key,
|
||||
parser.getTokenLocation().lineNumber,
|
||||
parser.getTokenLocation().columnNumber
|
||||
);
|
||||
}
|
||||
|
||||
settings.put(key, currentValue);
|
||||
}
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.settings.loader;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Settings loader that loads (parses) the settings in a yaml format by flattening them
|
||||
* into a map.
|
||||
*/
|
||||
public class YamlSettingsLoader extends XContentSettingsLoader {
|
||||
|
||||
public YamlSettingsLoader(boolean allowNullValues) {
|
||||
super(allowNullValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentType contentType() {
|
||||
return XContentType.YAML;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> load(String source) throws IOException {
|
||||
/*
|
||||
* #8259: Better handling of tabs vs spaces in elasticsearch.yml
|
||||
*/
|
||||
if (source.indexOf('\t') > -1) {
|
||||
throw new IOException("Tabs are illegal in YAML. Did you mean to use whitespace character instead?");
|
||||
}
|
||||
return super.load(source);
|
||||
}
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Settings loader (parser) allowing to parse different "source" formats into
|
||||
* a {@link org.elasticsearch.common.settings.Settings}.
|
||||
*/
|
||||
package org.elasticsearch.common.settings.loader;
|
|
@@ -155,7 +155,7 @@ public class Environment {
if (PATH_DATA_SETTING.exists(settings)) {
finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths);
}
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile);
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
this.settings = finalSettings.build();
}
@@ -33,7 +33,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -118,7 +117,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
iterator.remove();
} else if (FIELDDATA.match(fieldName)) {
// for bw compat only
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
Map<String, Object> fieldDataSettings = nodeMapValue(fieldNode, "fielddata");
if (fieldDataSettings.containsKey("loading")) {
builder.eagerGlobalOrdinals("eager_global_ordinals".equals(fieldDataSettings.get("loading")));
}
@@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -276,9 +277,10 @@ public final class Script implements ToXContentObject, Writeable {
* Parse the script configured in the given settings.
*/
public static Script parse(Settings settings) {
try {
XContentBuilder builder = JsonXContent.contentBuilder();
builder.map(settings.getAsStructuredMap());
try (XContentBuilder builder = JsonXContent.contentBuilder()){
builder.startObject();
settings.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
return parse(JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, builder.bytes()));
} catch (IOException e) {
// it should not happen since we are not actually reading from a stream but an in-memory byte[]
@@ -122,7 +122,7 @@ public class ClusterStatsIT extends ESIntegTestCase {
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN));
prepareCreate("test1").setSettings("number_of_shards", 2, "number_of_replicas", 1).get();
prepareCreate("test1").setSettings(Settings.builder().put("number_of_shards", 2).put("number_of_replicas", 1)).get();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.YELLOW));
@@ -140,7 +140,7 @@ public class ClusterStatsIT extends ESIntegTestCase {
assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(1L));
assertShardStats(response.getIndicesStats().getShards(), 1, 4, 2, 1.0);
prepareCreate("test2").setSettings("number_of_shards", 3, "number_of_replicas", 0).get();
prepareCreate("test2").setSettings(Settings.builder().put("number_of_shards", 3).put("number_of_replicas", 0)).get();
ensureGreen();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN));
@@ -83,7 +83,7 @@ public class CreateIndexRequestBuilderTests extends ESTestCase {
*/
public void testSetSettings() throws IOException {
CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE);
builder.setSettings(KEY, VALUE);
builder.setSettings(Settings.builder().put(KEY, VALUE));
assertEquals(VALUE, builder.request().settings().get(KEY));
builder.setSettings("{\""+KEY+"\" : \""+VALUE+"\"}", XContentType.JSON);
@@ -54,10 +54,9 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase {
PutRequest request = new PutRequest("test", "test_shards");
request.patterns(Collections.singletonList("test_shards*"));
Map<String, Object> map = new HashMap<>();
map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0");
map.put("index.shard.check_on_startup", "blargh");
request.settings(Settings.builder().put(map).build());
request.settings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0")
.put("index.shard.check_on_startup", "blargh").build());
List<Throwable> throwables = putTemplate(xContentRegistry(), request);
assertEquals(throwables.size(), 1);
@@ -72,10 +71,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase {
public void testIndexTemplateValidationAccumulatesValidationErrors() {
PutRequest request = new PutRequest("test", "putTemplate shards");
request.patterns(Collections.singletonList("_test_shards*"));
Map<String, Object> map = new HashMap<>();
map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0");
request.settings(Settings.builder().put(map).build());
request.settings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0").build());
List<Throwable> throwables = putTemplate(xContentRegistry(), request);
assertEquals(throwables.size(), 1);
@@ -457,7 +457,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
*/
public void testBulkUpdateChildMissingParentRouting() throws Exception {
assertAcked(prepareCreate("test")
.setSettings("index.version.created", Version.V_5_6_0.id) // allows for multiple types
.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)) // allows for multiple types
.addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
.addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON));
ensureGreen();
@@ -20,6 +20,7 @@
package org.elasticsearch.action.search;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collections;
@@ -34,9 +35,9 @@ public class TransportSearchIT extends ESIntegTestCase {
final int numPrimaries1 = randomIntBetween(2, 10);
final int numPrimaries2 = randomIntBetween(1, 10);
assertAcked(prepareCreate("test1")
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries1));
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries1)));
assertAcked(prepareCreate("test2")
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries2));
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries2)));
// no exception
client().prepareSearch("test1").get();
@@ -102,7 +102,7 @@ public class AutoCreateIndexTests extends ESTestCase {
public void testExistingIndex() {
Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false,
randomAlphaOfLengthBetween(7, 10))).build();
randomAlphaOfLengthBetween(7, 10)).toString()).build();
AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
assertThat(autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"),
buildClusterState("index1", "index2", "index3")), equalTo(false));
@@ -42,7 +42,7 @@ public class RecoveryWithUnsupportedIndicesIT extends ESIntegTestCase {
/**
* Return settings that could be used to start a node that has the given zipped home directory.
*/
protected Settings prepareBackwardsDataDir(Path backwardsIndex, Object... settings) throws IOException {
protected Settings prepareBackwardsDataDir(Path backwardsIndex) throws IOException {
Path indexDir = createTempDir();
Path dataDir = indexDir.resolve("data");
try (InputStream stream = Files.newInputStream(backwardsIndex)) {
@@ -76,7 +76,6 @@ public class RecoveryWithUnsupportedIndicesIT extends ESIntegTestCase {
assertFalse(Files.exists(src));
assertTrue(Files.exists(dest));
Settings.Builder builder = Settings.builder()
.put(settings)
.put(Environment.PATH_DATA_SETTING.getKey(), dataDir.toAbsolutePath());
return builder.build();
@@ -144,7 +144,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
}
protected static void assertHeaders(ThreadPool pool) {
assertHeaders(pool.getThreadContext().getHeaders(), (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap());
assertHeaders(pool.getThreadContext().getHeaders(), HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsMap());
}
public static class InternalException extends Exception {
@@ -163,7 +163,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
private final ThreadPool pool;
public AssertingActionListener(String action, ThreadPool pool) {
this(action, (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap(), pool);
this(action, HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsMap(), pool);
}
public AssertingActionListener(String action, Map<String, String> expectedHeaders, ThreadPool pool) {
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;

@@ -90,8 +91,11 @@ public class TransportClientNodesServiceTests extends ESTestCase {
// map for each address of the nodes a cluster state request should respond with
final Map<TransportAddress, DiscoveryNodes> nodeMap;

TestIteration() {
this(Settings.EMPTY);
}

TestIteration(Object... extraSettings) {
TestIteration(Settings extraSettings) {
Settings settings = Settings.builder().put(extraSettings).put("cluster.name", "test").build();
ClusterName clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
List<TransportAddress> listNodes = new ArrayList<>();

@@ -318,7 +322,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {
}

private void checkRemoveAddress(boolean sniff) {
Object[] extraSettings = {TransportClient.CLIENT_TRANSPORT_SNIFF.getKey(), sniff};
Settings extraSettings = Settings.builder().put(TransportClient.CLIENT_TRANSPORT_SNIFF.getKey(), sniff).build();
try(TestIteration iteration = new TestIteration(extraSettings)) {
final TransportClientNodesService service = iteration.transportClientNodesService;
assertEquals(iteration.listNodesCount + iteration.sniffNodesCount, service.connectedNodes().size());

@@ -179,7 +179,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase {
internalCluster().startNodes(2,
// manually control publishing
Settings.builder().put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(), "60m").build());
prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).get();
prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
ensureGreen("test");
InternalTestCluster internalTestCluster = internalCluster();
InternalClusterInfoService infoService = (InternalClusterInfoService) internalTestCluster.getInstance(ClusterInfoService.class, internalTestCluster.getMasterName());

@@ -184,8 +184,9 @@ public class NoMasterNodeIT extends ESIntegTestCase {
internalCluster().startNode(settings);
// start a second node, create an index, and then shut it down so we have no master block
internalCluster().startNode(settings);
prepareCreate("test1").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get();
prepareCreate("test2").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).get();
prepareCreate("test1").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)).get();
prepareCreate("test2").setSettings(
Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
client().admin().cluster().prepareHealth("_all").setWaitForGreenStatus().get();
client().prepareIndex("test1", "type1", "1").setSource("field", "value1").get();
client().prepareIndex("test2", "type1", "1").setSource("field", "value1").get();

@@ -35,6 +35,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -209,9 +210,10 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
int numberOfShards = scaledRandomIntBetween(1, cluster().numDataNodes());
// if the create index is ack'ed, then all nodes have successfully processed the cluster state
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards,
IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0,
MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), Long.MAX_VALUE)
.setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), Long.MAX_VALUE))
.addMapping("type", mapping)
.setTimeout("60s").get());
ensureGreen(); // wait for green state, so its both green, and there are no more pending events

@@ -166,7 +166,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
.put("index.similarity.default.type", "BM25")
.put("index.version.created", version)
.put("index.version.upgraded", upgraded)
.put("index.version.minimum_compatible", minCompat.luceneVersion)
.put("index.version.minimum_compatible", minCompat.luceneVersion.toString())
.put("index.analysis.analyzer.my_analyzer.tokenizer", "keyword")
.build())).nodes(DiscoveryNodes.builder().add(newNode("node1")))
.build();

@@ -63,7 +63,7 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
// Tests _parent meta field logic, because part of the validation is in MetaDataMappingService
public void testAddExtraChildTypePointingToAlreadyParentExistingType() throws Exception {
IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test")
.setSettings("index.version.created", Version.V_5_6_0.id)
.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
.addMapping("parent")
.addMapping("child1", "_parent", "type=parent")
);

@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.routing.allocation;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;

@@ -32,7 +33,8 @@ public class ShardStateIT extends ESIntegTestCase {

public void testPrimaryFailureIncreasesTerm() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(2);
prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).get();
prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
ensureGreen();
assertPrimaryTerms(1, 1);

@@ -420,7 +420,7 @@ public class ScopedSettingsTests extends ESTestCase {

diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
Settings.builder().put("some.group.foobar", 17, "some.group.foo", 25).build());
Settings.builder().put("some.group.foobar", 17).put("some.group.foo", 25).build());
assertEquals(6, diff.size()); // 6 since foo.bar.quux has 3 values essentially
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));

@@ -430,8 +430,7 @@

diff = settings.diff(
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
Settings.builder().put("some.prefix.foobar.somekey", 17,
"some.prefix.foo.somekey", 18).build());
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18).build());
assertEquals(6, diff.size()); // 6 since foo.bar.quux has 3 values essentially
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));

@@ -464,7 +463,7 @@

diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
Settings.builder().put("some.group.foobar", 17, "some.group.foo", 25).build());
Settings.builder().put("some.group.foobar", 17).put("some.group.foo", 25).build());
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));

@@ -474,8 +473,7 @@

diff = settings.diff(
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
Settings.builder().put("some.prefix.foobar.somekey", 17,
"some.prefix.foo.somekey", 18).build());
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18).build());
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));

@@ -485,8 +483,7 @@

diff = settings.diff(
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
Settings.builder().put("some.prefix.foobar.somekey", 17,
"some.prefix.foo.somekey", 18)
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18)
.putArray("foo.bar.quux", "x", "y", "z")
.putArray("foo.baz.quux", "d", "e", "f")
.build());

@@ -540,15 +537,15 @@
settings.validate(Settings.builder().put("index.store.type", "boom"));
settings.validate(Settings.builder().put("index.store.type", "boom").build());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true)));
settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true)));
assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage());

e = expectThrows(IllegalArgumentException.class, () ->
settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true).build()));
settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true).build()));
assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage());

e = expectThrows(IllegalArgumentException.class, () ->
settings.validate(Settings.builder().put("index.store.type", "boom", "index.number_of_replicas", true).build()));
settings.validate(Settings.builder().put("index.store.type", "boom").put("index.number_of_replicas", true).build()));
assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage());

e = expectThrows(IllegalArgumentException.class, () ->

@@ -47,7 +47,7 @@ public class SettingsFilterTests extends ESTestCase {
.put("bar1", "bar1_test")
.put("bar.2", "bar2_test")
.build(),
Settings.builder()
Settings.builder()
.put("foo1", "foo1_test")
.build(),
"foo", "bar*"

@@ -19,27 +19,29 @@

package org.elasticsearch.common.settings;

import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.loader.YamlSettingsLoader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.hamcrest.CoreMatchers;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

@@ -90,55 +92,6 @@ public class SettingsTests extends ESTestCase {
assertThat(settings.get("setting2"), is("${prompt.secret}"));
}

public void testUnFlattenedSettings() {
Settings settings = Settings.builder()
.put("foo", "abc")
.put("bar", "def")
.put("baz.foo", "ghi")
.put("baz.bar", "jkl")
.putArray("baz.arr", "a", "b", "c")
.build();
Map<String, Object> map = settings.getAsStructuredMap();
assertThat(map.keySet(), Matchers.<String>hasSize(3));
assertThat(map, allOf(
Matchers.<String, Object>hasEntry("foo", "abc"),
Matchers.<String, Object>hasEntry("bar", "def")));

@SuppressWarnings("unchecked") Map<String, Object> bazMap = (Map<String, Object>) map.get("baz");
assertThat(bazMap.keySet(), Matchers.<String>hasSize(3));
assertThat(bazMap, allOf(
Matchers.<String, Object>hasEntry("foo", "ghi"),
Matchers.<String, Object>hasEntry("bar", "jkl")));
@SuppressWarnings("unchecked") List<String> bazArr = (List<String>) bazMap.get("arr");
assertThat(bazArr, contains("a", "b", "c"));

}

public void testFallbackToFlattenedSettings() {
Settings settings = Settings.builder()
.put("foo", "abc")
.put("foo.bar", "def")
.put("foo.baz", "ghi").build();
Map<String, Object> map = settings.getAsStructuredMap();
assertThat(map.keySet(), Matchers.<String>hasSize(3));
assertThat(map, allOf(
Matchers.<String, Object>hasEntry("foo", "abc"),
Matchers.<String, Object>hasEntry("foo.bar", "def"),
Matchers.<String, Object>hasEntry("foo.baz", "ghi")));

settings = Settings.builder()
.put("foo.bar", "def")
.put("foo", "abc")
.put("foo.baz", "ghi")
.build();
map = settings.getAsStructuredMap();
assertThat(map.keySet(), Matchers.<String>hasSize(3));
assertThat(map, allOf(
Matchers.<String, Object>hasEntry("foo", "abc"),
Matchers.<String, Object>hasEntry("foo.bar", "def"),
Matchers.<String, Object>hasEntry("foo.baz", "ghi")));
}

public void testGetAsSettings() {
Settings settings = Settings.builder()
.put("bar", "hello world")

@@ -216,11 +169,9 @@ public class SettingsTests extends ESTestCase {
.put(Settings.builder().putArray("value", "2", "3").build())
.build();
assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));

settings = Settings.builder()
.put(new YamlSettingsLoader(false).load("value: 1"))
.put(new YamlSettingsLoader(false).load("value: [ 2, 3 ]"))
.build();
settings = Settings.builder().loadFromSource("value: 1", XContentType.YAML)
.loadFromSource("value: [ 2, 3 ]", XContentType.YAML)
.build();
assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));

settings = Settings.builder()

@@ -562,4 +513,145 @@ public class SettingsTests extends ESTestCase {
assertThat(e, hasToString(containsString("settings object contains values for [foobar=foo] and [foobar.0=bar]")));
}

public void testToAndFromXContent() throws IOException {
Settings settings = Settings.builder()
.putArray("foo.bar.baz", "1", "2", "3")
.put("foo.foobar", 2)
.put("rootfoo", "test")
.put("foo.baz", "1,2,3,4")
.putNull("foo.null.baz")
.build();
final boolean flatSettings = randomBoolean();
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
settings.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", ""+flatSettings)));
builder.endObject();
XContentParser parser = createParser(builder);
Settings build = Settings.fromXContent(parser);
assertEquals(7, build.size()); // each list element is it's own key hence 7 and not 5
assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz"));
assertEquals(2, build.getAsInt("foo.foobar", 0).intValue());
assertEquals("test", build.get("rootfoo"));
assertEquals("1,2,3,4", build.get("foo.baz"));
assertNull(build.get("foo.null.baz"));
}

public void testSimpleJsonSettings() throws Exception {
final String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
final Settings settings = Settings.builder()
.loadFromStream(json, getClass().getResourceAsStream(json), false)
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));
assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));

// check array
assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
}

public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
final SettingsException e = expectThrows(SettingsException.class,
() -> Settings.builder().loadFromSource(json, XContentType.JSON).build());
assertThat(
e.toString(),
CoreMatchers.containsString("duplicate settings key [foo] " +
"found at line number [1], " +
"column number [20], " +
"previous value [bar], " +
"current value [baz]"));

String yaml = "foo: bar\nfoo: baz";
SettingsException e1 = expectThrows(SettingsException.class, () -> {
Settings.builder().loadFromSource(yaml, XContentType.YAML);
});
assertEquals(e1.getCause().getClass(), ElasticsearchParseException.class);
String msg = e1.getCause().getMessage();
assertTrue(
msg,
msg.contains("duplicate settings key [foo] found at line number [2], column number [6], " +
"previous value [bar], current value [baz]"));
}

public void testToXContent() throws IOException {
// this is just terrible but it's the existing behavior!
Settings test = Settings.builder().putArray("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
builder.endObject();
assertEquals("{\"foo\":{\"bar\":{\"0\":\"1\",\"1\":\"2\",\"2\":\"3\",\"baz\":\"test\"}}}", builder.string());

test = Settings.builder().putArray("foo.bar", "1", "2", "3").build();
builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
builder.endObject();
assertEquals("{\"foo\":{\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());

builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
builder.endObject();
assertEquals("{\"foo.bar.0\":\"1\",\"foo.bar.1\":\"2\",\"foo.bar.2\":\"3\"}", builder.string());
}

public void testLoadEmptyStream() throws IOException {
Settings test = Settings.builder().loadFromStream(randomFrom("test.json", "test.yml"), new ByteArrayInputStream(new byte[0]), false)
.build();
assertEquals(0, test.size());
}

public void testSimpleYamlSettings() throws Exception {
final String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
final Settings settings = Settings.builder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));
assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));

// check array
assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
}

public void testIndentation() throws Exception {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml), false);
});
assertTrue(e.getMessage(), e.getMessage().contains("malformed"));
}

public void testIndentationWithExplicitDocumentStart() throws Exception {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml), false);
});
assertTrue(e.getMessage(), e.getMessage().contains("malformed"));
}

public void testMissingValue() throws Exception {
Path tmp = createTempFile("test", ".yaml");
Files.write(tmp, Collections.singletonList("foo: # missing value\n"), StandardCharsets.UTF_8);
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromPath(tmp);
});
assertTrue(
e.getMessage(),
e.getMessage().contains("null-valued setting found for key [foo] found at line number [1], column number [5]"));
}
}

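The new SettingsTests above exercise the round trip this change introduces: Settings.toXContent writes either nested or flat_settings output, and Settings.fromXContent reads it back. As a minimal sketch of that usage, grounded only in the calls shown in the tests above (the keys are illustrative, and createParser is the ESTestCase helper used in testToAndFromXContent, not production API):

    // Sketch only: serialize Settings to JSON xcontent and parse them back.
    Settings original = Settings.builder()
        .put("foo.bar", "baz")                 // illustrative keys, not from this commit
        .putArray("foo.list", "1", "2", "3")
        .build();
    XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
    builder.startObject();
    original.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
    builder.endObject();
    XContentParser parser = createParser(builder); // test helper, as in the tests above
    Settings roundTripped = Settings.fromXContent(parser);
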
@@ -1,24 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.settings.foo;

// used in SettingsTest
public class FooTestClass {
}

@@ -1,75 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.settings.loader;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;

public class JsonSettingsLoaderTests extends ESTestCase {

public void testSimpleJsonSettings() throws Exception {
final String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
final Settings settings = Settings.builder()
.loadFromStream(json, getClass().getResourceAsStream(json))
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));
assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));

// check array
assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
}

public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
final SettingsException e = expectThrows(SettingsException.class,
() -> Settings.builder().loadFromSource(json, XContentType.JSON).build());
assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
assertThat(
e.toString(),
containsString("duplicate settings key [foo] " +
"found at line number [1], " +
"column number [20], " +
"previous value [bar], " +
"current value [baz]"));
}

public void testNullValuedSettingThrowsException() {
final String json = "{\"foo\":null}";
final ElasticsearchParseException e =
expectThrows(ElasticsearchParseException.class, () -> new JsonSettingsLoader(false).load(json));
assertThat(e.toString(), containsString("null-valued setting found for key [foo] found at line number [1], column number [8]"));
}

}

@@ -1,98 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.settings.loader;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;

import static org.hamcrest.Matchers.equalTo;

public class YamlSettingsLoaderTests extends ESTestCase {

public void testSimpleYamlSettings() throws Exception {
final String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
final Settings settings = Settings.builder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.build();

assertThat(settings.get("test1.value1"), equalTo("value1"));
assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));

// check array
assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
}

public void testIndentation() throws Exception {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml));
});
assertTrue(e.getMessage(), e.getMessage().contains("malformed"));
}

public void testIndentationWithExplicitDocumentStart() throws Exception {
String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml));
});
assertTrue(e.getMessage(), e.getMessage().contains("malformed"));
}

public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());

String yaml = "foo: bar\nfoo: baz";
SettingsException e = expectThrows(SettingsException.class, () -> {
Settings.builder().loadFromSource(yaml, XContentType.YAML);
});
assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
String msg = e.getCause().getMessage();
assertTrue(
msg,
msg.contains("duplicate settings key [foo] found at line number [2], column number [6], " +
"previous value [bar], current value [baz]"));
}

public void testMissingValue() throws Exception {
Path tmp = createTempFile("test", ".yaml");
Files.write(tmp, Collections.singletonList("foo: # missing value\n"), StandardCharsets.UTF_8);
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {
Settings.builder().loadFromPath(tmp);
});
assertTrue(
e.getMessage(),
e.getMessage().contains("null-valued setting found for key [foo] found at line number [1], column number [5]"));
}
}

@@ -257,9 +257,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
isolatePreferredMaster.startDisrupting();

assertAcked(client(randomFrom(nonPreferredNodes)).admin().indices().prepareCreate("test").setSettings(
INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1,
INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0
));
Settings.builder().put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1).put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)));

internalCluster().clearDisruptionScheme(false);
internalCluster().setDisruptionScheme(isolateAllNodes);

@@ -137,7 +137,7 @@ public class ZenFaultDetectionTests extends ESTestCase {
Settings.builder()
.put(settings)
// trace zenfd actions but keep the default otherwise
.put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), singleton(TransportLivenessAction.NAME))
.putArray(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
.build(),
new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService,
namedWriteableRegistry, new NetworkService(Collections.emptyList()), version),

@@ -179,7 +179,7 @@ public class PublishClusterStateActionTests extends ESTestCase {
ThreadPool threadPool, Logger logger, Map<String, MockNode> nodes) throws Exception {
final Settings settings = Settings.builder()
.put("name", name)
.put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "",
.put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "").put(
TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
.put(basSettings)
.build();

@@ -48,7 +48,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
// this test checks that index state is written on data only nodes if they have a shard allocated
String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY);
String dataNode = internalCluster().startDataOnlyNode(Settings.EMPTY);
assertAcked(prepareCreate("test").setSettings("index.number_of_replicas", 0));
assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.number_of_replicas", 0)));
index("test", "doc", "1", jsonBuilder().startObject().field("text", "some text").endObject());
ensureGreen("test");
assertIndexInMetaState(dataNode, "test");

@@ -159,10 +159,8 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
.endObject().endObject().string();
// note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
int numberOfShards = numberOfShards();
assertAcked(prepareCreate("test").setSettings(
SETTING_NUMBER_OF_SHARDS, numberOfShards(),
SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)
).addMapping("type1", mapping, XContentType.JSON));
assertAcked(prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards())
.put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1))).addMapping("type1", mapping, XContentType.JSON));

int value1Docs;
int value2Docs;

@@ -517,7 +515,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
public void testStartedShardFoundIfStateNotYetProcessed() throws Exception {
// nodes may need to report the shards they processed the initial recovered cluster state from the master
final String nodeName = internalCluster().startNode();
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1));
assertAcked(prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)));
final Index index = resolveIndex("test");
final ShardId shardId = new ShardId(index, 0);
index("test", "type", "1");

@@ -302,7 +302,9 @@ public class GetActionIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1", mapping1, XContentType.JSON)
.addMapping("type2", mapping2, XContentType.JSON)
.setSettings("index.refresh_interval", -1, "index.version.created", Version.V_5_6_0.id)); // multi types in 5.6
// multi types in 5.6
.setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id)));

ensureGreen();

GetResponse response = client().prepareGet("test", "type1", "1").get();

@@ -576,7 +578,8 @@
assertAcked(prepareCreate("test")
.addMapping("doc", "field1", "type=keyword,store=true")
.addAlias(new Alias("alias"))
.setSettings("index.refresh_interval", -1, "index.version.created", Version.V_5_6_0.id)); // multi types in 5.6
.setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id)));
// multi types in 5.6

client().prepareIndex("test", "doc", "1")
.setRouting("1")

@@ -612,7 +615,8 @@
.addMapping("parent")
.addMapping("my-type1", "_parent", "type=parent", "field1", "type=keyword,store=true")
.addAlias(new Alias("alias"))
.setSettings("index.refresh_interval", -1, "index.version.created", Version.V_5_6_0.id)); // multi types in 5.6
.setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id)));
// multi types in 5.6

client().prepareIndex("test", "my-type1", "1")
.setRouting("1")

@@ -676,7 +680,8 @@

public void testGetFieldsComplexField() throws Exception {
assertAcked(prepareCreate("my-index")
.setSettings("index.refresh_interval", -1, "index.version.created", Version.V_5_6_0.id) // multi types in 5.6
// multi types in 5.6
.setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id))
.addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties")
.startObject("field1").field("type", "object").startObject("properties")
.startObject("field2").field("type", "object").startObject("properties")

@@ -380,7 +380,7 @@ public class IndexSettingsTests extends ESTestCase {
assertEquals(TimeValue.parseTimeValue(newGCDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(),
randomBoolean() ? -1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).build()));
(randomBoolean() ? -1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).toString()).build()));
assertEquals(-1, settings.getGcDeletesInMillis());
}

@@ -33,7 +33,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
public void testDefaultsCompoundAnalysis() throws Exception {
String json = "/org/elasticsearch/index/analysis/stop.json";
Settings settings = Settings.builder()
.loadFromStream(json, getClass().getResourceAsStream(json))
.loadFromStream(json, getClass().getResourceAsStream(json), false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();

@@ -58,7 +58,7 @@ public class SynonymsAnalysisTests extends ESTestCase {

String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json";
Settings settings = Settings.builder().
loadFromStream(json, getClass().getResourceAsStream(json))
loadFromStream(json, getClass().getResourceAsStream(json), false)
.put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();

@@ -88,7 +88,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
.putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
.put().build();
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;

@@ -109,7 +109,7 @@
.putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
.put().build();
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;

@@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.rest.RestStatus;

@@ -38,9 +39,8 @@ public class FieldStatsProviderRefreshTests extends ESSingleNodeTestCase {

public void testQueryRewriteOnRefresh() throws Exception {
assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "s", "type=text")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true,
IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1,
IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
.get());

// Index some documents

@@ -98,7 +98,8 @@ public class DynamicMappingIT extends ESIntegTestCase {
}

public void testMappingsPropagatedToMasterNodeImmediatelyMultiType() throws IOException {
assertAcked(prepareCreate("index").setSettings("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
assertAcked(prepareCreate("index").setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)));
// allows for multiple types

// works when the type has been dynamically created
client().prepareIndex("index", "type", "1").setSource("foo", 3).get();

@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;

@@ -46,7 +47,8 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
}

protected void testConflict(String mapping, String mappingUpdate, Version idxVersion, String... errorMessages) throws InterruptedException {
assertAcked(prepareCreate(INDEX).setSource(mapping, XContentType.JSON).setSettings("index.version.created", idxVersion.id));
assertAcked(prepareCreate(INDEX).setSource(mapping, XContentType.JSON)
.setSettings(Settings.builder().put("index.version.created", idxVersion.id)));
ensureGreen(INDEX);
GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
try {

@@ -151,7 +151,7 @@ public class IndexShardIT extends ESSingleNodeTestCase {

public void testMarkAsInactiveTriggersSyncedFlush() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0));
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)));
client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get();
ensureGreen("test");
IndicesService indicesService = getInstanceFromNode(IndicesService.class);

@@ -220,7 +220,7 @@ public class IndexShardIT extends ESSingleNodeTestCase {

public void testUpdatePriority() {
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(IndexMetaData.SETTING_PRIORITY, 200));
.setSettings(Settings.builder().put(IndexMetaData.SETTING_PRIORITY, 200)));
IndexService indexService = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test"));
assertEquals(200, indexService.getIndexSettings().getSettings().getAsInt(IndexMetaData.SETTING_PRIORITY, 0).intValue());
client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_PRIORITY, 400)

@@ -247,7 +247,7 @@ public class IndexShardIT extends ESSingleNodeTestCase {

public void testExpectedShardSizeIsPresent() throws InterruptedException {
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0));
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)));
for (int i = 0; i < 50; i++) {
client().prepareIndex("test", "test").setSource("{}", XContentType.JSON).get();
}

@@ -382,7 +382,6 @@ public class IndexShardIT extends ESSingleNodeTestCase {
.builder()
.put("index.number_of_shards", 1)
.put("index.translog.generation_threshold_size", generationThreshold + "b")
.put()
.build();
createIndex("test", settings);
ensureGreen("test");

@@ -101,13 +101,15 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node3).setNewDelegate(listener);

client().admin().indices().prepareCreate("test")
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).get();
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
ensureGreen("test");
assertThat("beforeIndexAddedToCluster called only once", beforeAddedCount.get(), equalTo(1));
assertThat("beforeIndexCreated called on each data node", allCreatedCount.get(), greaterThanOrEqualTo(3));

try {
client().admin().indices().prepareCreate("failed").setSettings("index.fail", true).get();
client().admin().indices().prepareCreate("failed")
.setSettings(Settings.builder().put("index.fail", true)).get();
fail("should have thrown an exception during creation");
} catch (Exception e) {
assertTrue(e.getMessage().contains("failing on purpose"));

@@ -122,7 +124,8 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
*/
public void testIndexShardFailedOnRelocation() throws Throwable {
String node1 = internalCluster().startNode();
client().admin().indices().prepareCreate("index1").setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).get();
client().admin().indices().prepareCreate("index1")
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)).get();
ensureGreen("index1");
String node2 = internalCluster().startNode();
internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node2).setNewDelegate(new IndexShardStateChangeListener() {

@@ -148,7 +151,8 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {

//create an index that should fail
try {
client().admin().indices().prepareCreate("failed").setSettings(SETTING_NUMBER_OF_SHARDS, 1, "index.fail", true).get();
client().admin().indices().prepareCreate("failed")
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put("index.fail", true)).get();
fail("should have thrown an exception");
} catch (ElasticsearchException e) {
assertTrue(e.getMessage().contains("failing on purpose"));

@@ -159,7 +163,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {

//create an index
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(SETTING_NUMBER_OF_SHARDS, 6, SETTING_NUMBER_OF_REPLICAS, 0));
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 6).put(SETTING_NUMBER_OF_REPLICAS, 0)));
ensureGreen();
assertThat(stateChangeListenerNode1.creationSettings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1), equalTo(6));
assertThat(stateChangeListenerNode1.creationSettings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1), equalTo(0));

@@ -52,7 +52,7 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas
public void testStartDeleteIndexEventCallback() throws Throwable {
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0));
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)));
ensureGreen();
Index idx = resolveIndex("test");
IndexMetaData metaData = indicesService.indexService(idx).getMetaData();

@@ -599,7 +599,8 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true);

for (String index : Arrays.asList("foo", "foobar", "bar", "barbaz")) {
assertAcked(prepareCreate(index).setSettings("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
assertAcked(prepareCreate(index).setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)));
// allows for multiple types
}

verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), false);

@@ -24,6 +24,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;

@@ -51,7 +52,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index")
.addMapping("type", "f", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get());
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)).get());
indexRandom(true,
client.prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"),
client.prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z"));

@@ -93,10 +94,8 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
public void testQueryRewrite() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true,
IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5,
IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"),
client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"),
client.prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"),

@@ -147,9 +146,8 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
public void testQueryRewriteMissingValues() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setSource("s", "2016-03-19"),
client.prepareIndex("index", "type", "2").setSource("s", "2016-03-20"),
client.prepareIndex("index", "type", "3").setSource("s", "2016-03-21"),

@@ -197,10 +195,8 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
public void testQueryRewriteDates() throws Exception {
Client client = client();
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "d", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true,
IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1,
IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get());
indexRandom(true, client.prepareIndex("index", "type", "1").setSource("d", "2014-01-01T00:00:00"),
client.prepareIndex("index", "type", "2").setSource("d", "2014-02-01T00:00:00"),
client.prepareIndex("index", "type", "3").setSource("d", "2014-03-01T00:00:00"),

@ -250,18 +246,14 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
|||
|
||||
public void testQueryRewriteDatesWithNow() throws Exception {
|
||||
Client client = client();
|
||||
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
|
||||
assertAcked(client.admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date")
|
||||
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
|
||||
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.get());
|
||||
.setSettings(settings).get());
|
||||
assertAcked(client.admin().indices().prepareCreate("index-2").addMapping("type", "d", "type=date")
|
||||
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
|
||||
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.get());
|
||||
.setSettings(settings).get());
|
||||
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
|
||||
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
|
||||
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.get());
|
||||
.setSettings(settings).get());
|
||||
DateTime now = new DateTime(ISOChronology.getInstanceUTC());
|
||||
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
|
||||
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
|
||||
|
@ -369,9 +361,10 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
|||
|
||||
public void testCanCache() throws Exception {
|
||||
Client client = client();
|
||||
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
|
||||
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date")
|
||||
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
|
||||
2, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.setSettings(settings)
|
||||
.get());
|
||||
indexRandom(true, client.prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"),
|
||||
client.prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"),
|
||||
|
@ -455,9 +448,10 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
|||
|
||||
public void testCacheWithFilteredAlias() {
|
||||
Client client = client();
|
||||
Settings settings = Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
|
||||
assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "created_at", "type=date")
|
||||
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
|
||||
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.setSettings(settings)
|
||||
.addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")))
|
||||
.get());
|
||||
DateTime now = new DateTime(DateTimeZone.UTC);
|
||||
|
|
|
@@ -108,7 +108,7 @@ public class AnalysisModuleTests extends ESTestCase {
 }

 private Settings loadFromClasspath(String path) throws IOException {
-return Settings.builder().loadFromStream(path, getClass().getResourceAsStream(path))
+return Settings.builder().loadFromStream(path, getClass().getResourceAsStream(path), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
 .build();
@@ -141,7 +141,7 @@ public class AnalysisModuleTests extends ESTestCase {
 public void testVersionedAnalyzers() throws Exception {
 String yaml = "/org/elasticsearch/index/analysis/test1.yml";
 Settings settings2 = Settings.builder()
-.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
+.loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
 .build();

@@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -51,7 +52,7 @@ public class TypesExistsIT extends ESIntegTestCase {
 public void testSimple() throws Exception {
 Client client = client();
 CreateIndexResponse response1 = client.admin().indices().prepareCreate("test1")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())
 .addMapping("type2", jsonBuilder().startObject().startObject("type2").endObject().endObject())
 .execute().actionGet();
@@ -90,7 +90,7 @@ public class FlushIT extends ESIntegTestCase {

 public void testSyncedFlush() throws ExecutionException, InterruptedException, IOException {
 internalCluster().ensureAtLeastNumDataNodes(2);
-prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get();
+prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)).get();
 ensureGreen();

 final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex();

@@ -21,6 +21,7 @@ package org.elasticsearch.indices.mapping;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -128,11 +129,11 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {
 public void testGetFieldMappingsMultiType() throws Exception {
 assertTrue("remove this multi type test", Version.CURRENT.before(Version.fromString("7.0.0")));
 assertAcked(prepareCreate("indexa")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("typeA", getMappingForType("typeA"))
 .addMapping("typeB", getMappingForType("typeB")));
 assertAcked(client().admin().indices().prepareCreate("indexb")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("typeA", getMappingForType("typeA"))
 .addMapping("typeB", getMappingForType("typeB")));
@@ -23,6 +23,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -66,14 +67,14 @@ public class SimpleGetMappingsIT extends ESIntegTestCase {

 public void testSimpleGetMappings() throws Exception {
 client().admin().indices().prepareCreate("indexa")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("typeA", getMappingForType("typeA"))
 .addMapping("typeB", getMappingForType("typeB"))
 .addMapping("Atype", getMappingForType("Atype"))
 .addMapping("Btype", getMappingForType("Btype"))
 .execute().actionGet();
 client().admin().indices().prepareCreate("indexb")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("typeA", getMappingForType("typeA"))
 .addMapping("typeB", getMappingForType("typeB"))
 .addMapping("Atype", getMappingForType("Atype"))

@@ -353,7 +353,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
 public void testUpdateMappingOnAllTypes() throws IOException {
 assertTrue("remove this multi type test", Version.CURRENT.before(Version.fromString("7.0.0")));
 assertAcked(prepareCreate("index")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("type1", "f", "type=keyword").addMapping("type2", "f", "type=keyword"));

 assertAcked(client().admin().indices().preparePutMapping("index")
@@ -114,7 +114,8 @@ public class RareClusterStateIT extends ESIntegTestCase {
 public void testAssignmentWithJustAddedNodes() throws Exception {
 internalCluster().startNode();
 final String index = "index";
-prepareCreate(index).setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).get();
+prepareCreate(index).setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
 ensureGreen(index);

 // close to have some unassigned started shards shards..
@@ -176,7 +177,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
 internalCluster().startMasterOnlyNode();
 String dataNode = internalCluster().startDataOnlyNode();
 assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut());
-prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).addMapping("type").get();
+prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).addMapping("type").get();
 ensureGreen("test");

 // now that the cluster is stable, remove publishing timeout
@@ -193,8 +194,8 @@ public class RareClusterStateIT extends ESIntegTestCase {
 disruption.startDisrupting();
 logger.info("--> delete index and recreate it");
 assertFalse(client().admin().indices().prepareDelete("test").setTimeout("200ms").get().isAcknowledged());
-assertFalse(prepareCreate("test").setTimeout("200ms").setSettings(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0,
-IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey(), "0").get().isAcknowledged());
+assertFalse(prepareCreate("test").setTimeout("200ms").setSettings(Settings.builder().put(IndexMetaData
+.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey(), "0")).get().isAcknowledged());
 logger.info("--> letting cluster proceed");
 disruption.stopDisrupting();
 ensureGreen(TimeValue.timeValueMinutes(30), "test");
@@ -226,7 +226,8 @@ public class IndexStatsIT extends ESIntegTestCase {
 }

 public void testQueryCache() throws Exception {
-assertAcked(client().admin().indices().prepareCreate("idx").setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get());
+assertAcked(client().admin().indices().prepareCreate("idx")
+.setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)).get());
 ensureGreen();

 // index docs until we have at least one doc on each shard, otherwise, our tests will not work
@@ -390,7 +391,8 @@ public class IndexStatsIT extends ESIntegTestCase {

 public void testSimpleStats() throws Exception {
 // this test has some type stats tests that can be removed in 7.0
-assertAcked(prepareCreate("test1").setSettings("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
+assertAcked(prepareCreate("test1")
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))); // allows for multiple types
 createIndex("test2");
 ensureGreen();

@@ -556,7 +558,7 @@ public class IndexStatsIT extends ESIntegTestCase {

 public void testSegmentsStats() {
 assertAcked(prepareCreate("test_index")
-.setSettings(SETTING_NUMBER_OF_REPLICAS, between(0, 1)));
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1))));
 ensureGreen();

 NumShards test1 = getNumShards("test_index");
@@ -742,7 +744,7 @@ public class IndexStatsIT extends ESIntegTestCase {

 public void testFieldDataFieldsParam() throws Exception {
 assertAcked(client().admin().indices().prepareCreate("test1")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("doc", "bar", "type=text,fielddata=true",
 "baz", "type=text,fielddata=true").get());
@@ -392,7 +392,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .get();

 assertAcked(prepareCreate("test_index")
-.setSettings("index.version.created", Version.V_5_6_0.id) // allow for multiple version
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)) // allow for multiple version
 .addMapping("type1").addMapping("type2").addMapping("typeX").addMapping("typeY").addMapping("typeZ"));
 ensureGreen();

@@ -350,7 +350,7 @@ public class RelocationIT extends ESIntegTestCase {
 final String p_node = internalCluster().startNode();

 prepareCreate(indexName, Settings.builder()
-.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
 ).get();

 internalCluster().startNode();
@@ -21,6 +21,7 @@ package org.elasticsearch.script;

 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -83,5 +84,13 @@ public class ScriptTests extends ESTestCase {
 );
 }

+public void testParse() throws IOException {
+Script expectedScript = createScript();
+try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) {
+expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS);
+Settings settings = Settings.fromXContent(createParser(builder));
+Script actualScript = Script.parse(settings);
+assertThat(actualScript, equalTo(expectedScript));
+}
+}
 }
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
@@ -62,12 +63,14 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {

 @Override
 public void setupSuiteScopeCluster() throws Exception {
-assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(
+assertAcked(prepareCreate("test").setSettings(
+Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0)).addMapping(
 "book", "author", "type=keyword", "name", "type=keyword", "genre",
 "type=keyword", "price", "type=float"));
 createIndex("idx_unmapped");
 // idx_unmapped_author is same as main index but missing author field
-assertAcked(prepareCreate("idx_unmapped_author").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0)
+assertAcked(prepareCreate("idx_unmapped_author").setSettings(
+Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0))
 .addMapping("book", "name", "type=keyword", "genre", "type=keyword", "price",
 "type=float"));

@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
 import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator;
@@ -61,13 +62,16 @@ public class SamplerIT extends ESIntegTestCase {

 @Override
 public void setupSuiteScopeCluster() throws Exception {
-assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(
+assertAcked(prepareCreate("test")
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0))
+.addMapping(
 "book", "author", "type=keyword", "name", "type=text", "genre",
 "type=keyword", "price", "type=float"));
 createIndex("idx_unmapped");
 // idx_unmapped_author is same as main index but missing author field
-assertAcked(prepareCreate("idx_unmapped_author").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0)
-.addMapping("book", "name", "type=text", "genre", "type=keyword", "price", "type=float"));
+assertAcked(prepareCreate("idx_unmapped_author")
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0))
+.addMapping("book", "name", "type=text", "genre", "type=keyword", "price", "type=float"));

 ensureGreen();
 String data[] = {
@@ -543,8 +543,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
 }

 private void indexEqualTestData() throws ExecutionException, InterruptedException {
-assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("doc",
-"text", "type=text,fielddata=true", "class", "type=keyword"));
+assertAcked(prepareCreate("test")
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
+.addMapping("doc", "text", "type=text,fielddata=true", "class", "type=keyword"));
 createIndex("idx_unmapped");

 ensureGreen();

@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket;

 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
@@ -56,7 +57,7 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase {
 } else {
 textMappings = "type=text,fielddata=true";
 }
-assertAcked(prepareCreate(index).setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0)
+assertAcked(prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
 .addMapping(type, "text", textMappings));
 List<IndexRequestBuilder> indexBuilders = new ArrayList<>();

@@ -116,7 +117,8 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase {
 if (termtype.equals("text")) {
 termMappings += ",fielddata=true";
 }
-assertAcked(prepareCreate(index).setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(type, "text", termMappings));
+assertAcked(prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
+.addMapping(type, "text", termMappings));
 List<IndexRequestBuilder> indexBuilders = new ArrayList<>();

 addTermsDocs("1", 1, indexBuilders);//low doc freq but high score
@@ -635,7 +635,7 @@ public class InnerHitsIT extends ESIntegTestCase {

 public void testInnerHitsWithIgnoreUnmapped() throws Exception {
 assertAcked(prepareCreate("index1")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addMapping("parent_type", "nested_type", "type=nested")
 .addMapping("child_type", "_parent", "type=parent_type")
 );

@@ -2813,7 +2813,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
 public void testHighlightQueryRewriteDatesWithNow() throws Exception {
 assertAcked(client().admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date",
 "field", "type=text,store=true,term_vector=with_positions_offsets")
-.setSettings("index.number_of_replicas", 0, "index.number_of_shards", 2)
+.setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2))
 .get());
 DateTime now = new DateTime(ISOChronology.getInstanceUTC());
 indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"),
@@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -640,7 +641,7 @@ public class SearchFieldsIT extends ESIntegTestCase {

 public void testGetFieldsComplexField() throws Exception {
 client().admin().indices().prepareCreate("my-index")
-.setSettings("index.refresh_interval", -1)
+.setSettings(Settings.builder().put("index.refresh_interval", -1))
 .addMapping("doc", jsonBuilder()
 .startObject()
 .startObject("doc")

@@ -370,7 +370,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
 logger.info("Creating index test");
 int numOfTypes = randomIntBetween(2, 10);
 CreateIndexRequestBuilder createRequestBuilder = prepareCreate("test")
-.setSettings("index.version.created", Version.V_5_6_0.id);
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id));
 for (int i = 0; i < numOfTypes; i++) {
 createRequestBuilder.addMapping("type" + i, jsonBuilder().startObject().startObject("type" + i).startObject("properties")
 .startObject("text").field("type", "text").endObject()
@@ -403,7 +403,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
 logger.info("Creating the index ...");
 assertAcked(prepareCreate("test")
 .addMapping("type1", "text", "type=text,analyzer=keyword")
-.setSettings(SETTING_NUMBER_OF_SHARDS, 1));
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)));
 ensureGreen();

 logger.info("Indexing ...");
@@ -435,7 +435,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
 logger.info("Creating the index ...");
 assertAcked(prepareCreate("test")
 .addMapping("type1", "text", "type=text,analyzer=whitespace")
-.setSettings(SETTING_NUMBER_OF_SHARDS, 1));
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)));
 ensureGreen();

 logger.info("Indexing with each doc having one less term ...");
@@ -263,9 +263,10 @@ public class SearchQueryIT extends ESIntegTestCase {
 }

 public void testCommonTermsQuery() throws Exception {
 client().admin().indices().prepareCreate("test")
 .addMapping("type1", "field1", "type=text,analyzer=whitespace")
-.setSettings(SETTING_NUMBER_OF_SHARDS, 1).get();
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)).get();
 indexRandom(true, client().prepareIndex("test", "type1", "3").setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"),
 client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"),
 client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") );
@@ -554,7 +555,7 @@ public class SearchQueryIT extends ESIntegTestCase {
 }

 public void testTypeFilter() throws Exception {
-assertAcked(prepareCreate("test").setSettings("index.version.created", Version.V_5_6_0.id));
+assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)));
 indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
 client().prepareIndex("test", "type2", "1").setSource("field1", "value1"),
 client().prepareIndex("test", "type1", "2").setSource("field1", "value1"),
@@ -956,7 +957,7 @@ public class SearchQueryIT extends ESIntegTestCase {

 public void testQuotedQueryStringWithBoost() throws InterruptedException, ExecutionException {
 float boost = 10.0f;
-assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1));
+assertAcked(prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)));
 indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("important", "phrase match", "less_important", "nothing important"),
 client().prepareIndex("test", "type1", "2").setSource("important", "nothing important", "less_important", "phrase match")
 );
@@ -1219,7 +1220,7 @@ public class SearchQueryIT extends ESIntegTestCase {
 }

 public void testBasicQueryByIdMultiType() throws Exception {
-assertAcked(prepareCreate("test").setSettings("index.version.created", Version.V_5_6_0.id));
+assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)));

 client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get();
 client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get();
@@ -1392,7 +1393,7 @@ public class SearchQueryIT extends ESIntegTestCase {
 public void testMustNot() throws IOException, ExecutionException, InterruptedException {
 assertAcked(prepareCreate("test")
 //issue manifested only with shards>=2
-.setSettings(SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS)));
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS))));


 indexRandom(true, client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"),
@@ -533,11 +533,12 @@ public class SearchScrollIT extends ESIntegTestCase {
 public void testScrollInvalidDefaultKeepAlive() throws IOException {
 IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () ->
 client().admin().cluster().prepareUpdateSettings()
-.setPersistentSettings(Settings.builder().put("search.max_keep_alive", "1m", "search.default_keep_alive", "2m")).get());
+.setPersistentSettings(Settings.builder().put("search.max_keep_alive", "1m").put("search.default_keep_alive", "2m")).get
+());
 assertThat(exc.getMessage(), containsString("was (2 minutes > 1 minute)"));

 assertAcked(client().admin().cluster().prepareUpdateSettings()
-.setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m", "search.max_keep_alive", "5m")).get());
+.setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m").put("search.max_keep_alive", "5m")).get());

 assertAcked(client().admin().cluster().prepareUpdateSettings()
 .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "2m")).get());
@@ -566,7 +567,7 @@ public class SearchScrollIT extends ESIntegTestCase {
 }
 refresh();
 assertAcked(client().admin().cluster().prepareUpdateSettings()
-.setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m", "search.max_keep_alive", "5m")).get());
+.setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m").put("search.max_keep_alive", "5m")).get());

 Exception exc = expectThrows(Exception.class,
 () -> client().prepareSearch()
@@ -258,9 +258,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
 }

 public void testSimpleTerminateAfterCount() throws Exception {
-prepareCreate("test").setSettings(
-SETTING_NUMBER_OF_SHARDS, 1,
-SETTING_NUMBER_OF_REPLICAS, 0).get();
+prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)).get();
 ensureGreen();
 int max = randomIntBetween(3, 29);
 List<IndexRequestBuilder> docbuilders = new ArrayList<>(max);
@@ -364,8 +362,8 @@ public class SimpleSearchIT extends ESIntegTestCase {
 }

 public void testTooLargeFromAndSizeOkBySetting() throws Exception {
-prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(),
-IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2).get();
+prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(),
+IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2)).get();
 indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON));

 assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1);
@@ -390,7 +388,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
 }

 public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception {
-prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE).get();
+prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get();
 indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON));

 assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1);
@@ -409,8 +407,8 @@ public class SimpleSearchIT extends ESIntegTestCase {

 public void testTooLargeRescoreOkBySetting() throws Exception {
 int defaultMaxWindow = IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY);
-prepareCreate("idx").setSettings(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(),
-defaultMaxWindow * 2).get();
+prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2))
+.get();
 indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON));

 assertHitCount(
@@ -420,8 +418,9 @@ public class SimpleSearchIT extends ESIntegTestCase {

 public void testTooLargeRescoreOkByResultWindowSetting() throws Exception {
 int defaultMaxWindow = IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY);
-prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), // Note that this is the RESULT window.
-defaultMaxWindow * 2).get();
+prepareCreate("idx").setSettings(
+Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), // Note that this is the RESULT window.
+defaultMaxWindow * 2)).get();
 indexRandom(true, client().prepareIndex("idx", "type").setSource("{}", XContentType.JSON));

 assertHitCount(
@@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -70,8 +71,7 @@ public class SearchSliceIT extends ESIntegTestCase {
 .endObject().string();
 int numberOfShards = randomIntBetween(1, 7);
 assertAcked(client().admin().indices().prepareCreate("test")
-.setSettings("number_of_shards", numberOfShards,
-"index.max_slices_per_scroll", 10000)
+.setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000))
 .addMapping("type", mapping, XContentType.JSON));
 ensureGreen();

@@ -28,6 +28,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -1460,10 +1461,10 @@ public class FieldSortIT extends ESIntegTestCase {
 public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception {
 String sortField = "sortField";
 assertAcked(prepareCreate("test1")
-.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, maximumNumberOfShards()))
+.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, maximumNumberOfShards())))
 .addMapping("type", sortField, "type=long").get());
 assertAcked(prepareCreate("test2")
-.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1))
 .addMapping("type", sortField, "type=long").get());

 for (String index : new String[]{"test1", "test2"}) {
@@ -226,9 +226,7 @@ public class SuggestSearchIT extends ESIntegTestCase {

 // see #2729
 public void testSizeOneShard() throws Exception {
-prepareCreate("test").setSettings(
-SETTING_NUMBER_OF_SHARDS, 1,
-SETTING_NUMBER_OF_REPLICAS, 0).get();
+prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)).get();
 ensureGreen();

 for (int i = 0; i < 15; i++) {

@@ -678,7 +678,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
 }

 public void testThatSensitiveRepositorySettingsAreNotExposed() throws Exception {
-Settings nodeSettings = Settings.builder().put().build();
+Settings nodeSettings = Settings.EMPTY;
 logger.info("--> start two nodes");
 internalCluster().startNodes(2, nodeSettings);
 // Register mock repositories

@@ -462,7 +462,7 @@ public class UpdateIT extends ESIntegTestCase {

 public void testContextVariables() throws Exception {
 assertAcked(prepareCreate("test")
-.setSettings("index.version.created", Version.V_5_6_0.id)
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))
 .addAlias(new Alias("alias"))
 .addMapping("type1", XContentFactory.jsonBuilder()
 .startObject()
@@ -215,7 +215,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
 public void testExplainWithRewriteValidateQuery() throws Exception {
 client().admin().indices().prepareCreate("test")
 .addMapping("type1", "field", "type=text,analyzer=whitespace")
-.setSettings(SETTING_NUMBER_OF_SHARDS, 1).get();
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)).get();
 client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get();
 client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get();
 client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get();
@@ -258,7 +258,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
 public void testExplainWithRewriteValidateQueryAllShards() throws Exception {
 client().admin().indices().prepareCreate("test")
 .addMapping("type1", "field", "type=text,analyzer=whitespace")
-.setSettings(SETTING_NUMBER_OF_SHARDS, 2).get();
+.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2)).get();
 // We are relying on specific routing behaviors for the result to be right, so
 // we cannot randomize the number of shards or change ids here.
 client().prepareIndex("test", "type1", "1")

@@ -139,7 +139,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
 public void testCommonGramsAnalysis() throws IOException {
 String json = "/org/elasticsearch/analysis/common/commongrams.json";
 Settings settings = Settings.builder()
-.loadFromStream(json, getClass().getResourceAsStream(json))
+.loadFromStream(json, getClass().getResourceAsStream(json), false)
 .put(Environment.PATH_HOME_SETTING.getKey(), createHome())
 .build();
 {
@@ -226,7 +226,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
 public void testQueryModeCommonGramsAnalysis() throws IOException {
 String json = "/org/elasticsearch/analysis/common/commongrams_query_mode.json";
 Settings settings = Settings.builder()
-.loadFromStream(json, getClass().getResourceAsStream(json))
+.loadFromStream(json, getClass().getResourceAsStream(json), false)
 .put(Environment.PATH_HOME_SETTING.getKey(), createHome())
 .build();
 {
@@ -98,7 +98,7 @@ public class CompoundAnalysisTests extends ESTestCase {
 private Settings getJsonSettings() throws IOException {
 String json = "/org/elasticsearch/analysis/common/test1.json";
 return Settings.builder()
-.loadFromStream(json, getClass().getResourceAsStream(json))
+.loadFromStream(json, getClass().getResourceAsStream(json), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
 .build();
@@ -107,7 +107,7 @@ public class CompoundAnalysisTests extends ESTestCase {
 private Settings getYamlSettings() throws IOException {
 String yaml = "/org/elasticsearch/analysis/common/test1.yml";
 return Settings.builder()
-.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
+.loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
 .build();

@@ -76,7 +76,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
 }

 settings = Settings.builder().put(settings)
-.put("index.analysis.filter.non_broken_keep_filter.keep_words", new String[]{"test"})
+.putArray("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
 .build();
 try {
 // test our none existing setup is picked up

@@ -37,7 +37,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
 String json = "/org/elasticsearch/analysis/common/pattern_capture.json";
 Settings settings = Settings.builder()
 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-.loadFromStream(json, getClass().getResourceAsStream(json))
+.loadFromStream(json, getClass().getResourceAsStream(json), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .build();
@@ -20,6 +20,7 @@
 package org.elasticsearch.analysis.common;

 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -42,17 +43,18 @@ public class QueryStringWithAnalyzersTests extends ESIntegTestCase {
 */
 public void testCustomWordDelimiterQueryString() {
 assertAcked(client().admin().indices().prepareCreate("test")
-.setSettings("analysis.analyzer.my_analyzer.type", "custom",
-"analysis.analyzer.my_analyzer.tokenizer", "whitespace",
-"analysis.analyzer.my_analyzer.filter", "custom_word_delimiter",
-"analysis.filter.custom_word_delimiter.type", "word_delimiter",
-"analysis.filter.custom_word_delimiter.generate_word_parts", "true",
-"analysis.filter.custom_word_delimiter.generate_number_parts", "false",
-"analysis.filter.custom_word_delimiter.catenate_numbers", "true",
-"analysis.filter.custom_word_delimiter.catenate_words", "false",
-"analysis.filter.custom_word_delimiter.split_on_case_change", "false",
-"analysis.filter.custom_word_delimiter.split_on_numerics", "false",
-"analysis.filter.custom_word_delimiter.stem_english_possessive", "false")
+.setSettings(Settings.builder()
+.put("analysis.analyzer.my_analyzer.type", "custom")
+.put("analysis.analyzer.my_analyzer.tokenizer", "whitespace")
+.put("analysis.analyzer.my_analyzer.filter", "custom_word_delimiter")
+.put("analysis.filter.custom_word_delimiter.type", "word_delimiter")
+.put("analysis.filter.custom_word_delimiter.generate_word_parts", "true")
+.put("analysis.filter.custom_word_delimiter.generate_number_parts", "false")
+.put("analysis.filter.custom_word_delimiter.catenate_numbers", "true")
+.put("analysis.filter.custom_word_delimiter.catenate_words", "false")
+.put("analysis.filter.custom_word_delimiter.split_on_case_change", "false")
+.put("analysis.filter.custom_word_delimiter.split_on_numerics", "false")
+.put("analysis.filter.custom_word_delimiter.stem_english_possessive", "false"))
 .addMapping("type1",
 "field1", "type=text,analyzer=my_analyzer",
 "field2", "type=text,analyzer=my_analyzer"));
@@ -1128,7 +1128,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 .addMapping("child", "_parent", "type=parent"));
 } else {
 assertAcked(prepareCreate("test")
-.setSettings("index.refresh_interval", -1)
+.setSettings(Settings.builder().put("index.refresh_interval", -1))
 .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
 }
 ensureGreen();
@@ -1511,7 +1511,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 .addMapping("child", "_parent", "type=parent"));
 } else {
 assertAcked(prepareCreate("test")
-.setSettings("index.refresh_interval", -1)
+.setSettings(Settings.builder().put("index.refresh_interval", -1))
 .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
 }
 ensureGreen();

@@ -22,6 +22,7 @@ package org.elasticsearch.index.reindex;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
@@ -144,7 +145,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
 }

 public void testDeleteByQueryWithRouting() throws Exception {
-assertAcked(prepareCreate("test").setSettings("number_of_shards", 2));
+assertAcked(prepareCreate("test").setSettings(Settings.builder().put("number_of_shards", 2)));
 ensureGreen("test");

 final int docs = randomIntBetween(2, 10);
@@ -313,7 +314,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
 */
 public void testFilterByType() throws Exception {
 assertAcked(client().admin().indices().prepareCreate("test")
-.setSettings("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))); // allows for multiple types
 indexRandom(true,
 client().prepareIndex("test", "test1", "1").setSource("foo", "a"),
 client().prepareIndex("test", "test2", "2").setSource("foo", "a"),
@@ -21,6 +21,7 @@ package org.elasticsearch.index.reindex;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.join.ParentJoinPlugin;
@@ -113,7 +114,7 @@ public class ReindexParentChildTests extends ReindexTestCase {
 */
 public void testScriptAddsParent() throws Exception {
 assertAcked(client().admin().indices().prepareCreate("source")
-.setSettings("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
+.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))); // allows for multiple types

 createParentChildIndex("dest");
 createParentChildDocs("source", false);
@@ -149,7 +150,7 @@ public class ReindexParentChildTests extends ReindexTestCase {
 */
 private void createParentChildIndex(String indexName) throws Exception {
 CreateIndexRequestBuilder create = client().admin().indices().prepareCreate(indexName);
-create.setSettings("index.version.created", Version.V_5_6_0.id); // allows for multiple types
+create.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
 create.addMapping("city", "{\"_parent\": {\"type\": \"country\"}}", XContentType.JSON);
 create.addMapping("neighborhood", "{\"_parent\": {\"type\": \"city\"}}", XContentType.JSON);
 assertAcked(create);

@@ -99,7 +99,7 @@ public class URLSnapshotRestoreTests extends ESIntegTestCase {
 logger.info("--> create read-only URL repository");
 assertAcked(client.admin().cluster().preparePutRepository("url-repo")
 .setType(URLRepository.TYPE).setSettings(Settings.builder()
-.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL())
+.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString())
 .put("list_directories", randomBoolean())));
 logger.info("--> restore index after deletion");
 RestoreSnapshotResponse restoreSnapshotResponse = client
@@ -95,7 +95,7 @@ public class IcuTokenizerFactoryTests extends ESTestCase {
 String json = "/org/elasticsearch/index/analysis/icu_analysis.json";

 Settings settings = Settings.builder()
-.loadFromStream(json, IcuTokenizerFactoryTests.class.getResourceAsStream(json))
+.loadFromStream(json, IcuTokenizerFactoryTests.class.getResourceAsStream(json), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .build();
 Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();

@@ -208,7 +208,7 @@ public class KuromojiAnalysisTests extends ESTestCase {
 String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json";

 Settings settings = Settings.builder()
-.loadFromStream(json, KuromojiAnalysisTests.class.getResourceAsStream(json))
+.loadFromStream(json, KuromojiAnalysisTests.class.getResourceAsStream(json), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .build();
 Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();

@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf;
 public class SimplePhoneticAnalysisTests extends ESTestCase {
 public void testPhoneticTokenFilterFactory() throws IOException {
 String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml";
-Settings settings = Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml))
+Settings settings = Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
 .build();
 TestAnalysis analysis = createTestAnalysis(new Index("test", "_na_"), settings, new AnalysisPhoneticPlugin());
@@ -39,7 +39,7 @@ public class AnalysisTestsHelper {
 final String resource,
 final AnalysisPlugin... plugins) throws IOException {
 final Settings settings = Settings.builder()
-.loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
+.loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource), false)
 .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString())
 .build();

@@ -467,7 +467,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
 private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) {
 if (random.nextBoolean()) {
 builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(),
-random.nextBoolean() ? random.nextDouble() : random.nextBoolean());
+(random.nextBoolean() ? random.nextDouble() : random.nextBoolean()).toString());
 }
 switch (random.nextInt(4)) {
 case 3: