Return List instead of an array from settings (#26903)
Today we return a `String[]` that requires copying values on every access. Yet, we already store the setting as a list, so we can instead return the unmodifiable list directly. This makes list / array access in settings a much cheaper operation, especially when lists are large.
This commit is contained in:
parent
bf4c3642b2
commit
cdd7c1e6c2
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.cluster.routing.allocation.decider;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
|
@ -85,7 +86,7 @@ public class AwarenessAllocationDecider extends AllocationDecider {
|
|||
|
||||
private volatile String[] awarenessAttributes;
|
||||
|
||||
private volatile Map<String, String[]> forcedAwarenessAttributes;
|
||||
private volatile Map<String, List<String>> forcedAwarenessAttributes;
|
||||
|
||||
public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
|
||||
super(settings);
|
||||
|
@ -97,11 +98,11 @@ public class AwarenessAllocationDecider extends AllocationDecider {
|
|||
}
|
||||
|
||||
private void setForcedAwarenessAttributes(Settings forceSettings) {
|
||||
Map<String, String[]> forcedAwarenessAttributes = new HashMap<>();
|
||||
Map<String, List<String>> forcedAwarenessAttributes = new HashMap<>();
|
||||
Map<String, Settings> forceGroups = forceSettings.getAsGroups();
|
||||
for (Map.Entry<String, Settings> entry : forceGroups.entrySet()) {
|
||||
String[] aValues = entry.getValue().getAsArray("values");
|
||||
if (aValues.length > 0) {
|
||||
List<String> aValues = entry.getValue().getAsList("values");
|
||||
if (aValues.size() > 0) {
|
||||
forcedAwarenessAttributes.put(entry.getKey(), aValues);
|
||||
}
|
||||
}
|
||||
|
@ -169,7 +170,7 @@ public class AwarenessAllocationDecider extends AllocationDecider {
|
|||
}
|
||||
|
||||
int numberOfAttributes = nodesPerAttribute.size();
|
||||
String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
|
||||
List<String> fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
|
||||
if (fullValues != null) {
|
||||
for (String fullValue : fullValues) {
|
||||
if (!shardPerAttribute.containsKey(fullValue)) {
|
||||
|
|
|
@ -804,14 +804,14 @@ public class Setting<T> implements ToXContentObject {
|
|||
|
||||
private ListSetting(String key, Function<Settings, List<String>> defaultStringValue, Function<String, List<T>> parser,
|
||||
Property... properties) {
|
||||
super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser,
|
||||
super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser,
|
||||
properties);
|
||||
this.defaultStringValue = defaultStringValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getRaw(Settings settings) {
|
||||
String[] array = settings.getAsArray(getKey(), null);
|
||||
List<String> array = settings.getAsList(getKey(), null);
|
||||
return array == null ? defaultValue.apply(settings) : arrayToParsableString(array);
|
||||
}
|
||||
|
||||
|
@ -823,11 +823,11 @@ public class Setting<T> implements ToXContentObject {
|
|||
@Override
|
||||
public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
|
||||
if (exists(source) == false) {
|
||||
String[] asArray = defaultSettings.getAsArray(getKey(), null);
|
||||
if (asArray == null) {
|
||||
builder.putArray(getKey(), defaultStringValue.apply(defaultSettings));
|
||||
List<String> asList = defaultSettings.getAsList(getKey(), null);
|
||||
if (asList == null) {
|
||||
builder.putList(getKey(), defaultStringValue.apply(defaultSettings));
|
||||
} else {
|
||||
builder.putArray(getKey(), asArray);
|
||||
builder.putList(getKey(), asList);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1087,7 +1087,7 @@ public class Setting<T> implements ToXContentObject {
|
|||
}
|
||||
}
|
||||
|
||||
private static String arrayToParsableString(String[] array) {
|
||||
private static String arrayToParsableString(List<String> array) {
|
||||
try {
|
||||
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
|
||||
builder.startArray();
|
||||
|
|
|
@ -366,48 +366,48 @@ public final class Settings implements ToXContentFragment {
|
|||
}
|
||||
|
||||
/**
|
||||
* The values associated with a setting key as an array.
|
||||
* The values associated with a setting key as an immutable list.
|
||||
* <p>
|
||||
* It will also automatically load a comma separated list under the settingPrefix and merge with
|
||||
* the numbered format.
|
||||
*
|
||||
* @param key The setting prefix to load the array by
|
||||
* @return The setting array values
|
||||
* @param key The setting key to load the list by
|
||||
* @return The setting list values
|
||||
*/
|
||||
public String[] getAsArray(String key) throws SettingsException {
|
||||
return getAsArray(key, Strings.EMPTY_ARRAY, true);
|
||||
public List<String> getAsList(String key) throws SettingsException {
|
||||
return getAsList(key, Collections.emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
* The values associated with a setting key as an array.
|
||||
* The values associated with a setting key as an immutable list.
|
||||
* <p>
|
||||
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with
|
||||
* the numbered format.
|
||||
*
|
||||
* @param key The setting key to load the array by
|
||||
* @return The setting array values
|
||||
* @param key The setting key to load the list by
|
||||
* @return The setting list values
|
||||
*/
|
||||
public String[] getAsArray(String key, String[] defaultArray) throws SettingsException {
|
||||
return getAsArray(key, defaultArray, true);
|
||||
public List<String> getAsList(String key, List<String> defaultValue) throws SettingsException {
|
||||
return getAsList(key, defaultValue, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* The values associated with a setting key as an array.
|
||||
* The values associated with a setting key as an immutable list.
|
||||
* <p>
|
||||
* It will also automatically load a comma separated list under the settingPrefix and merge with
|
||||
* the numbered format.
|
||||
*
|
||||
* @param key The setting key to load the array by
|
||||
* @param defaultArray The default array to use if no value is specified
|
||||
* @param key The setting key to load the list by
|
||||
* @param defaultValue The default value to use if no value is specified
|
||||
* @param commaDelimited Whether to try to parse a string as a comma-delimited value
|
||||
* @return The setting array values
|
||||
* @return The setting list values
|
||||
*/
|
||||
public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException {
|
||||
public List<String> getAsList(String key, List<String> defaultValue, Boolean commaDelimited) throws SettingsException {
|
||||
List<String> result = new ArrayList<>();
|
||||
final Object valueFromPrefix = settings.get(key);
|
||||
if (valueFromPrefix != null) {
|
||||
if (valueFromPrefix instanceof List) {
|
||||
result = ((List<String>) valueFromPrefix);
|
||||
return ((List<String>) valueFromPrefix); // it's already unmodifiable since the builder puts it as a such
|
||||
} else if (commaDelimited) {
|
||||
String[] strings = Strings.splitStringByCommaToArray(get(key));
|
||||
if (strings.length > 0) {
|
||||
|
@ -421,9 +421,9 @@ public final class Settings implements ToXContentFragment {
|
|||
}
|
||||
|
||||
if (result.isEmpty()) {
|
||||
return defaultArray;
|
||||
return defaultValue;
|
||||
}
|
||||
return result.toArray(new String[result.size()]);
|
||||
return Collections.unmodifiableList(result);
|
||||
}
|
||||
|
||||
|
||||
|
@ -552,7 +552,7 @@ public final class Settings implements ToXContentFragment {
|
|||
if (value == null) {
|
||||
builder.putNull(key);
|
||||
} else if (value instanceof List) {
|
||||
builder.putArray(key, (List<String>) value);
|
||||
builder.putList(key, (List<String>) value);
|
||||
} else {
|
||||
builder.put(key, value.toString());
|
||||
}
|
||||
|
@ -679,7 +679,7 @@ public final class Settings implements ToXContentFragment {
|
|||
}
|
||||
String key = keyBuilder.toString();
|
||||
validateValue(key, list, builder, parser, allowNullValues);
|
||||
builder.putArray(key, list);
|
||||
builder.putList(key, list);
|
||||
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
|
||||
String key = keyBuilder.toString();
|
||||
validateValue(key, null, builder, parser, allowNullValues);
|
||||
|
@ -898,7 +898,7 @@ public final class Settings implements ToXContentFragment {
|
|||
}
|
||||
final Object value = source.settings.get(sourceKey);
|
||||
if (value instanceof List) {
|
||||
return putArray(key, (List)value);
|
||||
return putList(key, (List)value);
|
||||
} else if (value == null) {
|
||||
return putNull(key);
|
||||
} else {
|
||||
|
@ -1022,8 +1022,8 @@ public final class Settings implements ToXContentFragment {
|
|||
* @param values The values
|
||||
* @return The builder
|
||||
*/
|
||||
public Builder putArray(String setting, String... values) {
|
||||
return putArray(setting, Arrays.asList(values));
|
||||
public Builder putList(String setting, String... values) {
|
||||
return putList(setting, Arrays.asList(values));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1033,7 +1033,7 @@ public final class Settings implements ToXContentFragment {
|
|||
* @param values The values
|
||||
* @return The builder
|
||||
*/
|
||||
public Builder putArray(String setting, List<String> values) {
|
||||
public Builder putList(String setting, List<String> values) {
|
||||
remove(setting);
|
||||
map.put(setting, Collections.unmodifiableList(new ArrayList<>(values)));
|
||||
return this;
|
||||
|
|
|
@ -153,7 +153,7 @@ public class Environment {
|
|||
Settings.Builder finalSettings = Settings.builder().put(settings);
|
||||
finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile);
|
||||
if (PATH_DATA_SETTING.exists(settings)) {
|
||||
finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths);
|
||||
finalSettings.putList(PATH_DATA_SETTING.getKey(), dataPaths);
|
||||
}
|
||||
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
|
||||
this.settings = finalSettings.build();
|
||||
|
|
|
@ -68,7 +68,6 @@ import java.nio.charset.StandardCharsets;
|
|||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -105,10 +104,10 @@ public class Analysis {
|
|||
if ("_none_".equals(value)) {
|
||||
return CharArraySet.EMPTY_SET;
|
||||
}
|
||||
String[] stemExclusion = settings.getAsArray("stem_exclusion", null);
|
||||
List<String> stemExclusion = settings.getAsList("stem_exclusion", null);
|
||||
if (stemExclusion != null) {
|
||||
// LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
|
||||
return new CharArraySet(Arrays.asList(stemExclusion), false);
|
||||
return new CharArraySet(stemExclusion, false);
|
||||
} else {
|
||||
return defaultStemExclusion;
|
||||
}
|
||||
|
@ -161,7 +160,7 @@ public class Analysis {
|
|||
if ("_none_".equals(value)) {
|
||||
return CharArraySet.EMPTY_SET;
|
||||
} else {
|
||||
return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase);
|
||||
return resolveNamedWords(settings.getAsList(name), namedWords, ignoreCase);
|
||||
}
|
||||
}
|
||||
List<String> pathLoadedWords = getWordList(env, settings, name);
|
||||
|
@ -225,11 +224,11 @@ public class Analysis {
|
|||
String wordListPath = settings.get(settingPrefix + "_path", null);
|
||||
|
||||
if (wordListPath == null) {
|
||||
String[] explicitWordList = settings.getAsArray(settingPrefix, null);
|
||||
List<String> explicitWordList = settings.getAsList(settingPrefix, null);
|
||||
if (explicitWordList == null) {
|
||||
return null;
|
||||
} else {
|
||||
return Arrays.asList(explicitWordList);
|
||||
return explicitWordList;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -58,8 +58,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
|
|||
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
|
||||
}
|
||||
|
||||
String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
|
||||
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
|
||||
List<String> charFilterNames = analyzerSettings.getAsList("char_filter");
|
||||
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.size());
|
||||
for (String charFilterName : charFilterNames) {
|
||||
CharFilterFactory charFilter = charFilters.get(charFilterName);
|
||||
if (charFilter == null) {
|
||||
|
@ -74,8 +74,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
|
|||
|
||||
int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);
|
||||
|
||||
String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
|
||||
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
|
||||
List<String> tokenFilterNames = analyzerSettings.getAsList("filter");
|
||||
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.size());
|
||||
for (String tokenFilterName : tokenFilterNames) {
|
||||
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
|
||||
if (tokenFilter == null) {
|
||||
|
|
|
@ -50,8 +50,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
|
|||
throw new IllegalArgumentException("Custom normalizer [" + name() + "] cannot configure a tokenizer");
|
||||
}
|
||||
|
||||
String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
|
||||
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
|
||||
List<String> charFilterNames = analyzerSettings.getAsList("char_filter");
|
||||
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.size());
|
||||
for (String charFilterName : charFilterNames) {
|
||||
CharFilterFactory charFilter = charFilters.get(charFilterName);
|
||||
if (charFilter == null) {
|
||||
|
@ -66,8 +66,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
|
|||
charFiltersList.add(charFilter);
|
||||
}
|
||||
|
||||
String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
|
||||
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
|
||||
List<String> tokenFilterNames = analyzerSettings.getAsList("filter");
|
||||
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.size());
|
||||
for (String tokenFilterName : tokenFilterNames) {
|
||||
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
|
||||
if (tokenFilter == null) {
|
||||
|
|
|
@ -41,7 +41,7 @@ public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory {
|
|||
super(indexSettings, name, settings);
|
||||
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
|
||||
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
|
||||
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
|
||||
this.matcher = parseTokenChars(settings.getAsList("token_chars"));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.index.IndexSettings;
|
|||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -65,8 +66,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
|
|||
MATCHERS = unmodifiableMap(matchers);
|
||||
}
|
||||
|
||||
static CharMatcher parseTokenChars(String[] characterClasses) {
|
||||
if (characterClasses == null || characterClasses.length == 0) {
|
||||
static CharMatcher parseTokenChars(List<String> characterClasses) {
|
||||
if (characterClasses == null || characterClasses.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
CharMatcher.Builder builder = new CharMatcher.Builder();
|
||||
|
@ -85,7 +86,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
|
|||
super(indexSettings, name, settings);
|
||||
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
|
||||
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
|
||||
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
|
||||
this.matcher = parseTokenChars(settings.getAsList("token_chars"));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -62,7 +62,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
|
||||
protected Reader getRulesFromSettings(Environment env) {
|
||||
Reader rulesReader;
|
||||
if (settings.getAsArray("synonyms", null) != null) {
|
||||
if (settings.getAsList("synonyms", null) != null) {
|
||||
List<String> rulesList = Analysis.getWordList(env, settings, "synonyms");
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (String line : rulesList) {
|
||||
|
|
|
@ -40,7 +40,6 @@ import org.elasticsearch.indices.analysis.AnalysisModule;
|
|||
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
|
||||
import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;
|
||||
import org.elasticsearch.plugins.AnalysisPlugin;
|
||||
import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.IndexSettingsModule;
|
||||
|
||||
|
@ -73,7 +72,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
|
|||
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
|
||||
.put("index.analysis.analyzer.custom_analyzer.filter", "mock")
|
||||
.put("index.analysis.normalizer.my_normalizer.type", "custom")
|
||||
.putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
|
||||
.putList("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
|
||||
environment = new Environment(settings);
|
||||
AnalysisPlugin plugin = new AnalysisPlugin() {
|
||||
|
|
|
@ -210,7 +210,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
|
|||
Settings.Builder settings = Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.tv_test.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.tv_test.filter", "lowercase");
|
||||
.putList("index.analysis.analyzer.tv_test.filter", "lowercase");
|
||||
assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias)));
|
||||
}
|
||||
|
||||
|
|
|
@ -189,7 +189,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
|
|||
.setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
.putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "type1", Integer.toString(i))
|
||||
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
|
||||
|
@ -261,7 +261,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
|
|||
assertAcked(prepareCreate("test").addMapping("type1", mapping)
|
||||
.setSettings(Settings.builder()
|
||||
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
.putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "type1", Integer.toString(i))
|
||||
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
|
||||
|
@ -395,7 +395,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
|
|||
.setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
.putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
|
||||
|
||||
ensureGreen();
|
||||
|
||||
|
|
|
@ -152,7 +152,7 @@ public class GetTermVectorsTests extends ESSingleNodeTestCase {
|
|||
.field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
|
||||
Settings setting = Settings.builder()
|
||||
.put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
|
||||
.putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.type", "mock_payload_filter").build();
|
||||
|
|
|
@ -34,6 +34,8 @@ import org.elasticsearch.indices.recovery.RecoverySettings;
|
|||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.junit.After;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -242,11 +244,11 @@ public class ClusterSettingsIT extends ESIntegTestCase {
|
|||
public void testCanUpdateTracerSettings() {
|
||||
ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster()
|
||||
.prepareUpdateSettings()
|
||||
.setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*",
|
||||
.setTransientSettings(Settings.builder().putList("transport.tracer.include", "internal:index/shard/recovery/*",
|
||||
"internal:gateway/local*"))
|
||||
.get();
|
||||
assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*",
|
||||
"internal:gateway/local*"});
|
||||
assertEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsList("transport.tracer.include"),
|
||||
Arrays.asList("internal:index/shard/recovery/*", "internal:gateway/local*"));
|
||||
}
|
||||
|
||||
public void testUpdateDiscoveryPublishTimeout() {
|
||||
|
|
|
@ -179,8 +179,8 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
service.applySettings(Settings.builder()
|
||||
.put("foo.test.bar", 2)
|
||||
.put("foo.test_1.bar", 7)
|
||||
.putArray("foo.test_list.list", "16", "17")
|
||||
.putArray("foo.test_list_1.list", "18", "19", "20")
|
||||
.putList("foo.test_list.list", "16", "17")
|
||||
.putList("foo.test_list_1.list", "18", "19", "20")
|
||||
.build());
|
||||
assertEquals(2, intResults.get("test").intValue());
|
||||
assertEquals(7, intResults.get("test_1").intValue());
|
||||
|
@ -195,7 +195,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
service.applySettings(Settings.builder()
|
||||
.put("foo.test.bar", 2)
|
||||
.put("foo.test_1.bar", 8)
|
||||
.putArray("foo.test_list.list", "16", "17")
|
||||
.putList("foo.test_list.list", "16", "17")
|
||||
.putNull("foo.test_list_1.list")
|
||||
.build());
|
||||
assertNull("test wasn't changed", intResults.get("test"));
|
||||
|
@ -231,8 +231,8 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
service.applySettings(Settings.builder()
|
||||
.put("foo.test.bar", 2)
|
||||
.put("foo.test_1.bar", 7)
|
||||
.putArray("foo.test_list.list", "16", "17")
|
||||
.putArray("foo.test_list_1.list", "18", "19", "20")
|
||||
.putList("foo.test_list.list", "16", "17")
|
||||
.putList("foo.test_list_1.list", "18", "19", "20")
|
||||
.build());
|
||||
assertEquals(2, intResults.get("test").intValue());
|
||||
assertEquals(7, intResults.get("test_1").intValue());
|
||||
|
@ -247,7 +247,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
service.applySettings(Settings.builder()
|
||||
.put("foo.test.bar", 2)
|
||||
.put("foo.test_1.bar", 8)
|
||||
.putArray("foo.test_list.list", "16", "17")
|
||||
.putList("foo.test_list.list", "16", "17")
|
||||
.putNull("foo.test_list_1.list")
|
||||
.build());
|
||||
assertNull("test wasn't changed", intResults.get("test"));
|
||||
|
@ -470,14 +470,14 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
|
||||
assertEquals(2, diff.size());
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
|
||||
|
||||
diff = settings.diff(
|
||||
Settings.builder().put("foo.bar", 5).build(),
|
||||
Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
|
||||
Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
|
||||
assertEquals(2, diff.size());
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
|
||||
|
||||
diff = settings.diff(
|
||||
Settings.builder().put("some.group.foo", 5).build(),
|
||||
|
@ -485,7 +485,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
assertEquals(4, diff.size());
|
||||
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
|
||||
assertNull(diff.get("some.group.foo"));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
|
||||
|
||||
|
@ -495,7 +495,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
assertEquals(4, diff.size());
|
||||
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
|
||||
assertNull(diff.get("some.prefix.foo.somekey"));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
|
||||
}
|
||||
|
@ -513,14 +513,14 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
|
||||
assertEquals(1, diff.size());
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
|
||||
diff = settings.diff(
|
||||
Settings.builder().put("foo.bar", 5).build(),
|
||||
Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
|
||||
Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
|
||||
assertEquals(2, diff.size());
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
|
||||
|
||||
diff = settings.diff(
|
||||
Settings.builder().put("some.group.foo", 5).build(),
|
||||
|
@ -528,7 +528,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
assertEquals(3, diff.size());
|
||||
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
|
||||
assertNull(diff.get("some.group.foo"));
|
||||
assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
|
||||
|
||||
|
@ -538,21 +538,21 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
assertEquals(3, diff.size());
|
||||
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
|
||||
assertNull(diff.get("some.prefix.foo.somekey"));
|
||||
assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
|
||||
|
||||
diff = settings.diff(
|
||||
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
|
||||
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18)
|
||||
.putArray("foo.bar.quux", "x", "y", "z")
|
||||
.putArray("foo.baz.quux", "d", "e", "f")
|
||||
.putList("foo.bar.quux", "x", "y", "z")
|
||||
.putList("foo.baz.quux", "d", "e", "f")
|
||||
.build());
|
||||
assertEquals(5, diff.size());
|
||||
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
|
||||
assertNull(diff.get("some.prefix.foo.somekey"));
|
||||
assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"x", "y", "z"});
|
||||
assertArrayEquals(diff.getAsArray("foo.baz.quux", null), new String[] {"d", "e", "f"});
|
||||
assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("x", "y", "z"));
|
||||
assertEquals(diff.getAsList("foo.baz.quux", null), Arrays.asList("d", "e", "f"));
|
||||
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
|
||||
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
|
||||
}
|
||||
|
@ -562,7 +562,7 @@ public class ScopedSettingsTests extends ESTestCase {
|
|||
AtomicReference<List<String>> ref = new AtomicReference<>();
|
||||
settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set);
|
||||
settings.applySettings(Settings.builder()
|
||||
.putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
|
||||
.putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
|
||||
assertNotNull(ref.get().size());
|
||||
assertEquals(ref.get().size(), 2);
|
||||
assertTrue(ref.get().contains("internal:index/shard/recovery/*"));
|
||||
|
|
|
@ -441,7 +441,7 @@ public class SettingTests extends ESTestCase {
|
|||
assertEquals("foo,bar", value.get(0));
|
||||
|
||||
List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
|
||||
Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
|
||||
Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
|
||||
assertTrue(listSetting.exists(builder.build()));
|
||||
value = listSetting.get(builder.build());
|
||||
assertEquals(input.size(), value.size());
|
||||
|
@ -464,11 +464,11 @@ public class SettingTests extends ESTestCase {
|
|||
assertEquals(input.size(), ref.get().size());
|
||||
assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0]));
|
||||
|
||||
settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build());
|
||||
settingUpdater.apply(Settings.builder().putList("foo.bar", "123").build(), builder.build());
|
||||
assertEquals(1, ref.get().size());
|
||||
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"});
|
||||
|
||||
settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build());
|
||||
settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putList("foo.bar", "123").build());
|
||||
assertEquals(3, ref.get().size());
|
||||
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"});
|
||||
|
||||
|
@ -492,17 +492,17 @@ public class SettingTests extends ESTestCase {
|
|||
assertEquals(1, value.size());
|
||||
assertEquals("foo,bar", value.get(0));
|
||||
|
||||
value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build());
|
||||
value = settingWithFallback.get(Settings.builder().putList("foo.bar", "1", "2").build());
|
||||
assertEquals(2, value.size());
|
||||
assertEquals("1", value.get(0));
|
||||
assertEquals("2", value.get(1));
|
||||
|
||||
value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build());
|
||||
value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").build());
|
||||
assertEquals(2, value.size());
|
||||
assertEquals("3", value.get(0));
|
||||
assertEquals("4", value.get(1));
|
||||
|
||||
value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build());
|
||||
value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").putList("foo.bar", "1", "2").build());
|
||||
assertEquals(2, value.size());
|
||||
assertEquals("3", value.get(0));
|
||||
assertEquals("4", value.get(1));
|
||||
|
@ -512,7 +512,7 @@ public class SettingTests extends ESTestCase {
|
|||
Setting<List<String>> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(),
|
||||
Property.Dynamic, Property.NodeScope);
|
||||
List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
|
||||
Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
|
||||
Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
|
||||
// try to parse this really annoying format
|
||||
for (String key : builder.keys()) {
|
||||
assertTrue("key: " + key + " doesn't match", listSetting.match(key));
|
||||
|
@ -601,11 +601,11 @@ public class SettingTests extends ESTestCase {
|
|||
(key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope));
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("foo.1.bar", "1", "2")
|
||||
.putArray("foo.2.bar", "3", "4", "5")
|
||||
.putArray("foo.bar", "6")
|
||||
.putArray("some.other", "6")
|
||||
.putArray("foo.3.bar", "6")
|
||||
.putList("foo.1.bar", "1", "2")
|
||||
.putList("foo.2.bar", "3", "4", "5")
|
||||
.putList("foo.bar", "6")
|
||||
.putList("some.other", "6")
|
||||
.putList("foo.3.bar", "6")
|
||||
.build();
|
||||
Stream<Setting<List<String>>> allConcreteSettings = listAffixSetting.getAllConcreteSettings(settings);
|
||||
Map<String, List<String>> collect = allConcreteSettings.collect(Collectors.toMap(Setting::getKey, (s) -> s.get(settings)));
|
||||
|
|
|
@ -35,11 +35,10 @@ import org.hamcrest.CoreMatchers;
|
|||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.StringBufferInputStream;
|
||||
import java.io.StringReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
|
@ -47,7 +46,7 @@ import java.util.Map;
|
|||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.Matchers.arrayContaining;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -165,99 +164,99 @@ public class SettingsTests extends ESTestCase {
|
|||
public void testThatArraysAreOverriddenCorrectly() throws IOException {
|
||||
// overriding a single value with an array
|
||||
Settings settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1").build())
|
||||
.put(Settings.builder().putArray("value", "2", "3").build())
|
||||
.put(Settings.builder().putList("value", "1").build())
|
||||
.put(Settings.builder().putList("value", "2", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
|
||||
assertThat(settings.getAsList("value"), contains("2", "3"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().put("value", "1").build())
|
||||
.put(Settings.builder().putArray("value", "2", "3").build())
|
||||
.put(Settings.builder().putList("value", "2", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
|
||||
assertThat(settings.getAsList("value"), contains("2", "3"));
|
||||
settings = Settings.builder().loadFromSource("value: 1", XContentType.YAML)
|
||||
.loadFromSource("value: [ 2, 3 ]", XContentType.YAML)
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
|
||||
assertThat(settings.getAsList("value"), contains("2", "3"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().put("value.with.deep.key", "1").build())
|
||||
.put(Settings.builder().putArray("value.with.deep.key", "2", "3").build())
|
||||
.put(Settings.builder().putList("value.with.deep.key", "2", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value.with.deep.key"), arrayContaining("2", "3"));
|
||||
assertThat(settings.getAsList("value.with.deep.key"), contains("2", "3"));
|
||||
|
||||
// overriding an array with a shorter array
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1", "2").build())
|
||||
.put(Settings.builder().putArray("value", "3").build())
|
||||
.put(Settings.builder().putList("value", "1", "2").build())
|
||||
.put(Settings.builder().putList("value", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("3"));
|
||||
assertThat(settings.getAsList("value"), contains("3"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1", "2", "3").build())
|
||||
.put(Settings.builder().putArray("value", "4", "5").build())
|
||||
.put(Settings.builder().putList("value", "1", "2", "3").build())
|
||||
.put(Settings.builder().putList("value", "4", "5").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsList("value"), contains("4", "5"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
|
||||
.put(Settings.builder().putArray("value.deep.key", "4", "5").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "4", "5").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
|
||||
|
||||
// overriding an array with a longer array
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1", "2").build())
|
||||
.put(Settings.builder().putArray("value", "3", "4", "5").build())
|
||||
.put(Settings.builder().putList("value", "1", "2").build())
|
||||
.put(Settings.builder().putList("value", "3", "4", "5").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("3", "4", "5"));
|
||||
assertThat(settings.getAsList("value"), contains("3", "4", "5"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
|
||||
.put(Settings.builder().putArray("value.deep.key", "4", "5").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "4", "5").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
|
||||
|
||||
// overriding an array with a single value
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1", "2").build())
|
||||
.put(Settings.builder().putList("value", "1", "2").build())
|
||||
.put(Settings.builder().put("value", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("3"));
|
||||
assertThat(settings.getAsList("value"), contains("3"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value.deep.key", "1", "2").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "1", "2").build())
|
||||
.put(Settings.builder().put("value.deep.key", "3").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value.deep.key"), arrayContaining("3"));
|
||||
assertThat(settings.getAsList("value.deep.key"), contains("3"));
|
||||
|
||||
// test that other arrays are not overridden
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "1", "2", "3").putArray("a", "b", "c").build())
|
||||
.put(Settings.builder().putArray("value", "4", "5").putArray("d", "e", "f").build())
|
||||
.put(Settings.builder().putList("value", "1", "2", "3").putList("a", "b", "c").build())
|
||||
.put(Settings.builder().putList("value", "4", "5").putList("d", "e", "f").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsArray("a"), arrayContaining("b", "c"));
|
||||
assertThat(settings.getAsArray("d"), arrayContaining("e", "f"));
|
||||
assertThat(settings.getAsList("value"), contains("4", "5"));
|
||||
assertThat(settings.getAsList("a"), contains("b", "c"));
|
||||
assertThat(settings.getAsList("d"), contains("e", "f"));
|
||||
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value.deep.key", "1", "2", "3").putArray("a", "b", "c").build())
|
||||
.put(Settings.builder().putArray("value.deep.key", "4", "5").putArray("d", "e", "f").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "1", "2", "3").putList("a", "b", "c").build())
|
||||
.put(Settings.builder().putList("value.deep.key", "4", "5").putList("d", "e", "f").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsArray("a"), notNullValue());
|
||||
assertThat(settings.getAsArray("d"), notNullValue());
|
||||
assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
|
||||
assertThat(settings.getAsList("a"), notNullValue());
|
||||
assertThat(settings.getAsList("d"), notNullValue());
|
||||
|
||||
// overriding a deeper structure with an array
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().put("value.data", "1").build())
|
||||
.put(Settings.builder().putArray("value", "4", "5").build())
|
||||
.put(Settings.builder().putList("value", "4", "5").build())
|
||||
.build();
|
||||
assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
|
||||
assertThat(settings.getAsList("value"), contains("4", "5"));
|
||||
|
||||
// overriding an array with a deeper structure
|
||||
settings = Settings.builder()
|
||||
.put(Settings.builder().putArray("value", "4", "5").build())
|
||||
.put(Settings.builder().putList("value", "4", "5").build())
|
||||
.put(Settings.builder().put("value.data", "1").build())
|
||||
.build();
|
||||
assertThat(settings.get("value.data"), is("1"));
|
||||
|
@ -477,7 +476,7 @@ public class SettingsTests extends ESTestCase {
|
|||
Settings.Builder builder = Settings.builder();
|
||||
builder.put("test.key1.baz", "blah1");
|
||||
builder.putNull("test.key3.bar");
|
||||
builder.putArray("test.key4.foo", "1", "2");
|
||||
builder.putList("test.key4.foo", "1", "2");
|
||||
builder.setSecureSettings(secureSettings);
|
||||
assertEquals(7, builder.build().size());
|
||||
Settings.writeSettingsToStream(builder.build(), out);
|
||||
|
@ -487,7 +486,7 @@ public class SettingsTests extends ESTestCase {
|
|||
assertEquals("blah1", settings.get("test.key1.baz"));
|
||||
assertNull(settings.get("test.key3.bar"));
|
||||
assertTrue(settings.keySet().contains("test.key3.bar"));
|
||||
assertArrayEquals(new String[] {"1", "2"}, settings.getAsArray("test.key4.foo"));
|
||||
assertEquals(Arrays.asList("1", "2"), settings.getAsList("test.key4.foo"));
|
||||
}
|
||||
|
||||
public void testSecureSettingConflict() {
|
||||
|
@ -508,7 +507,7 @@ public class SettingsTests extends ESTestCase {
|
|||
|
||||
public void testToAndFromXContent() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("foo.bar.baz", "1", "2", "3")
|
||||
.putList("foo.bar.baz", "1", "2", "3")
|
||||
.put("foo.foobar", 2)
|
||||
.put("rootfoo", "test")
|
||||
.put("foo.baz", "1,2,3,4")
|
||||
|
@ -522,7 +521,7 @@ public class SettingsTests extends ESTestCase {
|
|||
XContentParser parser = createParser(builder);
|
||||
Settings build = Settings.fromXContent(parser);
|
||||
assertEquals(5, build.size());
|
||||
assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz"));
|
||||
assertEquals(Arrays.asList("1", "2", "3"), build.getAsList("foo.bar.baz"));
|
||||
assertEquals(2, build.getAsInt("foo.foobar", 0).intValue());
|
||||
assertEquals("test", build.get("rootfoo"));
|
||||
assertEquals("1,2,3,4", build.get("foo.baz"));
|
||||
|
@ -542,9 +541,9 @@ public class SettingsTests extends ESTestCase {
|
|||
// check array
|
||||
assertNull(settings.get("test1.test3.0"));
|
||||
assertNull(settings.get("test1.test3.1"));
|
||||
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
|
||||
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
|
||||
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
|
||||
assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
|
||||
assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
|
||||
assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
|
||||
}
|
||||
|
||||
public void testDuplicateKeysThrowsException() {
|
||||
|
@ -575,14 +574,14 @@ public class SettingsTests extends ESTestCase {
|
|||
|
||||
public void testToXContent() throws IOException {
|
||||
// this is just terrible but it's the existing behavior!
|
||||
Settings test = Settings.builder().putArray("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
|
||||
Settings test = Settings.builder().putList("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
|
||||
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
|
||||
builder.startObject();
|
||||
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
|
||||
builder.endObject();
|
||||
assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());
|
||||
|
||||
test = Settings.builder().putArray("foo.bar", "1", "2", "3").build();
|
||||
test = Settings.builder().putList("foo.bar", "1", "2", "3").build();
|
||||
builder = XContentBuilder.builder(XContentType.JSON.xContent());
|
||||
builder.startObject();
|
||||
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
|
||||
|
@ -615,18 +614,18 @@ public class SettingsTests extends ESTestCase {
|
|||
// check array
|
||||
assertNull(settings.get("test1.test3.0"));
|
||||
assertNull(settings.get("test1.test3.1"));
|
||||
assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
|
||||
assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
|
||||
assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
|
||||
assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
|
||||
assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
|
||||
assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
|
||||
}
|
||||
|
||||
public void testYamlLegacyList() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.loadFromStream("foo.yml", new ByteArrayInputStream("foo.bar.baz.0: 1\nfoo.bar.baz.1: 2".getBytes(StandardCharsets.UTF_8)),
|
||||
false).build();
|
||||
assertThat(settings.getAsArray("foo.bar.baz").length, equalTo(2));
|
||||
assertThat(settings.getAsArray("foo.bar.baz")[0], equalTo("1"));
|
||||
assertThat(settings.getAsArray("foo.bar.baz")[1], equalTo("2"));
|
||||
assertThat(settings.getAsList("foo.bar.baz").size(), equalTo(2));
|
||||
assertThat(settings.getAsList("foo.bar.baz").get(0), equalTo("1"));
|
||||
assertThat(settings.getAsList("foo.bar.baz").get(1), equalTo("2"));
|
||||
}
|
||||
|
||||
public void testIndentation() throws Exception {
|
||||
|
@ -675,14 +674,14 @@ public class SettingsTests extends ESTestCase {
|
|||
in.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0));
|
||||
Settings settings = Settings.readSettingsFromStream(in);
|
||||
assertEquals(2, settings.size());
|
||||
assertArrayEquals(new String[]{"0", "1", "2", "3"}, settings.getAsArray("foo.bar"));
|
||||
assertEquals(Arrays.asList("0", "1", "2", "3"), settings.getAsList("foo.bar"));
|
||||
assertEquals("baz", settings.get("foo.bar.baz"));
|
||||
}
|
||||
|
||||
public void testWriteLegacyOutput() throws IOException {
|
||||
BytesStreamOutput output = new BytesStreamOutput();
|
||||
output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0));
|
||||
Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3")
|
||||
Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3")
|
||||
.put("foo.bar.baz", "baz").putNull("foo.null").build();
|
||||
Settings.writeSettingsToStream(settings, output);
|
||||
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
|
||||
|
@ -703,7 +702,7 @@ public class SettingsTests extends ESTestCase {
|
|||
in.setVersion(output.getVersion());
|
||||
Settings readSettings = Settings.readSettingsFromStream(in);
|
||||
assertEquals(3, readSettings.size());
|
||||
assertArrayEquals(new String[] {"0", "1", "2", "3"}, readSettings.getAsArray("foo.bar"));
|
||||
assertEquals(Arrays.asList("0", "1", "2", "3"), readSettings.getAsList("foo.bar"));
|
||||
assertEquals(readSettings.get("foo.bar.baz"), "baz");
|
||||
assertTrue(readSettings.keySet().contains("foo.null"));
|
||||
assertNull(readSettings.get("foo.null"));
|
||||
|
@ -712,18 +711,18 @@ public class SettingsTests extends ESTestCase {
|
|||
public void testReadWriteArray() throws IOException {
|
||||
BytesStreamOutput output = new BytesStreamOutput();
|
||||
output.setVersion(randomFrom(Version.CURRENT, Version.V_6_1_0));
|
||||
Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
|
||||
Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
|
||||
Settings.writeSettingsToStream(settings, output);
|
||||
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
|
||||
Settings build = Settings.readSettingsFromStream(in);
|
||||
assertEquals(2, build.size());
|
||||
assertArrayEquals(build.getAsArray("foo.bar"), new String[] {"0", "1", "2", "3"});
|
||||
assertEquals(build.getAsList("foo.bar"), Arrays.asList("0", "1", "2", "3"));
|
||||
assertEquals(build.get("foo.bar.baz"), "baz");
|
||||
}
|
||||
|
||||
public void testCopy() {
|
||||
Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
|
||||
assertArrayEquals(new String[] {"0", "1", "2", "3"}, Settings.builder().copy("foo.bar", settings).build().getAsArray("foo.bar"));
|
||||
Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
|
||||
assertEquals(Arrays.asList("0", "1", "2", "3"), Settings.builder().copy("foo.bar", settings).build().getAsList("foo.bar"));
|
||||
assertEquals("baz", Settings.builder().copy("foo.bar.baz", settings).build().get("foo.bar.baz"));
|
||||
assertNull(Settings.builder().copy("foo.bar.baz", settings).build().get("test"));
|
||||
assertTrue(Settings.builder().copy("test", settings).build().keySet().contains("test"));
|
||||
|
|
|
@ -59,7 +59,6 @@ import java.util.concurrent.CountDownLatch;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static java.util.Collections.singleton;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
|
||||
|
@ -137,7 +136,7 @@ public class ZenFaultDetectionTests extends ESTestCase {
|
|||
Settings.builder()
|
||||
.put(settings)
|
||||
// trace zenfd actions but keep the default otherwise
|
||||
.putArray(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
|
||||
.putList(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
|
||||
.build(),
|
||||
new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService,
|
||||
namedWriteableRegistry, new NetworkService(Collections.emptyList()), version),
|
||||
|
|
|
@ -179,7 +179,7 @@ public class UnicastZenPingTests extends ESTestCase {
|
|||
final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build();
|
||||
|
||||
Settings hostsSettings = Settings.builder()
|
||||
.putArray("discovery.zen.ping.unicast.hosts",
|
||||
.putList("discovery.zen.ping.unicast.hosts",
|
||||
NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())),
|
||||
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())),
|
||||
NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())),
|
||||
|
@ -305,7 +305,7 @@ public class UnicastZenPingTests extends ESTestCase {
|
|||
new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort()))});
|
||||
|
||||
final Settings hostsSettings = Settings.builder()
|
||||
.putArray("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
|
||||
.putList("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
|
||||
.put("cluster.name", "test")
|
||||
.build();
|
||||
|
||||
|
@ -589,7 +589,7 @@ public class UnicastZenPingTests extends ESTestCase {
|
|||
final boolean useHosts = randomBoolean();
|
||||
final Settings.Builder hostsSettingsBuilder = Settings.builder().put("cluster.name", "test");
|
||||
if (useHosts) {
|
||||
hostsSettingsBuilder.putArray("discovery.zen.ping.unicast.hosts",
|
||||
hostsSettingsBuilder.putList("discovery.zen.ping.unicast.hosts",
|
||||
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort()))
|
||||
);
|
||||
} else {
|
||||
|
|
|
@ -42,7 +42,7 @@ public class EnvironmentTests extends ESTestCase {
|
|||
Settings build = Settings.builder()
|
||||
.put(settings)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
||||
return new Environment(build);
|
||||
}
|
||||
|
||||
|
@ -50,7 +50,7 @@ public class EnvironmentTests extends ESTestCase {
|
|||
Environment environment = newEnvironment();
|
||||
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
|
||||
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
|
||||
environment = newEnvironment(Settings.builder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
|
||||
environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
|
||||
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
|
||||
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
|
||||
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
|
||||
|
|
|
@ -483,7 +483,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
|||
public Settings buildEnvSettings(Settings settings) {
|
||||
return Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
|
||||
.put(settings).build();
|
||||
}
|
||||
|
||||
|
@ -491,7 +491,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
|||
Settings build = Settings.builder()
|
||||
.put(settings)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||
return new NodeEnvironment(build, new Environment(build));
|
||||
}
|
||||
|
||||
|
@ -500,7 +500,7 @@ public class NodeEnvironmentTests extends ESTestCase {
|
|||
.put(settings)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
|
||||
return new NodeEnvironment(build, new Environment(build));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -543,7 +543,7 @@ public class IndexSettingsTests extends ESTestCase {
|
|||
);
|
||||
assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("body")));
|
||||
index.updateIndexMetaData(
|
||||
newIndexMeta("index", Settings.builder().putArray("index.query.default_field", "body", "title").build())
|
||||
newIndexMeta("index", Settings.builder().putList("index.query.default_field", "body", "title").build())
|
||||
);
|
||||
assertThat(index.getDefaultFields(), equalTo(Arrays.asList("body", "title")));
|
||||
}
|
||||
|
|
|
@ -26,8 +26,6 @@ import org.apache.lucene.search.SortedSetSortField;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -80,7 +78,7 @@ public class IndexSortIT extends ESIntegTestCase {
|
|||
.put(indexSettings())
|
||||
.put("index.number_of_shards", "1")
|
||||
.put("index.number_of_replicas", "1")
|
||||
.putArray("index.sort.field", "date", "numeric_dv", "keyword_dv")
|
||||
.putList("index.sort.field", "date", "numeric_dv", "keyword_dv")
|
||||
)
|
||||
.addMapping("test", TEST_MAPPING)
|
||||
.get();
|
||||
|
@ -99,7 +97,7 @@ public class IndexSortIT extends ESIntegTestCase {
|
|||
() -> prepareCreate("test")
|
||||
.setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.putArray("index.sort.field", "invalid_field")
|
||||
.putList("index.sort.field", "invalid_field")
|
||||
)
|
||||
.addMapping("test", TEST_MAPPING)
|
||||
.get()
|
||||
|
@ -110,7 +108,7 @@ public class IndexSortIT extends ESIntegTestCase {
|
|||
() -> prepareCreate("test")
|
||||
.setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.putArray("index.sort.field", "numeric")
|
||||
.putList("index.sort.field", "numeric")
|
||||
)
|
||||
.addMapping("test", TEST_MAPPING)
|
||||
.get()
|
||||
|
@ -121,7 +119,7 @@ public class IndexSortIT extends ESIntegTestCase {
|
|||
() -> prepareCreate("test")
|
||||
.setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.putArray("index.sort.field", "keyword")
|
||||
.putList("index.sort.field", "keyword")
|
||||
)
|
||||
.addMapping("test", TEST_MAPPING)
|
||||
.get()
|
||||
|
|
|
@ -76,9 +76,9 @@ public class IndexSortSettingsTests extends ESTestCase {
|
|||
|
||||
public void testIndexSortWithArrays() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("index.sort.field", "field1", "field2")
|
||||
.putArray("index.sort.order", "asc", "desc")
|
||||
.putArray("index.sort.missing", "_last", "_first")
|
||||
.putList("index.sort.field", "field1", "field2")
|
||||
.putList("index.sort.order", "asc", "desc")
|
||||
.putList("index.sort.missing", "_last", "_first")
|
||||
.build();
|
||||
IndexSettings indexSettings = indexSettings(settings);
|
||||
IndexSortConfig config = indexSettings.getIndexSortConfig();
|
||||
|
@ -108,7 +108,7 @@ public class IndexSortSettingsTests extends ESTestCase {
|
|||
public void testInvalidIndexSortWithArray() throws IOException {
|
||||
final Settings settings = Settings.builder()
|
||||
.put("index.sort.field", "field1")
|
||||
.putArray("index.sort.order", new String[] {"asc", "desc"})
|
||||
.putList("index.sort.order", new String[] {"asc", "desc"})
|
||||
.build();
|
||||
IllegalArgumentException exc =
|
||||
expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
|
||||
|
|
|
@ -129,9 +129,9 @@ public class AnalysisRegistryTests extends ESTestCase {
|
|||
.put("index.analysis.filter.testFilter.type", "mock")
|
||||
.put("index.analysis.filter.test_filter.type", "mock")
|
||||
.put("index.analysis.analyzer.custom_analyzer_with_camel_case.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
|
||||
.putList("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
|
||||
.put("index.analysis.analyzer.custom_analyzer_with_snake_case.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
|
||||
.putList("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
|
||||
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
|
||||
|
||||
|
@ -209,8 +209,8 @@ public class AnalysisRegistryTests extends ESTestCase {
|
|||
.builder()
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
|
||||
.putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
|
||||
.putList("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
|
||||
.putList("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
|
||||
.build();
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
|
||||
|
||||
|
|
|
@ -29,7 +29,6 @@ import java.io.FileNotFoundException;
|
|||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.charset.CharacterCodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.MalformedInputException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
|
@ -50,7 +49,7 @@ public class AnalysisTests extends ESTestCase {
|
|||
assertThat(set.contains("baz"), is(false));
|
||||
|
||||
/* Array */
|
||||
settings = Settings.builder().putArray("stem_exclusion", "foo","bar").build();
|
||||
settings = Settings.builder().putList("stem_exclusion", "foo","bar").build();
|
||||
set = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET);
|
||||
assertThat(set.contains("foo"), is(true));
|
||||
assertThat(set.contains("bar"), is(true));
|
||||
|
|
|
@ -42,7 +42,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
|
|||
|
||||
public void testBasics() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase")
|
||||
.putList("index.analysis.normalizer.my_normalizer.filter", "lowercase")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
|
||||
|
@ -57,7 +57,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
|
|||
public void testUnknownType() {
|
||||
Settings settings = Settings.builder()
|
||||
.put("index.analysis.normalizer.my_normalizer.type", "foobar")
|
||||
.putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
|
||||
.putList("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
|
@ -78,7 +78,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
|
|||
public void testCharFilters() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.put("index.analysis.char_filter.my_mapping.type", "mock_char_filter")
|
||||
.putArray("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
|
||||
.putList("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
|
||||
|
@ -92,7 +92,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
|
|||
|
||||
public void testIllegalFilters() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
|
||||
.putList("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
|
@ -102,7 +102,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
|
|||
|
||||
public void testIllegalCharFilters() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
|
||||
.putList("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
|
|
|
@ -83,11 +83,11 @@ public class SynonymsAnalysisTests extends ESTestCase {
|
|||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put("index.analysis.filter.synonym.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
|
||||
.putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
|
||||
.put("index.analysis.filter.stop_within_synonym.type", "stop")
|
||||
.putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
|
||||
.putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
|
||||
.put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
|
||||
.putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
|
||||
.build();
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
|
||||
try {
|
||||
|
@ -104,11 +104,11 @@ public class SynonymsAnalysisTests extends ESTestCase {
|
|||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put("path.home", createTempDir().toString())
|
||||
.put("index.analysis.filter.synonym_expand.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
|
||||
.putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
|
||||
.put("index.analysis.filter.stop_within_synonym.type", "stop")
|
||||
.putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
|
||||
.putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
|
||||
.put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
|
||||
.putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
|
||||
.build();
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
|
||||
try {
|
||||
|
|
|
@ -70,9 +70,9 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
|||
public void setup() {
|
||||
indexService = createIndex("test", Settings.builder()
|
||||
.put("index.analysis.normalizer.my_lowercase.type", "custom")
|
||||
.putArray("index.analysis.normalizer.my_lowercase.filter", "lowercase")
|
||||
.putList("index.analysis.normalizer.my_lowercase.filter", "lowercase")
|
||||
.put("index.analysis.normalizer.my_other_lowercase.type", "custom")
|
||||
.putArray("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
|
||||
.putList("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
}
|
||||
|
||||
|
|
|
@ -993,7 +993,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
QueryShardContext context = createShardContext();
|
||||
context.getIndexSettings().updateIndexMetaData(
|
||||
newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
|
||||
newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
|
||||
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
|
||||
);
|
||||
Query query = new QueryStringQueryBuilder("hello")
|
||||
|
@ -1008,7 +1008,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
// Reset the default value
|
||||
context.getIndexSettings().updateIndexMetaData(
|
||||
newIndexMeta("index",
|
||||
context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
|
||||
context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -575,7 +575,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
QueryShardContext context = createShardContext();
|
||||
context.getIndexSettings().updateIndexMetaData(
|
||||
newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
|
||||
newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
|
||||
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
|
||||
);
|
||||
Query query = new SimpleQueryStringBuilder("hello")
|
||||
|
@ -590,7 +590,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
// Reset the default value
|
||||
context.getIndexSettings().updateIndexMetaData(
|
||||
newIndexMeta("index",
|
||||
context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
|
||||
context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -52,15 +52,15 @@ public class MatchQueryIT extends ESIntegTestCase {
|
|||
Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.filter.syns.type", "synonym")
|
||||
.putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.put("index.analysis.analyzer.lower_syns.type", "custom")
|
||||
.put("index.analysis.analyzer.lower_syns.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
|
||||
.putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
|
||||
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
|
||||
.putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
|
||||
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
|
||||
.putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
|
||||
);
|
||||
|
||||
assertAcked(builder.addMapping(INDEX, createMapping()));
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.apache.lucene.search.SynonymQuery;
|
|||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
|
@ -47,7 +46,6 @@ import java.util.Arrays;
|
|||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class MultiMatchQueryTests extends ESSingleNodeTestCase {
|
||||
|
||||
|
@ -57,7 +55,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
|
|||
public void setup() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.put("index.analysis.filter.syns.type","synonym")
|
||||
.putArray("index.analysis.filter.syns.synonyms","quick,fast")
|
||||
.putList("index.analysis.filter.syns.synonyms","quick,fast")
|
||||
.put("index.analysis.analyzer.syns.tokenizer","standard")
|
||||
.put("index.analysis.analyzer.syns.filter","syns").build();
|
||||
IndexService indexService = createIndex("test", settings);
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.env.NodeEnvironment.NodePath;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.IndexSettingsModule;
|
||||
|
@ -168,7 +167,7 @@ public class NewPathForShardTests extends ESTestCase {
|
|||
|
||||
Settings settings = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), path)
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
|
||||
NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings));
|
||||
|
||||
// Make sure all our mocking above actually worked:
|
||||
|
|
|
@ -21,9 +21,7 @@ package org.elasticsearch.index.store;
|
|||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.FileSwitchDirectory;
|
||||
import org.apache.lucene.store.MMapDirectory;
|
||||
import org.apache.lucene.store.SimpleFSDirectory;
|
||||
import org.apache.lucene.store.SleepingLockWrapper;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexModule;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
|
@ -48,7 +46,7 @@ public class FsDirectoryServiceTests extends ESTestCase {
|
|||
private void doTestPreload(String...preload) throws IOException {
|
||||
Settings build = Settings.builder()
|
||||
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mmapfs")
|
||||
.putArray(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
|
||||
.putList(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
|
||||
.build();
|
||||
IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build);
|
||||
IndexStore store = new IndexStore(settings);
|
||||
|
|
|
@ -117,9 +117,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
|
||||
.setSettings(Settings.builder().put(indexSettings())
|
||||
.put("index.analysis.filter.syns.type", "synonym")
|
||||
.putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
|
||||
.putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
|
||||
.put("index.analysis.analyzer.custom_syns.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
|
||||
.putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
|
||||
ensureGreen();
|
||||
|
||||
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get();
|
||||
|
@ -446,7 +446,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
|
||||
.setSettings(Settings.builder().put(indexSettings())
|
||||
.put("index.analysis.normalizer.my_normalizer.type", "custom")
|
||||
.putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
|
||||
.putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
|
||||
.addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer"));
|
||||
ensureGreen("test");
|
||||
|
||||
|
|
|
@ -1358,9 +1358,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
Builder builder = Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
|
||||
.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
|
||||
.put("index.analysis.filter.synonym.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
|
||||
.putList("index.analysis.filter.synonym.synonyms", "quick => fast");
|
||||
|
||||
assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()));
|
||||
|
||||
|
@ -2773,9 +2773,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
Builder builder = Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
|
||||
.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
|
||||
.put("index.analysis.filter.synonym.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym.synonyms", "fast,quick");
|
||||
.putList("index.analysis.filter.synonym.synonyms", "fast,quick");
|
||||
|
||||
assertAcked(prepareCreate("test").setSettings(builder.build())
|
||||
.addMapping("type1", "field1",
|
||||
|
|
|
@ -38,9 +38,7 @@ import org.elasticsearch.search.SearchHit;
|
|||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.rescore.QueryRescoreMode;
|
||||
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
@ -159,9 +157,9 @@ public class QueryRescorerIT extends ESIntegTestCase {
|
|||
public void testMoreDocs() throws Exception {
|
||||
Builder builder = Settings.builder();
|
||||
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
|
||||
builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.put("index.analysis.filter.synonym.type", "synonym");
|
||||
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
|
@ -237,9 +235,9 @@ public class QueryRescorerIT extends ESIntegTestCase {
|
|||
public void testSmallRescoreWindow() throws Exception {
|
||||
Builder builder = Settings.builder();
|
||||
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
|
||||
builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.put("index.analysis.filter.synonym.type", "synonym");
|
||||
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
|
@ -309,9 +307,9 @@ public class QueryRescorerIT extends ESIntegTestCase {
|
|||
public void testRescorerMadeScoresWorse() throws Exception {
|
||||
Builder builder = Settings.builder();
|
||||
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
|
||||
builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
|
||||
builder.put("index.analysis.filter.synonym.type", "synonym");
|
||||
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.search.query;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
|
@ -52,7 +51,6 @@ import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
|
|||
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -266,10 +264,10 @@ public class QueryStringIT extends ESIntegTestCase {
|
|||
Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
|
||||
.putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
|
||||
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
|
||||
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
|
||||
.putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
|
||||
);
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties")
|
||||
|
|
|
@ -349,7 +349,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
.put(indexSettings())
|
||||
.put(SETTING_NUMBER_OF_SHARDS,1)
|
||||
.put("index.analysis.filter.syns.type","synonym")
|
||||
.putArray("index.analysis.filter.syns.synonyms","quick,fast")
|
||||
.putList("index.analysis.filter.syns.synonyms","quick,fast")
|
||||
.put("index.analysis.analyzer.syns.tokenizer","whitespace")
|
||||
.put("index.analysis.analyzer.syns.filter","syns")
|
||||
)
|
||||
|
@ -1572,9 +1572,9 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
.put("index.analysis.analyzer.index.filter", "lowercase")
|
||||
.put("index.analysis.analyzer.search.type", "custom")
|
||||
.put("index.analysis.analyzer.search.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
|
||||
.putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
|
||||
.put("index.analysis.filter.synonym.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
|
||||
.putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
|
||||
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
|
||||
|
||||
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
|
||||
|
@ -1602,9 +1602,9 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
.put("index.analysis.analyzer.index.filter", "lowercase")
|
||||
.put("index.analysis.analyzer.search.type", "custom")
|
||||
.put("index.analysis.analyzer.search.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
|
||||
.putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
|
||||
.put("index.analysis.filter.synonym.type", "synonym")
|
||||
.putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
|
||||
.putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
|
||||
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
|
||||
|
||||
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
|
||||
|
@ -1807,7 +1807,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
.put("index.analysis.tokenizer.my_ngram_tokenizer.type", "nGram")
|
||||
.put("index.analysis.tokenizer.my_ngram_tokenizer.min_gram", "1")
|
||||
.put("index.analysis.tokenizer.my_ngram_tokenizer.max_gram", "10")
|
||||
.putArray("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
|
||||
.putList("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
|
||||
assertAcked(builder.addMapping("test", "origin", "type=text,copy_to=meta", "meta", "type=text,analyzer=my_ngram_analyzer"));
|
||||
// we only have ngrams as the index analyzer so searches will get standard analyzer
|
||||
|
||||
|
|
|
@ -528,9 +528,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
|
|||
Settings.Builder settingsBuilder = Settings.builder()
|
||||
.put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom")
|
||||
.put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard")
|
||||
.putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
|
||||
.putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
|
||||
.put("analysis.filter.my_synonyms.type", "synonym")
|
||||
.putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed");
|
||||
.putList("analysis.filter.my_synonyms.synonyms", "foo,renamed");
|
||||
completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms");
|
||||
createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder);
|
||||
|
||||
|
@ -806,7 +806,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
|
|||
public void testThatSuggestStopFilterWorks() throws Exception {
|
||||
Settings.Builder settingsBuilder = Settings.builder()
|
||||
.put("index.analysis.analyzer.stoptest.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
|
||||
.putList("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
|
||||
.put("index.analysis.filter.suggest_stop_filter.type", "stop")
|
||||
.put("index.analysis.filter.suggest_stop_filter.remove_trailing", false);
|
||||
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptEngine;
|
||||
import org.elasticsearch.script.TemplateScript;
|
||||
|
@ -173,7 +172,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.put("index.analysis.analyzer.biword.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
|
||||
.putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
|
||||
.put("index.analysis.filter.shingler.type", "shingle")
|
||||
.put("index.analysis.filter.shingler.min_shingle_size", 2)
|
||||
.put("index.analysis.filter.shingler.max_shingle_size", 3));
|
||||
|
@ -253,7 +252,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.biword.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
|
||||
.putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
|
||||
.put("index.analysis.filter.shingler.type", "shingle")
|
||||
.put("index.analysis.filter.shingler.min_shingle_size", 2)
|
||||
.put("index.analysis.filter.shingler.max_shingle_size", 3));
|
||||
|
@ -427,7 +426,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings(
|
||||
Settings.builder()
|
||||
.put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.stopwd.filter", "stop")
|
||||
.putList("index.analysis.analyzer.stopwd.filter", "stop")
|
||||
));
|
||||
ensureGreen();
|
||||
index("test", "typ1", "1", "body", "this is a test");
|
||||
|
@ -444,9 +443,9 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put("index.analysis.analyzer.body.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.putList("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", false)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
@ -482,9 +481,9 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.body.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.putList("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", false)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
@ -615,9 +614,9 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put("index.analysis.analyzer.body.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.putList("index.analysis.analyzer.body.filter", "lowercase")
|
||||
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", false)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
@ -685,7 +684,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
|
||||
.putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
|
||||
.put("index.analysis.filter.shingler.type", "shingle")
|
||||
.put("index.analysis.filter.shingler.min_shingle_size", 2)
|
||||
.put("index.analysis.filter.shingler.max_shingle_size", 5)
|
||||
|
@ -745,7 +744,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
|
||||
.putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
|
||||
.put("index.analysis.filter.shingler.type", "shingle")
|
||||
.put("index.analysis.filter.shingler.min_shingle_size", 2)
|
||||
.put("index.analysis.filter.shingler.max_shingle_size", 5)
|
||||
|
@ -781,7 +780,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.body.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
|
||||
.putList("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", true)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
@ -836,7 +835,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
.put(indexSettings())
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
|
||||
.put("index.analysis.analyzer.text.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
|
||||
.putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", true)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
@ -1026,7 +1025,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
|
|||
.put(indexSettings())
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
|
||||
.put("index.analysis.analyzer.text.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
|
||||
.putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
|
||||
.put("index.analysis.filter.my_shingle.type", "shingle")
|
||||
.put("index.analysis.filter.my_shingle.output_unigrams", true)
|
||||
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
|
||||
|
|
|
@ -1827,7 +1827,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
|
|||
.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s")
|
||||
.put("index.analysis.analyzer.my_analyzer.type", "custom")
|
||||
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
|
||||
.putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
|
||||
.put("index.analysis.filter.my_synonym.type", "synonym")
|
||||
.put("index.analysis.filter.my_synonym.synonyms", "foo => bar");
|
||||
|
||||
|
|
|
@ -125,8 +125,8 @@ public class RemoteClusterServiceTests extends ESTestCase {
|
|||
transportService.start();
|
||||
transportService.acceptIncomingRequests();
|
||||
Settings.Builder builder = Settings.builder();
|
||||
builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
|
||||
builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
|
||||
builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
|
||||
builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
|
||||
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
|
||||
assertFalse(service.isCrossClusterSearchEnabled());
|
||||
service.initializeRemoteClusters();
|
||||
|
@ -171,8 +171,8 @@ public class RemoteClusterServiceTests extends ESTestCase {
|
|||
transportService.start();
|
||||
transportService.acceptIncomingRequests();
|
||||
Settings.Builder builder = Settings.builder();
|
||||
builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
|
||||
builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
|
||||
builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
|
||||
builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
|
||||
try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) {
|
||||
assertFalse(service.isCrossClusterSearchEnabled());
|
||||
service.initializeRemoteClusters();
|
||||
|
@ -225,9 +225,9 @@ public class RemoteClusterServiceTests extends ESTestCase {
|
|||
transportService.start();
|
||||
transportService.acceptIncomingRequests();
|
||||
final Settings.Builder builder = Settings.builder();
|
||||
builder.putArray(
|
||||
builder.putList(
|
||||
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
|
||||
builder.putArray(
|
||||
builder.putList(
|
||||
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
|
||||
try (RemoteClusterService service =
|
||||
new RemoteClusterService(settings, transportService)) {
|
||||
|
@ -302,9 +302,9 @@ public class RemoteClusterServiceTests extends ESTestCase {
|
|||
transportService.start();
|
||||
transportService.acceptIncomingRequests();
|
||||
final Settings.Builder builder = Settings.builder();
|
||||
builder.putArray(
|
||||
builder.putList(
|
||||
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
|
||||
builder.putArray(
|
||||
builder.putList(
|
||||
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
|
||||
try (RemoteClusterService service =
|
||||
new RemoteClusterService(settings, transportService)) {
|
||||
|
|
|
@ -180,9 +180,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").setSettings(
|
||||
Settings.builder().put(indexSettings())
|
||||
.put("index.analysis.filter.syns.type", "synonym")
|
||||
.putArray("index.analysis.filter.syns.synonyms", "one,two")
|
||||
.putList("index.analysis.filter.syns.synonyms", "one,two")
|
||||
.put("index.analysis.analyzer.syns.tokenizer", "standard")
|
||||
.putArray("index.analysis.analyzer.syns.filter", "syns")
|
||||
.putList("index.analysis.analyzer.syns.filter", "syns")
|
||||
).addMapping("test", "field","type=text,analyzer=syns"));
|
||||
ensureGreen();
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
|
|||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@ -53,10 +54,10 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
|
|||
CJKBigramFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
outputUnigrams = settings.getAsBoolean("output_unigrams", false);
|
||||
final String[] asArray = settings.getAsArray("ignored_scripts");
|
||||
final List<String> asArray = settings.getAsList("ignored_scripts");
|
||||
Set<String> scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul"));
|
||||
if (asArray != null) {
|
||||
scripts.removeAll(Arrays.asList(asArray));
|
||||
scripts.removeAll(asArray);
|
||||
}
|
||||
int flags = 0;
|
||||
for (String script : scripts) {
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.index.IndexSettings;
|
|||
import org.elasticsearch.index.analysis.AbstractCharFilterFactory;
|
||||
|
||||
import java.io.Reader;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.util.Collections.unmodifiableSet;
|
||||
|
@ -36,8 +37,8 @@ public class HtmlStripCharFilterFactory extends AbstractCharFilterFactory {
|
|||
|
||||
HtmlStripCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name);
|
||||
String[] escapedTags = settings.getAsArray("escaped_tags");
|
||||
if (escapedTags.length > 0) {
|
||||
List<String> escapedTags = settings.getAsList("escaped_tags");
|
||||
if (escapedTags.size() > 0) {
|
||||
this.escapedTags = unmodifiableSet(newHashSet(escapedTags));
|
||||
} else {
|
||||
this.escapedTags = null;
|
||||
|
|
|
@ -27,8 +27,8 @@ import org.elasticsearch.index.IndexSettings;
|
|||
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@ -48,12 +48,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory {
|
|||
KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
|
||||
final String[] arrayKeepTypes = settings.getAsArray(KEEP_TYPES_KEY, null);
|
||||
final List<String> arrayKeepTypes = settings.getAsList(KEEP_TYPES_KEY, null);
|
||||
if ((arrayKeepTypes == null)) {
|
||||
throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured");
|
||||
}
|
||||
|
||||
this.keepTypes = new HashSet<>(Arrays.asList(arrayKeepTypes));
|
||||
this.keepTypes = new HashSet<>(arrayKeepTypes);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.analysis.common;
|
|||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.miscellaneous.KeepWordFilter;
|
||||
import org.apache.lucene.util.Version;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
|
@ -31,6 +30,8 @@ import org.elasticsearch.index.analysis.Analysis;
|
|||
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A {@link TokenFilterFactory} for {@link KeepWordFilter}. This filter only
|
||||
* keep tokens that are contained in the term set configured via
|
||||
|
@ -61,7 +62,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
|
|||
KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
|
||||
final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null);
|
||||
final List<String> arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null);
|
||||
final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
|
||||
if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
|
||||
// we don't allow both or none
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment;
|
|||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFactory {
|
||||
|
@ -37,13 +38,13 @@ public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFa
|
|||
|
||||
PatternCaptureGroupTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false);
|
||||
List<String> regexes = settings.getAsList(PATTERNS_KEY, null, false);
|
||||
if (regexes == null) {
|
||||
throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]");
|
||||
}
|
||||
patterns = new Pattern[regexes.length];
|
||||
for (int i = 0; i < regexes.length; i++) {
|
||||
patterns[i] = Pattern.compile(regexes[i]);
|
||||
patterns = new Pattern[regexes.size()];
|
||||
for (int i = 0; i < regexes.size(); i++) {
|
||||
patterns[i] = Pattern.compile(regexes.get(i));
|
||||
}
|
||||
|
||||
preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, true);
|
||||
|
|
|
@ -56,7 +56,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
public void testWithoutCommonWordsMatch() throws IOException {
|
||||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
|
||||
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
|
||||
.putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
|
||||
|
@ -75,7 +75,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_default.query_mode", false)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
|
||||
.putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
{
|
||||
|
@ -94,7 +94,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_1.ignore_case", true)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
|
||||
|
@ -109,7 +109,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_2.ignore_case", false)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2");
|
||||
|
@ -122,7 +122,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
|
||||
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
|
@ -166,7 +166,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_1.query_mode", true)
|
||||
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
|
||||
.put("index.analysis.filter.common_grams_1.ignore_case", true)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
|
@ -181,7 +181,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_2.query_mode", true)
|
||||
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.put("index.analysis.filter.common_grams_2.ignore_case", false)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
|
@ -196,7 +196,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_3.query_mode", true)
|
||||
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
|
@ -210,7 +210,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
{
|
||||
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_4.type", "common_grams")
|
||||
.put("index.analysis.filter.common_grams_4.query_mode", true)
|
||||
.putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
|
||||
.putList("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
|
||||
|
|
|
@ -71,7 +71,7 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
|
|||
.put("analysis.tokenizer.autocomplete.token_chars", "letter,digit")
|
||||
.put("analysis.tokenizer.autocomplete.type", "nGram")
|
||||
.put("analysis.filter.wordDelimiter.type", "word_delimiter")
|
||||
.putArray("analysis.filter.wordDelimiter.type_table",
|
||||
.putList("analysis.filter.wordDelimiter.type_table",
|
||||
"& => ALPHANUM", "| => ALPHANUM", "! => ALPHANUM",
|
||||
"? => ALPHANUM", ". => ALPHANUM", "- => ALPHANUM",
|
||||
"# => ALPHANUM", "% => ALPHANUM", "+ => ALPHANUM",
|
||||
|
@ -88,10 +88,10 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
|
|||
.put("analysis.filter.wordDelimiter.catenate_all", false)
|
||||
|
||||
.put("analysis.analyzer.autocomplete.tokenizer", "autocomplete")
|
||||
.putArray("analysis.analyzer.autocomplete.filter",
|
||||
.putList("analysis.analyzer.autocomplete.filter",
|
||||
"lowercase", "wordDelimiter")
|
||||
.put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace")
|
||||
.putArray("analysis.analyzer.search_autocomplete.filter",
|
||||
.putList("analysis.analyzer.search_autocomplete.filter",
|
||||
"lowercase", "wordDelimiter")));
|
||||
client().prepareIndex("test", "test", "1")
|
||||
.setSource("name", "ARCOTEL Hotels Deutschland").get();
|
||||
|
@ -121,7 +121,7 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
|
|||
.put("analysis.filter.wordDelimiter.catenate_numbers", true)
|
||||
.put("analysis.filter.wordDelimiter.catenate_all", false)
|
||||
.put("analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
|
||||
.putArray("analysis.analyzer.custom_analyzer.filter",
|
||||
.putList("analysis.analyzer.custom_analyzer.filter",
|
||||
"lowercase", "wordDelimiter"))
|
||||
);
|
||||
|
||||
|
|
|
@ -76,7 +76,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
}
|
||||
|
||||
settings = Settings.builder().put(settings)
|
||||
.putArray("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
|
||||
.putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
|
||||
.build();
|
||||
try {
|
||||
// test our none existing setup is picked up
|
||||
|
|
|
@ -38,7 +38,7 @@ public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase {
|
|||
Settings settings = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.put("index.analysis.filter.keep_numbers.type", "keep_types")
|
||||
.putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"})
|
||||
.putList("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"})
|
||||
.build();
|
||||
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
|
||||
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
|
||||
|
|
|
@ -42,9 +42,9 @@ public class MassiveWordListTests extends ESSingleNodeTestCase {
|
|||
.put("index.number_of_shards", 1)
|
||||
.put("analysis.analyzer.test_analyzer.type", "custom")
|
||||
.put("analysis.analyzer.test_analyzer.tokenizer", "standard")
|
||||
.putArray("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
|
||||
.putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
|
||||
.put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder")
|
||||
.putArray("analysis.filter.dictionary_decompounder.word_list", wordList)
|
||||
.putList("analysis.filter.dictionary_decompounder.word_list", wordList)
|
||||
).get();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -78,7 +78,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
|
|||
final String name = "ngr";
|
||||
final Settings indexSettings = newAnalysisSettingsBuilder().build();
|
||||
final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4)
|
||||
.putArray("token_chars", new String[0]).build();
|
||||
.putList("token_chars", new String[0]).build();
|
||||
Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings)
|
||||
.create();
|
||||
tokenizer.setReader(new StringReader("1.34"));
|
||||
|
|
|
@ -37,6 +37,7 @@ import java.io.IOException;
|
|||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -63,7 +64,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory {
|
|||
Map<Integer, String> tailored = new HashMap<>();
|
||||
|
||||
try {
|
||||
String[] ruleFiles = settings.getAsArray(RULE_FILES);
|
||||
List<String> ruleFiles = settings.getAsList(RULE_FILES);
|
||||
|
||||
for (String scriptAndResourcePath : ruleFiles) {
|
||||
int colonPos = scriptAndResourcePath.indexOf(":");
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.index.analysis;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.codec.Encoder;
|
||||
import org.apache.commons.codec.language.Caverphone1;
|
||||
|
@ -50,7 +50,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
private final Encoder encoder;
|
||||
private final boolean replace;
|
||||
private int maxcodelength;
|
||||
private String[] languageset;
|
||||
private List<String> languageset;
|
||||
private NameType nametype;
|
||||
private RuleType ruletype;
|
||||
|
||||
|
@ -82,7 +82,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
this.maxcodelength = settings.getAsInt("max_code_len", 4);
|
||||
} else if ("bm".equalsIgnoreCase(encodername) || "beider_morse".equalsIgnoreCase(encodername) || "beidermorse".equalsIgnoreCase(encodername)) {
|
||||
this.encoder = null;
|
||||
this.languageset = settings.getAsArray("languageset");
|
||||
this.languageset = settings.getAsList("languageset");
|
||||
String ruleType = settings.get("rule_type", "approx");
|
||||
if ("approx".equalsIgnoreCase(ruleType)) {
|
||||
ruletype = RuleType.APPROX;
|
||||
|
@ -117,7 +117,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
if (encoder == null) {
|
||||
if (ruletype != null && nametype != null) {
|
||||
if (languageset != null) {
|
||||
final LanguageSet languages = LanguageSet.from(new HashSet<>(Arrays.asList(languageset)));
|
||||
final LanguageSet languages = LanguageSet.from(new HashSet<>(languageset));
|
||||
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true), languages);
|
||||
}
|
||||
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true));
|
||||
|
|
|
@ -229,7 +229,7 @@ public class Ec2DiscoveryTests extends ESTestCase {
|
|||
public void testFilterByMultipleTags() throws InterruptedException {
|
||||
int nodes = randomIntBetween(5, 10);
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.putArray(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
|
||||
.putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
|
||||
.build();
|
||||
|
||||
int prodInstances = 0;
|
||||
|
|
|
@ -128,7 +128,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
|
||||
.putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
|
||||
.putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -140,7 +140,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
|
||||
.putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
|
||||
.putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -162,7 +162,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
|
||||
.putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
|
||||
.putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -173,7 +173,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
|
||||
.putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
|
||||
.putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -183,7 +183,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
public void testMultipleZonesAndTwoNodesInSameZone() {
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
|
||||
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -193,7 +193,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
public void testMultipleZonesAndTwoNodesInDifferentZones() {
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
|
||||
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -206,7 +206,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
public void testZeroNode43() {
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
|
||||
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
@ -226,7 +226,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
|
||||
public void testIllegalSettingsMissingProject() {
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
|
||||
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
try {
|
||||
|
@ -258,7 +258,7 @@ public class GceDiscoveryTests extends ESTestCase {
|
|||
public void testNoRegionReturnsEmptyList() {
|
||||
Settings nodeSettings = Settings.builder()
|
||||
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
|
||||
.putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
|
||||
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
|
||||
.build();
|
||||
mock = new GceInstancesServiceMock(nodeSettings);
|
||||
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
|
||||
|
|
|
@ -39,7 +39,7 @@ public class AzureRepositorySettingsTests extends ESTestCase {
|
|||
private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException {
|
||||
Settings internalSettings = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
|
||||
.put(settings)
|
||||
.build();
|
||||
return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), new Environment(internalSettings),
|
||||
|
|
|
@ -80,7 +80,7 @@ public class EvilSecurityTests extends ESTestCase {
|
|||
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString());
|
||||
settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
|
||||
settingsBuilder.putList(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
|
||||
esHome.resolve("data2").toString());
|
||||
settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString());
|
||||
settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString());
|
||||
|
@ -153,7 +153,7 @@ public class EvilSecurityTests extends ESTestCase {
|
|||
Settings
|
||||
.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), home.toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
|
||||
.build();
|
||||
|
||||
final Environment environment = new Environment(settings);
|
||||
|
|
|
@ -50,7 +50,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
|||
PosixFilePermission.OWNER_READ)));
|
||||
Settings build = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
IOException ioException = expectThrows(IOException.class, () -> {
|
||||
new NodeEnvironment(build, new Environment(build));
|
||||
});
|
||||
|
@ -70,7 +70,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
|||
PosixFilePermission.OWNER_READ)));
|
||||
Settings build = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
IOException ioException = expectThrows(IOException.class, () -> {
|
||||
new NodeEnvironment(build, new Environment(build));
|
||||
});
|
||||
|
@ -95,7 +95,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
|
|||
PosixFilePermission.OWNER_READ)));
|
||||
Settings build = Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
|
||||
IOException ioException = expectThrows(IOException.class, () -> {
|
||||
new NodeEnvironment(build, new Environment(build));
|
||||
});
|
||||
|
|
|
@ -39,7 +39,6 @@ import org.apache.logging.log4j.status.StatusData;
|
|||
import org.apache.logging.log4j.status.StatusLogger;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
|
||||
import org.apache.lucene.util.SetOnce;
|
||||
import org.apache.lucene.util.TestRuleMarkFailure;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.apache.lucene.util.TimeUnits;
|
||||
|
@ -134,7 +133,6 @@ import java.util.Random;
|
|||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.BooleanSupplier;
|
||||
import java.util.function.Consumer;
|
||||
|
@ -812,7 +810,7 @@ public abstract class ESTestCase extends LuceneTestCase {
|
|||
Settings build = Settings.builder()
|
||||
.put(settings)
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
|
||||
.putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
||||
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
|
||||
return new NodeEnvironment(build, new Environment(build));
|
||||
}
|
||||
|
||||
|
|
|
@ -129,7 +129,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource {
|
|||
unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]);
|
||||
}
|
||||
}
|
||||
builder.putArray("discovery.zen.ping.unicast.hosts", unicastHosts);
|
||||
builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts);
|
||||
return builder.put(super.nodeSettings(nodeOrdinal)).build();
|
||||
}
|
||||
|
||||
|
|
|
@ -2455,8 +2455,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
.put("transport.profiles.some_profile.port", "8900-9000")
|
||||
.put("transport.profiles.some_profile.bind_host", "_local:ipv4_")
|
||||
.put("transport.profiles.some_other_profile.port", "8700-8800")
|
||||
.putArray("transport.profiles.some_other_profile.bind_host", hosts)
|
||||
.putArray("transport.profiles.some_other_profile.publish_host", "_local:ipv4_")
|
||||
.putList("transport.profiles.some_other_profile.bind_host", hosts)
|
||||
.putList("transport.profiles.some_other_profile.publish_host", "_local:ipv4_")
|
||||
.build(), version0, null, true)) {
|
||||
|
||||
serviceC.start();
|
||||
|
|
Loading…
Reference in New Issue