Merge branch 'master' into feature/multi_cluster_search

Simon Willnauer 2017-01-11 23:40:46 +01:00
commit 00781d24ce
45 changed files with 951 additions and 276 deletions

View File

@ -19,6 +19,9 @@
package org.apache.lucene.queryparser.classic;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -30,6 +33,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
@ -55,9 +59,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
/**
* A query parser that uses the {@link MapperService} in order to build smarter
* queries based on the mapping information.
@ -739,27 +740,48 @@ public class MapperQueryParser extends AnalyzingQueryParser {
private Query applySlop(Query q, int slop) {
if (q instanceof PhraseQuery) {
PhraseQuery pq = (PhraseQuery) q;
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
final Term[] terms = pq.getTerms();
final int[] positions = pq.getPositions();
for (int i = 0; i < terms.length; ++i) {
builder.add(terms[i], positions[i]);
}
pq = builder.build();
//make sure that the boost hasn't been set beforehand, otherwise we'd lose it
assert q instanceof BoostQuery == false;
return pq;
return addSlopToPhrase((PhraseQuery) q, slop);
} else if (q instanceof MultiPhraseQuery) {
MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q);
builder.setSlop(slop);
return builder.build();
} else if (q instanceof GraphQuery && ((GraphQuery) q).hasPhrase()) {
// we have a graph query that has at least one phrase sub-query
// re-build and set slop on all phrase queries
List<Query> oldQueries = ((GraphQuery) q).getQueries();
Query[] queries = new Query[oldQueries.size()];
for (int i = 0; i < queries.length; i++) {
Query oldQuery = oldQueries.get(i);
if (oldQuery instanceof PhraseQuery) {
queries[i] = addSlopToPhrase((PhraseQuery) oldQuery, slop);
} else {
queries[i] = oldQuery;
}
}
return new GraphQuery(queries);
} else {
return q;
}
}
/**
* Rebuild a phrase query with a slop value
*/
private PhraseQuery addSlopToPhrase(PhraseQuery query, int slop) {
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
final Term[] terms = query.getTerms();
final int[] positions = query.getPositions();
for (int i = 0; i < terms.length; ++i) {
builder.add(terms[i], positions[i]);
}
return builder.build();
}
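As a standalone illustration of what addSlopToPhrase does (a sketch, assuming only Lucene on the classpath; the field name and terms are made up), the snippet below rebuilds an immutable PhraseQuery with a slop of 2 by copying its terms and positions into a new builder:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.PhraseQuery;

public class PhraseSlopExample {
    public static void main(String[] args) {
        // original phrase query without slop (hypothetical field/terms)
        PhraseQuery original = new PhraseQuery("body", "guinea", "pig");
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.setSlop(2);
        // copy terms and positions, exactly as addSlopToPhrase does above
        Term[] terms = original.getTerms();
        int[] positions = original.getPositions();
        for (int i = 0; i < terms.length; ++i) {
            builder.add(terms[i], positions[i]);
        }
        PhraseQuery sloppy = builder.build();
        System.out.println(sloppy); // prints body:"guinea pig"~2
    }
}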
private Collection<String> extractMultiFields(String field) {
Collection<String> fields;
if (field != null) {

View File

@ -40,6 +40,8 @@ import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.IfConfig;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.env.Environment;
@ -227,7 +229,7 @@ final class Bootstrap {
};
}
private static KeyStoreWrapper loadKeyStore(Environment initialEnv) throws BootstrapException {
private static SecureSettings loadSecureSettings(Environment initialEnv) throws BootstrapException {
final KeyStoreWrapper keystore;
try {
keystore = KeyStoreWrapper.load(initialEnv.configFile());
@ -246,16 +248,17 @@ final class Bootstrap {
return keystore;
}
private static Environment createEnvironment(boolean foreground, Path pidFile,
KeyStoreWrapper keystore, Settings initialSettings) {
SecureSettings secureSettings, Settings initialSettings) {
Terminal terminal = foreground ? Terminal.DEFAULT : null;
Settings.Builder builder = Settings.builder();
if (pidFile != null) {
builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile);
}
builder.put(initialSettings);
if (keystore != null) {
builder.setKeyStore(keystore);
if (secureSettings != null) {
builder.setSecureSettings(secureSettings);
}
return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, Collections.emptyMap());
}
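A minimal sketch of how the loaded keystore now reaches the node settings through setSecureSettings (the empty decryption password and the omission of error handling are assumptions for illustration; KeyStoreWrapper implements the new SecureSettings interface):

// sketch only; BootstrapException wrapping trimmed
KeyStoreWrapper keystore = KeyStoreWrapper.load(initialEnv.configFile());
if (keystore != null) {
    keystore.decrypt(new char[0]); // assumed: keystores are currently created without a password
}
Settings.Builder builder = Settings.builder().put(initialEnv.settings());
if (keystore != null) {
    builder.setSecureSettings(keystore);
}
Settings settings = builder.build();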
@ -297,7 +300,7 @@ final class Bootstrap {
INSTANCE = new Bootstrap();
final KeyStoreWrapper keystore = loadKeyStore(initialEnv);
final SecureSettings keystore = loadSecureSettings(initialEnv);
Environment environment = createEnvironment(foreground, pidFile, keystore, initialEnv.settings());
try {
LogConfigurator.configure(environment);

View File

@ -20,10 +20,12 @@
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PrefixQuery;
@ -135,6 +137,33 @@ public class Queries {
}
}
/**
* Potentially apply minimum should match value if we have a query that it can be applied to,
* otherwise return the original query.
*/
public static Query maybeApplyMinimumShouldMatch(Query query, @Nullable String minimumShouldMatch) {
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
return applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
} else if (query instanceof ExtendedCommonTermsQuery) {
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
} else if (query instanceof GraphQuery && ((GraphQuery) query).hasBoolean()) {
// we have a graph query that has at least one boolean sub-query
// re-build and set minimum should match value on all boolean queries
List<Query> oldQueries = ((GraphQuery) query).getQueries();
Query[] queries = new Query[oldQueries.size()];
for (int i = 0; i < queries.length; i++) {
queries[i] = maybeApplyMinimumShouldMatch(oldQueries.get(i), minimumShouldMatch);
}
return new GraphQuery(queries);
}
return query;
}
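For reference, applyMinimumShouldMatch (not shown in this hunk) boils down to rebuilding the boolean query with Lucene's minimum-number-should-match set. A rough standalone sketch of that effect, using a hard-coded value of 2 instead of the percentage parsing the real helper performs:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class MinimumShouldMatchSketch {
    public static void main(String[] args) {
        BooleanQuery bq = new BooleanQuery.Builder()
            .add(new TermQuery(new Term("f", "a")), Occur.SHOULD)
            .add(new TermQuery(new Term("f", "b")), Occur.SHOULD)
            .add(new TermQuery(new Term("f", "c")), Occur.SHOULD)
            .build();
        // BooleanQuery is immutable, so copy the clauses into a fresh builder
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        for (BooleanClause clause : bq) {
            builder.add(clause);
        }
        builder.setMinimumNumberShouldMatch(2); // at least 2 of the 3 SHOULD clauses must match
        Query withMsm = builder.build();
        System.out.println(withMsm); // prints something like (f:a f:b f:c)~2
    }
}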
private static Pattern spaceAroundLessThanPattern = Pattern.compile("(\\s+<\\s*)|(\\s*<\\s+)");
private static Pattern spacePattern = Pattern.compile(" ");
private static Pattern lessThanPattern = Pattern.compile("<");

View File

@ -80,7 +80,7 @@ class AddStringKeyStoreCommand extends EnvironmentAwareCommand {
}
try {
keystore.setStringSetting(setting, value);
keystore.setString(setting, value);
} catch (IllegalArgumentException e) {
throw new UserException(ExitCodes.DATA_ERROR, "String value must contain only ASCII");
}

View File

@ -54,6 +54,7 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
/**
* A wrapper around a Java KeyStore which supplements the keystore with extra metadata.
@ -64,7 +65,7 @@ import org.apache.lucene.util.SetOnce;
* in a single thread. Once decrypted, keys may be read with the wrapper in
* multiple threads.
*/
public class KeyStoreWrapper implements Closeable {
public class KeyStoreWrapper implements SecureSettings {
/** The name of the keystore file to read and write. */
private static final String KEYSTORE_FILENAME = "elasticsearch.keystore";
@ -159,7 +160,7 @@ public class KeyStoreWrapper implements Closeable {
}
}
/** Returns true iff {@link #decrypt(char[])} has been called. */
@Override
public boolean isLoaded() {
return keystore.get() != null;
}
@ -225,20 +226,25 @@ public class KeyStoreWrapper implements Closeable {
}
}
/** Returns the names of all settings in this keystore. */
public Set<String> getSettings() {
return settingNames;
}
@Override
public boolean hasSetting(String setting) {
return settingNames.contains(setting);
}
// TODO: make settings accessible only to code that registered the setting
/** Retrieve a string setting. The {@link SecureString} should be closed once it is used. */
SecureString getStringSetting(String setting) throws GeneralSecurityException {
@Override
public SecureString getString(String setting) throws GeneralSecurityException {
KeyStore.Entry entry = keystore.get().getEntry(setting, keystorePassword.get());
if (entry instanceof KeyStore.SecretKeyEntry == false) {
throw new IllegalStateException("Secret setting " + setting + " is not a string");
}
// TODO: only allow getting a setting once?
KeyStore.SecretKeyEntry secretKeyEntry = (KeyStore.SecretKeyEntry)entry;
KeyStore.SecretKeyEntry secretKeyEntry = (KeyStore.SecretKeyEntry) entry;
PBEKeySpec keySpec = (PBEKeySpec) secretFactory.getKeySpec(secretKeyEntry.getSecretKey(), PBEKeySpec.class);
SecureString value = new SecureString(keySpec.getPassword());
keySpec.clearPassword();
@ -250,7 +256,7 @@ public class KeyStoreWrapper implements Closeable {
*
* @throws IllegalArgumentException if the value is not ASCII
*/
void setStringSetting(String setting, char[] value) throws GeneralSecurityException {
void setString(String setting, char[] value) throws GeneralSecurityException {
if (ASCII_ENCODER.canEncode(CharBuffer.wrap(value)) == false) {
throw new IllegalArgumentException("Value must be ascii");
}
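A small usage sketch of the renamed accessors, mirroring KeyStoreCommandTestCase further down in this commit (the setting name is a placeholder, exceptions are not handled, and the code has to live in the org.elasticsearch.common.settings package because setString is package-private):

KeyStoreWrapper keystore = KeyStoreWrapper.create(new char[0]);
keystore.setString("s3.client.default.secret_key", "my-secret".toCharArray());
keystore.save(env.configFile());
try (SecureString value = keystore.getString("s3.client.default.secret_key")) {
    assert "my-secret".contentEquals(value);
}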

View File

@ -25,6 +25,9 @@ import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.ArrayUtils;
/**
* A secure setting.
@ -36,8 +39,16 @@ public abstract class SecureSetting<T> extends Setting<T> {
Arrays.asList(Property.Deprecated, Property.Shared)
);
private SecureSetting(String key, Setting.Property... properties) {
super(key, (String)null, null, properties);
private static final Property[] FIXED_PROPERTIES = {
Property.NodeScope
};
private static final Property[] LEGACY_PROPERTIES = {
Property.NodeScope, Property.Deprecated, Property.Filtered
};
private SecureSetting(String key, Property... properties) {
super(key, (String)null, null, ArrayUtils.concat(properties, FIXED_PROPERTIES, Property.class));
assert assertAllowedProperties(properties);
}
@ -65,22 +76,28 @@ public abstract class SecureSetting<T> extends Setting<T> {
throw new UnsupportedOperationException("secure settings are not strings");
}
@Override
public boolean exists(Settings settings) {
final SecureSettings secureSettings = settings.getSecureSettings();
return secureSettings != null && secureSettings.hasSetting(getKey());
}
@Override
public T get(Settings settings) {
checkDeprecation(settings);
final KeyStoreWrapper keystore = Objects.requireNonNull(settings.getKeyStore());
if (keystore.getSettings().contains(getKey()) == false) {
final SecureSettings secureSettings = settings.getSecureSettings();
if (secureSettings == null || secureSettings.hasSetting(getKey()) == false) {
return getFallback(settings);
}
try {
return getSecret(keystore);
return getSecret(secureSettings);
} catch (GeneralSecurityException e) {
throw new RuntimeException("failed to read secure setting " + getKey(), e);
}
}
/** Returns the secret setting from the secure settings store. */
abstract T getSecret(KeyStoreWrapper keystore) throws GeneralSecurityException;
abstract T getSecret(SecureSettings secureSettings) throws GeneralSecurityException;
/** Returns the value from a fallback setting. Returns null if no fallback exists. */
abstract T getFallback(Settings settings);
@ -92,18 +109,41 @@ public abstract class SecureSetting<T> extends Setting<T> {
*
* This may be any sensitive string, e.g. a username, a password, an auth token, etc.
*/
public static SecureSetting<SecureString> stringSetting(String name, Setting<String> fallback, Property... properties) {
public static SecureSetting<SecureString> secureString(String name, SecureSetting<SecureString> fallback,
boolean allowLegacy, Property... properties) {
final Setting<String> legacy;
if (allowLegacy) {
Property[] legacyProperties = ArrayUtils.concat(properties, LEGACY_PROPERTIES, Property.class);
legacy = Setting.simpleString(name, legacyProperties);
} else {
legacy = null;
}
return new SecureSetting<SecureString>(name, properties) {
@Override
protected SecureString getSecret(KeyStoreWrapper keystore) throws GeneralSecurityException {
return keystore.getStringSetting(getKey());
protected SecureString getSecret(SecureSettings secureSettings) throws GeneralSecurityException {
return secureSettings.getString(getKey());
}
@Override
SecureString getFallback(Settings settings) {
if (fallback != null) {
return new SecureString(fallback.get(settings).toCharArray());
if (legacy != null && legacy.exists(settings)) {
return new SecureString(legacy.get(settings).toCharArray());
}
return null;
if (fallback != null) {
return fallback.get(settings);
}
return new SecureString(new char[0]); // this means "setting does not exist"
}
@Override
protected void checkDeprecation(Settings settings) {
super.checkDeprecation(settings);
if (legacy != null) {
legacy.checkDeprecation(settings);
}
}
@Override
public boolean exists(Settings settings) {
// handle legacy, which is internal to this setting
return super.exists(settings) || legacy != null && legacy.exists(settings);
}
};
}
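A possible caller of the new secureString factory (the setting name is hypothetical). When the setting is present neither in the keystore nor as a legacy plain-text setting, get returns an empty SecureString rather than null:

class SecretConsumer {
    // allowLegacy = true also accepts a plain-text setting of the same name
    // in elasticsearch.yml as a deprecated, filtered fallback
    static final SecureSetting<SecureString> SECRET_KEY =
        SecureSetting.secureString("s3.client.default.secret_key", null, true);

    void useIt(Settings nodeSettings) {
        try (SecureString secret = SECRET_KEY.get(nodeSettings)) {
            if (secret.length() > 0) {
                // use the secret only while the SecureString is still open
            }
        }
    }
}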

View File

@ -0,0 +1,38 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.settings;
import java.io.Closeable;
import java.security.GeneralSecurityException;
/**
* An accessor for settings which are securely stored. See {@link SecureSetting}.
*/
public interface SecureSettings extends Closeable {
/** Returns true iff the settings are loaded and retrievable. */
boolean isLoaded();
/** Returns true iff the given setting exists in this secure settings. */
boolean hasSetting(String setting);
/** Return a string setting. The {@link SecureString} should be closed once it is used. */
SecureString getString(String setting) throws GeneralSecurityException;
}
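Since the interface is this small, a throwaway in-memory implementation (hypothetical, e.g. for unit tests that do not want a real keystore on disk) only needs a map:

import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.Map;

class InMemorySecureSettings implements SecureSettings {
    private final Map<String, SecureString> values;

    InMemorySecureSettings(Map<String, SecureString> values) {
        this.values = values;
    }

    @Override
    public boolean isLoaded() {
        return true; // nothing to decrypt
    }

    @Override
    public boolean hasSetting(String setting) {
        return values.containsKey(setting);
    }

    @Override
    public SecureString getString(String setting) throws GeneralSecurityException {
        return values.get(setting);
    }

    @Override
    public void close() throws IOException {
        for (SecureString value : values.values()) {
            value.close();
        }
    }
}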

View File

@ -19,13 +19,14 @@
package org.elasticsearch.common.settings;
import java.io.Closeable;
import java.util.Arrays;
import java.util.Objects;
/**
* A String implementation which allows clearing the underlying char array.
*/
public final class SecureString implements CharSequence, AutoCloseable {
public final class SecureString implements CharSequence, Closeable {
private char[] chars;

View File

@ -123,7 +123,8 @@ public class Setting<T> extends ToXContentToBytes {
private Setting(Key key, @Nullable Setting<T> fallbackSetting, Function<Settings, String> defaultValue, Function<String, T> parser,
Property... properties) {
assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null";
assert this instanceof SecureSetting || parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting()
: "parser returned null";
this.key = key;
this.fallbackSetting = fallbackSetting;
this.defaultValue = defaultValue;
@ -294,7 +295,7 @@ public class Setting<T> extends ToXContentToBytes {
* Returns <code>true</code> iff this setting is present in the given settings object. Otherwise <code>false</code>
*/
public boolean exists(Settings settings) {
return settings.contains(getKey());
return settings.getAsMap().containsKey(getKey());
}
/**

View File

@ -43,6 +43,7 @@ import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
@ -80,26 +81,21 @@ public final class Settings implements ToXContent {
/** The raw settings from the full key to raw string value. */
private Map<String, String> settings;
/** The keystore storage associated with these settings. */
private KeyStoreWrapper keystore;
/** The secure settings storage associated with these settings. */
private SecureSettings secureSettings;
Settings(Map<String, String> settings, KeyStoreWrapper keystore) {
Settings(Map<String, String> settings, SecureSettings secureSettings) {
// we use a sorted map for consistent serialization when using getAsMap()
this.settings = Collections.unmodifiableSortedMap(new TreeMap<>(settings));
this.keystore = keystore;
this.secureSettings = secureSettings;
}
/**
* Retrieve the keystore that contains secure settings.
* Retrieve the secure settings in these settings.
*/
KeyStoreWrapper getKeyStore() {
SecureSettings getSecureSettings() {
// pkg private so it can only be accessed by local subclasses of SecureSetting
return keystore;
}
/** Returns true if the setting exists, false otherwise. */
public boolean contains(String key) {
return settings.containsKey(key) || keystore != null && keystore.getSettings().contains(key);
return secureSettings;
}
/**
@ -205,10 +201,10 @@ public final class Settings implements ToXContent {
/**
* Settings filtered by the specified prefix (the prefix is removed from the keys).
* Secure settings may not be accessed through the prefixed settings.
*/
public Settings getByPrefix(String prefix) {
return new Settings(new FilteredMap(this.settings, (k) -> k.startsWith(prefix), prefix), null);
return new Settings(new FilteredMap(this.settings, (k) -> k.startsWith(prefix), prefix),
secureSettings == null ? null : new PrefixedSecureSettings(secureSettings, prefix));
}
/**
@ -478,7 +474,7 @@ public final class Settings implements ToXContent {
}
Map<String, Settings> retVal = new LinkedHashMap<>();
for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), keystore));
retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), secureSettings));
}
return Collections.unmodifiableMap(retVal);
}
@ -613,7 +609,7 @@ public final class Settings implements ToXContent {
// we use a sorted map for consistent serialization when using getAsMap()
private final Map<String, String> map = new TreeMap<>();
private SetOnce<KeyStoreWrapper> keystore = new SetOnce<>();
private SetOnce<SecureSettings> secureSettings = new SetOnce<>();
private Builder() {
@ -637,12 +633,12 @@ public final class Settings implements ToXContent {
return map.get(key);
}
/** Sets the secret store for these settings. */
public void setKeyStore(KeyStoreWrapper keystore) {
if (keystore.isLoaded()) {
throw new IllegalStateException("The keystore wrapper must already be loaded");
public Builder setSecureSettings(SecureSettings secureSettings) {
if (secureSettings.isLoaded() == false) {
throw new IllegalStateException("Secure settings must already be loaded");
}
this.keystore.set(keystore);
this.secureSettings.set(secureSettings);
return this;
}
/**
@ -1051,7 +1047,7 @@ public final class Settings implements ToXContent {
* set on this builder.
*/
public Settings build() {
return new Settings(map, keystore.get());
return new Settings(map, secureSettings.get());
}
}
@ -1171,4 +1167,34 @@ public final class Settings implements ToXContent {
return size;
}
}
private static class PrefixedSecureSettings implements SecureSettings {
private SecureSettings delegate;
private String prefix;
PrefixedSecureSettings(SecureSettings delegate, String prefix) {
this.delegate = delegate;
this.prefix = prefix;
}
@Override
public boolean isLoaded() {
return delegate.isLoaded();
}
@Override
public boolean hasSetting(String setting) {
return delegate.hasSetting(prefix + setting);
}
@Override
public SecureString getString(String setting) throws GeneralSecurityException {
return delegate.getString(prefix + setting);
}
@Override
public void close() throws IOException {
delegate.close();
}
}
}
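To illustrate PrefixedSecureSettings: a prefixed view re-adds the prefix before delegating to the underlying keystore, so secure settings stay reachable under their short names. A hypothetical example, assuming a keystore entry named s3.client.default.secret_key attached to nodeSettings:

Settings s3Scoped = nodeSettings.getByPrefix("s3.client.");
SecureSetting<SecureString> secretKey =
    SecureSetting.secureString("default.secret_key", null, false);
try (SecureString secret = secretKey.get(s3Scoped)) {
    // PrefixedSecureSettings prepends "s3.client." again, so this reads the
    // keystore entry "s3.client.default.secret_key"
}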

View File

@ -20,20 +20,14 @@
package org.elasticsearch.common.xcontent.cbor;
import com.fasterxml.jackson.core.JsonGenerator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContentGenerator;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Set;
public class CborXContentGenerator extends JsonXContentGenerator {
public CborXContentGenerator(JsonGenerator jsonGenerator, OutputStream os) {
this(jsonGenerator, os, Collections.emptySet(), Collections.emptySet());
}
public CborXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, Set<String> includes, Set<String> excludes) {
super(jsonGenerator, os, includes, excludes);
}

View File

@ -44,7 +44,6 @@ import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Objects;
import java.util.Set;
@ -73,10 +72,6 @@ public class JsonXContentGenerator implements XContentGenerator {
private static final DefaultPrettyPrinter.Indenter INDENTER = new DefaultIndenter(" ", LF.getValue());
private boolean prettyPrint = false;
public JsonXContentGenerator(JsonGenerator jsonGenerator, OutputStream os) {
this(jsonGenerator, os, Collections.emptySet(), Collections.emptySet());
}
public JsonXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, Set<String> includes, Set<String> excludes) {
Objects.requireNonNull(includes, "Including filters must not be null");
Objects.requireNonNull(excludes, "Excluding filters must not be null");

View File

@ -20,20 +20,14 @@
package org.elasticsearch.common.xcontent.smile;
import com.fasterxml.jackson.core.JsonGenerator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContentGenerator;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Set;
public class SmileXContentGenerator extends JsonXContentGenerator {
public SmileXContentGenerator(JsonGenerator jsonGenerator, OutputStream os) {
this(jsonGenerator, os, Collections.emptySet(), Collections.emptySet());
}
public SmileXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, Set<String> includes, Set<String> excludes) {
super(jsonGenerator, os, includes, excludes);
}

View File

@ -20,20 +20,14 @@
package org.elasticsearch.common.xcontent.yaml;
import com.fasterxml.jackson.core.JsonGenerator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContentGenerator;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Set;
public class YamlXContentGenerator extends JsonXContentGenerator {
public YamlXContentGenerator(JsonGenerator jsonGenerator, OutputStream os) {
this(jsonGenerator, os, Collections.emptySet(), Collections.emptySet());
}
public YamlXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, Set<String> includes, Set<String> excludes) {
super(jsonGenerator, os, includes, excludes);
}

View File

@ -461,35 +461,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
matchQuery.setZeroTermsQuery(zeroTermsQuery);
Query query = matchQuery.parse(type, fieldName, value);
if (query == null) {
return null;
}
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
} else if (query instanceof GraphQuery && ((GraphQuery) query).hasBoolean()) {
// we have a graph query that has at least one boolean sub-query
// re-build and set minimum should match value on all boolean queries
List<Query> oldQueries = ((GraphQuery) query).getQueries();
Query[] queries = new Query[oldQueries.size()];
for (int i = 0; i < queries.length; i++) {
Query oldQuery = oldQueries.get(i);
if (oldQuery instanceof BooleanQuery) {
queries[i] = Queries.applyMinimumShouldMatch((BooleanQuery) oldQuery, minimumShouldMatch);
} else {
queries[i] = oldQuery;
}
}
query = new GraphQuery(queries);
} else if (query instanceof ExtendedCommonTermsQuery) {
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
}
return query;
return Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch);
}
@Override

View File

@ -1042,12 +1042,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
}
query = Queries.fixNegativeQueryIfNeeded(query);
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, this.minimumShouldMatch());
}
query = Queries.maybeApplyMinimumShouldMatch(query, this.minimumShouldMatch);
//restore the previous BoostQuery wrapping
for (int i = boosts.size() - 1; i >= 0; i--) {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
@ -37,6 +38,7 @@ import org.elasticsearch.index.query.SimpleQueryParser.Settings;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
@ -412,15 +414,8 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
SimpleQueryParser sqp = new SimpleQueryParser(luceneAnalyzer, resolvedFieldsAndWeights, flags, newSettings, context);
sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());
Query query = sqp.parse(queryText);
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (minimumShouldMatch != null && query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
}
return query;
return Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch);
}
private static String resolveIndexName(String fieldName, QueryShardContext context) {

View File

@ -61,12 +61,7 @@ public class MultiMatchQuery extends MatchQuery {
private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException {
Query query = parse(type, fieldName, value);
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
}
query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch);
if (query != null && boostValue != null && boostValue != AbstractQueryBuilder.DEFAULT_BOOST) {
query = new BoostQuery(query, boostValue);
}

View File

@ -855,7 +855,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
in.readLong(); // ttl
}
this.versionType = VersionType.fromValue(in.readByte());
assert versionType.validateVersionForWrites(this.version);
assert versionType.validateVersionForWrites(this.version) : "invalid version for writes: " + this.version;
if (format >= FORMAT_AUTO_GENERATED_IDS) {
this.autoGeneratedIdTimestamp = in.readLong();
} else {
@ -1036,8 +1036,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
this.versionType = VersionType.fromValue(in.readByte());
assert versionType.validateVersionForWrites(this.version);
if (format >= FORMAT_SEQ_NO) {
seqNo = in.readVLong();
primaryTerm = in.readVLong();
seqNo = in.readLong();
primaryTerm = in.readLong();
}
}
@ -1100,8 +1100,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
out.writeString(uid.text());
out.writeLong(version);
out.writeByte(versionType.getValue());
out.writeVLong(seqNo);
out.writeVLong(primaryTerm);
out.writeLong(seqNo);
out.writeLong(primaryTerm);
}
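The switch from writeVLong to writeLong matters because the sequence number fields can hold negative sentinel values (UNASSIGNED_SEQ_NO), and variable-length longs only encode non-negative values. A quick round-trip sketch using the same stream classes as the tests below:

// sketch: negative sentinels survive writeLong/readLong, which writeVLong is not meant for
BytesStreamOutput out = new BytesStreamOutput();
out.writeLong(SequenceNumbersService.UNASSIGNED_SEQ_NO); // a negative constant
out.writeLong(0L);                                        // primary term
StreamInput in = out.bytes().streamInput();
long seqNo = in.readLong();
long primaryTerm = in.readLong();
assert seqNo == SequenceNumbersService.UNASSIGNED_SEQ_NO && primaryTerm == 0L;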
@Override

View File

@ -620,6 +620,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
// EMPTY is safe here because RepositoryData#fromXContent calls namedObject
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, out.bytes())) {
repositoryData = RepositoryData.snapshotsFromXContent(parser, indexGen);
} catch (NotXContentException e) {
logger.warn("[{}] index blob is not valid x-content [{} bytes]", snapshotsIndexBlobName, out.bytes().length());
throw e;
}
}

View File

@ -840,7 +840,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
}
@Override
protected void doClose() {
protected final void doClose() {
}
@Override

View File

@ -75,7 +75,7 @@ public abstract class KeyStoreCommandTestCase extends CommandTestCase {
KeyStoreWrapper keystore = KeyStoreWrapper.create(password.toCharArray());
assertEquals(0, settings.length % 2);
for (int i = 0; i < settings.length; i += 2) {
keystore.setStringSetting(settings[i], settings[i + 1].toCharArray());
keystore.setString(settings[i], settings[i + 1].toCharArray());
}
keystore.save(env.configFile());
return keystore;
@ -92,6 +92,6 @@ public abstract class KeyStoreCommandTestCase extends CommandTestCase {
}
void assertSecureString(KeyStoreWrapper keystore, String setting, String value) throws Exception {
assertEquals(value, keystore.getStringSetting(setting).toString());
assertEquals(value, keystore.getString(setting).toString());
}
}

View File

@ -19,6 +19,14 @@
package org.elasticsearch.index.query;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
import org.apache.lucene.queryparser.classic.QueryParserSettings;
@ -27,6 +35,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
@ -43,6 +52,7 @@ import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
@ -53,13 +63,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStringQueryBuilder> {
@Override
@ -376,6 +379,121 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
}
}
public void testToQueryWithGraph() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
for (Operator op : Operator.values()) {
BooleanClause.Occur defaultOp = op.toBooleanClauseOccur();
MapperQueryParser queryParser = new MapperQueryParser(createShardContext());
QueryParserSettings settings = new QueryParserSettings("");
settings.defaultField(STRING_FIELD_NAME);
settings.fieldsAndWeights(Collections.emptyMap());
settings.fuzziness(Fuzziness.AUTO);
settings.analyzeWildcard(true);
settings.rewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
settings.defaultOperator(op.toQueryParserOperator());
settings.forceAnalyzer(new MockSynonymAnalyzer());
settings.forceQuoteAnalyzer(new MockSynonymAnalyzer());
queryParser.reset(settings);
// simple multi-term
Query query = queryParser.parse("guinea pig");
Query expectedQuery = new GraphQuery(
new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
.build(),
new TermQuery(new Term(STRING_FIELD_NAME, "cavy"))
);
assertThat(query, Matchers.equalTo(expectedQuery));
// simple with additional tokens
query = queryParser.parse("that guinea pig smells");
expectedQuery = new GraphQuery(
new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), defaultOp))
.build(),
new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "cavy")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), defaultOp))
.build()
);
assertThat(query, Matchers.equalTo(expectedQuery));
// complex
query = queryParser.parse("+that -(guinea pig) +smells");
expectedQuery = new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), BooleanClause.Occur.MUST))
.add(new BooleanClause(new GraphQuery(
new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
.build(),
new TermQuery(new Term(STRING_FIELD_NAME, "cavy"))
), BooleanClause.Occur.MUST_NOT))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), BooleanClause.Occur.MUST))
.build();
assertThat(query, Matchers.equalTo(expectedQuery));
// no paren should cause guinea and pig to be treated as separate tokens
query = queryParser.parse("+that -guinea pig +smells");
expectedQuery = new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), BooleanClause.Occur.MUST))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), BooleanClause.Occur.MUST_NOT))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), BooleanClause.Occur.MUST))
.build();
assertThat(query, Matchers.equalTo(expectedQuery));
// phrase
query = queryParser.parse("\"that guinea pig smells\"");
expectedQuery = new BooleanQuery.Builder()
.setDisableCoord(true)
.add(new BooleanClause(new GraphQuery(
new PhraseQuery.Builder()
.add(new Term(STRING_FIELD_NAME, "that"))
.add(new Term(STRING_FIELD_NAME, "guinea"))
.add(new Term(STRING_FIELD_NAME, "pig"))
.add(new Term(STRING_FIELD_NAME, "smells"))
.build(),
new PhraseQuery.Builder()
.add(new Term(STRING_FIELD_NAME, "that"))
.add(new Term(STRING_FIELD_NAME, "cavy"))
.add(new Term(STRING_FIELD_NAME, "smells"))
.build()
), BooleanClause.Occur.SHOULD)).build();
assertThat(query, Matchers.equalTo(expectedQuery));
// phrase with slop
query = queryParser.parse("\"that guinea pig smells\"~2");
expectedQuery = new BooleanQuery.Builder()
.setDisableCoord(true)
.add(new BooleanClause(new GraphQuery(
new PhraseQuery.Builder()
.add(new Term(STRING_FIELD_NAME, "that"))
.add(new Term(STRING_FIELD_NAME, "guinea"))
.add(new Term(STRING_FIELD_NAME, "pig"))
.add(new Term(STRING_FIELD_NAME, "smells"))
.setSlop(2)
.build(),
new PhraseQuery.Builder()
.add(new Term(STRING_FIELD_NAME, "that"))
.add(new Term(STRING_FIELD_NAME, "cavy"))
.add(new Term(STRING_FIELD_NAME, "smells"))
.setSlop(2)
.build()
), BooleanClause.Occur.SHOULD)).build();
assertThat(query, Matchers.equalTo(expectedQuery));
}
}
public void testToQueryRegExpQuery() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query query = queryStringQuery("/foo*bar/").defaultField(STRING_FIELD_NAME)

View File

@ -20,10 +20,13 @@
package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
@ -113,6 +116,53 @@ public class SimpleQueryParserTests extends ESTestCase {
}
}
public void testAnalyzerWithGraph() {
SimpleQueryParser.Settings settings = new SimpleQueryParser.Settings();
settings.analyzeWildcard(true);
Map<String, Float> weights = new HashMap<>();
weights.put("field1", 1.0f);
SimpleQueryParser parser = new MockSimpleQueryParser(new MockSynonymAnalyzer(), weights, -1, settings);
for (Operator op : Operator.values()) {
BooleanClause.Occur defaultOp = op.toBooleanClauseOccur();
parser.setDefaultOperator(defaultOp);
// non-phrase won't detect multi-word synonym because of whitespace splitting
Query query = parser.parse("guinea pig");
Query expectedQuery = new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term("field1", "guinea")), defaultOp))
.add(new BooleanClause(new TermQuery(new Term("field1", "pig")), defaultOp))
.build();
assertThat(query, equalTo(expectedQuery));
// phrase will pick it up
query = parser.parse("\"guinea pig\"");
expectedQuery = new GraphQuery(
new PhraseQuery("field1", "guinea", "pig"),
new TermQuery(new Term("field1", "cavy")));
assertThat(query, equalTo(expectedQuery));
// phrase with slop
query = parser.parse("big \"guinea pig\"~2");
expectedQuery = new BooleanQuery.Builder()
.add(new BooleanClause(new TermQuery(new Term("field1", "big")), defaultOp))
.add(new BooleanClause(new GraphQuery(
new PhraseQuery.Builder()
.add(new Term("field1", "guinea"))
.add(new Term("field1", "pig"))
.setSlop(2)
.build(),
new TermQuery(new Term("field1", "cavy"))), defaultOp))
.build();
assertThat(query, equalTo(expectedQuery));
}
}
public void testQuoteFieldSuffix() {
SimpleQueryParser.Settings sqpSettings = new SimpleQueryParser.Settings();
sqpSettings.quoteFieldSuffix(".quote");

View File

@ -23,6 +23,9 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.Term;
import org.apache.lucene.mockfile.FilterFileChannel;
import org.apache.lucene.store.AlreadyClosedException;
@ -31,6 +34,7 @@ import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -47,6 +51,12 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.Engine.Operation.Origin;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbersService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog.Location;
@ -67,6 +77,7 @@ import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
@ -297,14 +308,14 @@ public class TranslogTests extends ESTestCase {
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(2L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(125L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(139L));
}
translog.add(new Translog.Delete(newUid("3")));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(3L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(153L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(181L));
}
final long seqNo = 1;
@ -313,10 +324,10 @@ public class TranslogTests extends ESTestCase {
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(195L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(223L));
}
final long expectedSizeInBytes = 238L;
final long expectedSizeInBytes = 266L;
translog.prepareCommit();
{
final TranslogStats stats = stats();
@ -1993,4 +2004,47 @@ public class TranslogTests extends ESTestCase {
public static Translog.Location randomTranslogLocation() {
return new Translog.Location(randomLong(), randomLong(), randomInt());
}
public void testTranslogOpSerialization() throws Exception {
BytesReference B_1 = new BytesArray(new byte[]{1});
SeqNoFieldMapper.SequenceID seqID = SeqNoFieldMapper.SequenceID.emptySeqID();
assert Version.CURRENT.major <= 6 : "Using UNASSIGNED_SEQ_NO can be removed in 7.0, because 6.0+ nodes have actual sequence numbers";
long randomSeqNum = randomBoolean() ? SequenceNumbersService.UNASSIGNED_SEQ_NO : randomNonNegativeLong();
long randomPrimaryTerm = randomBoolean() ? 0 : randomNonNegativeLong();
seqID.seqNo.setLongValue(randomSeqNum);
seqID.seqNoDocValue.setLongValue(randomSeqNum);
seqID.primaryTerm.setLongValue(randomPrimaryTerm);
Field uidField = new Field("_uid", "1", UidFieldMapper.Defaults.FIELD_TYPE);
Field versionField = new NumericDocValuesField("_version", 1);
Document document = new Document();
document.add(new TextField("value", "test", Field.Store.YES));
document.add(uidField);
document.add(versionField);
document.add(seqID.seqNo);
document.add(seqID.seqNoDocValue);
document.add(seqID.primaryTerm);
ParsedDocument doc = new ParsedDocument(versionField, seqID, "1", "type", null, Arrays.asList(document), B_1, null);
Engine.Index eIndex = new Engine.Index(newUid("1"), doc, randomSeqNum, randomPrimaryTerm,
1, VersionType.INTERNAL, Origin.PRIMARY, 0, 0, false);
Engine.IndexResult eIndexResult = new Engine.IndexResult(1, randomSeqNum, true);
Translog.Index index = new Translog.Index(eIndex, eIndexResult);
BytesStreamOutput out = new BytesStreamOutput();
index.writeTo(out);
StreamInput in = out.bytes().streamInput();
Translog.Index serializedIndex = new Translog.Index(in);
assertEquals(index, serializedIndex);
Engine.Delete eDelete = new Engine.Delete("type", "1", newUid("1"), randomSeqNum, randomPrimaryTerm,
2, VersionType.INTERNAL, Origin.PRIMARY, 0);
Engine.DeleteResult eDeleteResult = new Engine.DeleteResult(2, randomSeqNum, true);
Translog.Delete delete = new Translog.Delete(eDelete, eDeleteResult);
out = new BytesStreamOutput();
delete.writeTo(out);
in = out.bytes().streamInput();
Translog.Delete serializedDelete = new Translog.Delete(in);
assertEquals(delete, serializedDelete);
}
}

View File

@ -19,16 +19,26 @@
package org.elasticsearch.search.query;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
@ -41,22 +51,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class QueryStringIT extends ESIntegTestCase {
@Before
@ -263,6 +257,92 @@ public class QueryStringIT extends ESIntegTestCase {
containsString("unit [D] not supported for date math [-2D]"));
}
private void setupIndexWithGraph(String index) throws Exception {
CreateIndexRequestBuilder builder = prepareCreate(index).setSettings(
Settings.builder()
.put(indexSettings())
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
.putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
.putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties")
.startObject("field").field("type", "text").endObject().endObject().endObject().endObject();
assertAcked(builder.addMapping(index, mapping));
ensureGreen();
List<IndexRequestBuilder> builders = new ArrayList<>();
builders.add(client().prepareIndex(index, index, "1").setSource("field", "say wtf happened foo"));
builders.add(client().prepareIndex(index, index, "2").setSource("field", "bar baz what the fudge man"));
builders.add(client().prepareIndex(index, index, "3").setSource("field", "wtf"));
builders.add(client().prepareIndex(index, index, "4").setSource("field", "what is the name for fudge"));
builders.add(client().prepareIndex(index, index, "5").setSource("field", "bar two three"));
builders.add(client().prepareIndex(index, index, "6").setSource("field", "bar baz two three"));
indexRandom(true, false, builders);
}
public void testGraphQueries() throws Exception {
String index = "graph_test_index";
setupIndexWithGraph(index);
// phrase
SearchResponse searchResponse = client().prepareSearch(index).setQuery(
QueryBuilders.queryStringQuery("\"foo two three\"")
.defaultField("field")
.analyzer("lower_graphsyns")).get();
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "6");
// and
searchResponse = client().prepareSearch(index).setQuery(
QueryBuilders.queryStringQuery("say what the fudge")
.defaultField("field")
.splitOnWhitespace(false)
.defaultOperator(Operator.AND)
.analyzer("lower_graphsyns")).get();
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
// and, split on whitespace means we should not recognize the multi-word synonym
searchResponse = client().prepareSearch(index).setQuery(
QueryBuilders.queryStringQuery("say what the fudge")
.defaultField("field")
.splitOnWhitespace(true)
.defaultOperator(Operator.AND)
.analyzer("lower_graphsyns")).get();
assertNoSearchHits(searchResponse);
// or
searchResponse = client().prepareSearch(index).setQuery(
QueryBuilders.queryStringQuery("three what the fudge foo")
.defaultField("field")
.splitOnWhitespace(false)
.defaultOperator(Operator.OR)
.analyzer("lower_graphsyns")).get();
assertHitCount(searchResponse, 6L);
assertSearchHits(searchResponse, "1", "2", "3", "4", "5", "6");
// min should match
searchResponse = client().prepareSearch(index).setQuery(
QueryBuilders.queryStringQuery("three what the fudge foo")
.defaultField("field")
.splitOnWhitespace(false)
.defaultOperator(Operator.OR)
.analyzer("lower_graphsyns")
.minimumShouldMatch("80%")).get();
assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "6");
}
private void assertHits(SearchHits hits, String... ids) {
assertThat(hits.totalHits(), equalTo((long) ids.length));
Set<String> hitIds = new HashSet<>();

View File

@ -7,8 +7,8 @@
# The primary way of configuring a node is via this file. This template lists
# the most important settings you may want to configure for a production cluster.
#
# Please see the documentation for further information on configuration options:
# <https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html>
# Please consult the documentation for further information on configuration options:
# https://www.elastic.co/guide/en/elasticsearch/reference/index.html
#
# ---------------------------------- Cluster -----------------------------------
#
@ -58,8 +58,7 @@
#
#http.port: 9200
#
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html>
# For more information, consult the network module documentation.
#
# --------------------------------- Discovery ----------------------------------
#
@ -68,12 +67,11 @@
#
#discovery.zen.ping.unicast.hosts: ["host1", "host2"]
#
# Prevent the "split brain" by configuring the majority of nodes (total number of nodes / 2 + 1):
# Prevent the "split brain" by configuring the majority of nodes (total number of master-eligible nodes / 2 + 1):
#
#discovery.zen.minimum_master_nodes: 3
#
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery.html>
# For more information, consult the zen discovery module documentation.
#
# ---------------------------------- Gateway -----------------------------------
#
@ -81,8 +79,7 @@
#
#gateway.recover_after_nodes: 3
#
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-gateway.html>
# For more information, consult the gateway module documentation.
#
# ---------------------------------- Various -----------------------------------
#

View File

@ -19,15 +19,6 @@ only. If you want to apply synonyms during indexing please use the
standard <<analysis-synonym-tokenfilter,synonym token filter>>.
===============================
["NOTE",id="synonym-graph-query-note"]
===============================
The graph token stream created by this token filter requires special
query handling. Currently only the <<query-dsl-match-query, Match>> and
<<query-dsl-multi-match-query, Multi Match>> queries can do this. Using
it with any other type of analyzed query will potentially result in
incorrect search results.
===============================
Synonyms are configured using a configuration file.
Here is an example:

View File

@ -39,8 +39,8 @@ Unicast discovery provides the following settings with the `discovery.zen.ping.u
|=======================================================================
|Setting |Description
|`hosts` |Either an array setting or a comma delimited setting. Each
value should be in the form of `host:port` or `host` (where `port` defaults to `9300`). Note that IPv6 hosts must be
bracketed. Defaults to `127.0.0.1, [::1]`
value should be in the form of `host:port` or `host` (where `port` defaults to the setting `transport.profiles.default.port`
falling back to `transport.tcp.port` if not set). Note that IPv6 hosts must be bracketed. Defaults to `127.0.0.1, [::1]`
|`hosts.resolve_timeout` |The amount of time to wait for DNS lookups on each round of pinging. Specified as
<<time-units, time units>>. Defaults to 5s.
|=======================================================================

View File

@ -150,7 +150,7 @@ discovery.zen.ping.unicast.hosts:
- 192.168.1.11 <1>
- seeds.mydomain.com <2>
--------------------------------------------------
<1> The port will default to 9300 if not specified.
<1> The port will default to `transport.profiles.default.port` and fallback to `transport.tcp.port` if not specified.
<2> A hostname that resolves to multiple IP addresses will try all resolved addresses.
[float]

View File

@ -41,7 +41,7 @@ public final class CompilerSettings {
* Constant to be used for enabling additional internal compilation checks (slower).
*/
public static final String PICKY = "picky";
/**
* For testing: do not use.
*/
@ -49,15 +49,17 @@ public final class CompilerSettings {
/**
* The maximum number of statements allowed to be run in a loop.
* For now the number is set fairly high to accommodate users
* doing large update queries.
*/
private int maxLoopCounter = 10000;
private int maxLoopCounter = 1000000;
/**
* Whether to throw an exception on ambiguity or other internal parsing issues. This option
* also makes things slower; it is only for debugging.
*/
private boolean picky = false;
/**
* For testing. Do not use.
*/
@ -102,7 +104,7 @@ public final class CompilerSettings {
public void setPicky(boolean picky) {
this.picky = picky;
}
/**
* Returns the initial call site depth. This means we pretend we've already seen N different types,
* to better exercise fallback code in tests.
@ -110,7 +112,7 @@ public final class CompilerSettings {
public int getInitialCallSiteDepth() {
return initialCallSiteDepth;
}
/**
* For testing megamorphic fallbacks. Do not use.
* @see #getInitialCallSiteDepth()

View File

@ -148,10 +148,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase {
public void testLoopLimits() {
// right below limit: ok
exec("for (int x = 0; x < 9999; ++x) {}");
exec("for (int x = 0; x < 999999; ++x) {}");
PainlessError expected = expectScriptThrows(PainlessError.class, () -> {
exec("for (int x = 0; x < 10000; ++x) {}");
exec("for (int x = 0; x < 1000000; ++x) {}");
});
assertTrue(expected.getMessage().contains(
"The maximum number of statements that can be executed in a loop has been reached."));

View File

@ -36,6 +36,7 @@ import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
@ -176,9 +177,15 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY,
content)) {
parsedResponse = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT);
} catch (ParsingException e) {
/* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
* is totally wrong and we're probably not talking to Elasticsearch. */
throw new ElasticsearchException(
"Error parsing the response, remote is likely not an Elasticsearch instance", e);
}
} catch (IOException e) {
throw new ElasticsearchException("Error deserializing response", e);
throw new ElasticsearchException("Error deserializing response, remote is likely not an Elasticsearch instance",
e);
}
listener.accept(parsedResponse);
}

View File

@ -463,6 +463,17 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
assertThat(e.getCause().getCause().getMessage(), containsString("Response didn't include Content-Type: body={"));
}
public void testInvalidJsonThinksRemoveIsNotES() throws IOException {
Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("some_text.txt").doStart(null));
assertEquals("Error parsing the response, remote is likely not an Elasticsearch instance", e.getCause().getCause().getMessage());
}
public void testUnexpectedJsonThinksRemoveIsNotES() throws IOException {
// Use the response from a main action instead of a proper start response to generate a parse error
Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("main/2_3_3.json").doStart(null));
assertEquals("Error parsing the response, remote is likely not an Elasticsearch instance", e.getCause().getCause().getMessage());
}
private RemoteScrollableHitSource sourceWithMockedRemoteCall(String... paths) throws Exception {
return sourceWithMockedRemoteCall(true, ContentType.APPLICATION_JSON, paths);
}

View File

@ -0,0 +1 @@
I'm just text!

View File

@ -23,6 +23,8 @@ import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.services.s3.AmazonS3;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@ -39,13 +41,12 @@ public interface AwsS3Service extends LifecycleComponent {
/**
* cloud.aws.access_key: AWS Access key. Shared with discovery-ec2 plugin
*/
Setting<String> KEY_SETTING =
Setting.simpleString("cloud.aws.access_key", Property.NodeScope, Property.Filtered, Property.Shared);
SecureSetting<SecureString> KEY_SETTING = SecureSetting.secureString("cloud.aws.access_key", null, true, Property.Shared);
/**
* cloud.aws.secret_key: AWS Secret key. Shared with discovery-ec2 plugin
*/
Setting<String> SECRET_SETTING =
Setting.simpleString("cloud.aws.secret_key", Property.NodeScope, Property.Filtered, Property.Shared);
SecureSetting<SecureString> SECRET_SETTING = SecureSetting.secureString("cloud.aws.secret_key", null, true, Property.Shared);
/**
* cloud.aws.protocol: Protocol for AWS API: http or https. Defaults to https. Shared with discovery-ec2 plugin
*/
@ -63,12 +64,14 @@ public interface AwsS3Service extends LifecycleComponent {
/**
* cloud.aws.proxy.username: In case of proxy with auth, define the username. Shared with discovery-ec2 plugin
*/
Setting<String> PROXY_USERNAME_SETTING = Setting.simpleString("cloud.aws.proxy.username", Property.NodeScope, Property.Shared);
SecureSetting<SecureString> PROXY_USERNAME_SETTING =
SecureSetting.secureString("cloud.aws.proxy.username", null, true, Property.Shared);
/**
* cloud.aws.proxy.password: In case of proxy with auth, define the password. Shared with discovery-ec2 plugin
*/
Setting<String> PROXY_PASSWORD_SETTING =
Setting.simpleString("cloud.aws.proxy.password", Property.NodeScope, Property.Filtered, Property.Shared);
SecureSetting<SecureString> PROXY_PASSWORD_SETTING =
SecureSetting.secureString("cloud.aws.proxy.password", null, true, Property.Shared);
/**
* cloud.aws.signer: If you are using an old AWS API version, you can define a Signer. Shared with discovery-ec2 plugin
*/
@ -92,16 +95,13 @@ public interface AwsS3Service extends LifecycleComponent {
* cloud.aws.s3.access_key: AWS Access key specific for S3 API calls. Defaults to cloud.aws.access_key.
* @see AwsS3Service#KEY_SETTING
*/
Setting<String> KEY_SETTING =
new Setting<>("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, Function.identity(),
Property.NodeScope, Property.Filtered);
SecureSetting<SecureString> KEY_SETTING = SecureSetting.secureString("cloud.aws.s3.access_key", AwsS3Service.KEY_SETTING, true);
/**
* cloud.aws.s3.secret_key: AWS Secret key specific for S3 API calls. Defaults to cloud.aws.secret_key.
* @see AwsS3Service#SECRET_SETTING
*/
Setting<String> SECRET_SETTING =
new Setting<>("cloud.aws.s3.secret_key", AwsS3Service.SECRET_SETTING, Function.identity(),
Property.NodeScope, Property.Filtered);
SecureSetting<SecureString> SECRET_SETTING = SecureSetting.secureString("cloud.aws.s3.secret_key",
AwsS3Service.SECRET_SETTING, true);
/**
* cloud.aws.s3.protocol: Protocol for AWS API specific for S3 API calls: http or https. Defaults to cloud.aws.protocol.
* @see AwsS3Service#PROTOCOL_SETTING
@ -128,17 +128,16 @@ public interface AwsS3Service extends LifecycleComponent {
* Defaults to cloud.aws.proxy.username.
* @see AwsS3Service#PROXY_USERNAME_SETTING
*/
Setting<String> PROXY_USERNAME_SETTING =
new Setting<>("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, Function.identity(),
Property.NodeScope);
SecureSetting<SecureString> PROXY_USERNAME_SETTING =
SecureSetting.secureString("cloud.aws.s3.proxy.username", AwsS3Service.PROXY_USERNAME_SETTING, true);
/**
* cloud.aws.s3.proxy.password: In case of proxy with auth, define the password specific for S3 API calls.
* Defaults to cloud.aws.proxy.password.
* @see AwsS3Service#PROXY_PASSWORD_SETTING
*/
Setting<String> PROXY_PASSWORD_SETTING =
new Setting<>("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, Function.identity(),
Property.NodeScope, Property.Filtered);
SecureSetting<SecureString> PROXY_PASSWORD_SETTING =
SecureSetting.secureString("cloud.aws.s3.proxy.password", AwsS3Service.PROXY_PASSWORD_SETTING, true);
/**
* cloud.aws.s3.signer: If you are using an old AWS API version, you can define a Signer. Specific for S3 API calls.
* Defaults to cloud.aws.signer.

View File

@ -30,13 +30,17 @@ import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.s3.S3Repository;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.HashMap;
import java.util.Map;
@ -93,14 +97,15 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements
String proxyHost = CLOUD_S3.PROXY_HOST_SETTING.get(settings);
if (Strings.hasText(proxyHost)) {
Integer proxyPort = CLOUD_S3.PROXY_PORT_SETTING.get(settings);
String proxyUsername = CLOUD_S3.PROXY_USERNAME_SETTING.get(settings);
String proxyPassword = CLOUD_S3.PROXY_PASSWORD_SETTING.get(settings);
try (SecureString proxyUsername = CLOUD_S3.PROXY_USERNAME_SETTING.get(settings);
SecureString proxyPassword = CLOUD_S3.PROXY_PASSWORD_SETTING.get(settings)) {
clientConfiguration
.withProxyHost(proxyHost)
.withProxyPort(proxyPort)
.withProxyUsername(proxyUsername)
.withProxyPassword(proxyPassword);
clientConfiguration
.withProxyHost(proxyHost)
.withProxyPort(proxyPort)
.withProxyUsername(proxyUsername.toString())
.withProxyPassword(proxyPassword.toString());
}
}
if (maxRetries != null) {
@ -123,17 +128,19 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements
public static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings, Settings repositorySettings) {
AWSCredentialsProvider credentials;
String key = getValue(repositorySettings, settings,
S3Repository.Repository.KEY_SETTING, S3Repository.Repositories.KEY_SETTING);
String secret = getValue(repositorySettings, settings,
S3Repository.Repository.SECRET_SETTING, S3Repository.Repositories.SECRET_SETTING);
try (SecureString key = getValue(repositorySettings, settings,
S3Repository.Repository.KEY_SETTING, S3Repository.Repositories.KEY_SETTING);
SecureString secret = getValue(repositorySettings, settings,
S3Repository.Repository.SECRET_SETTING, S3Repository.Repositories.SECRET_SETTING)) {
if (key.isEmpty() && secret.isEmpty()) {
logger.debug("Using either environment variables, system properties or instance profile credentials");
credentials = new DefaultAWSCredentialsProviderChain();
} else {
logger.debug("Using basic key/secret credentials");
credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key, secret));
if (key.length() == 0 && secret.length() == 0) {
// TODO: add deprecation, except for using instance profile
logger.debug("Using either environment variables, system properties or instance profile credentials");
credentials = new DefaultAWSCredentialsProviderChain();
} else {
logger.debug("Using basic key/secret credentials");
credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key.toString(), secret.toString()));
}
}
return credentials;

View File

@ -28,6 +28,8 @@ import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@ -67,14 +69,14 @@ public class S3Repository extends BlobStoreRepository {
* repositories.s3.access_key: AWS Access key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.access_key.
* @see CLOUD_S3#KEY_SETTING
*/
Setting<String> KEY_SETTING =
new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), Property.NodeScope, Property.Filtered);
SecureSetting<SecureString> KEY_SETTING = SecureSetting.secureString("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, true);
/**
* repositories.s3.secret_key: AWS Secret key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.secret_key.
* @see CLOUD_S3#SECRET_SETTING
*/
Setting<String> SECRET_SETTING =
new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), Property.NodeScope, Property.Filtered);
SecureSetting<SecureString> SECRET_SETTING = SecureSetting.secureString("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, true);
/**
* repositories.s3.region: Region specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.region.
* @see CLOUD_S3#REGION_SETTING
@ -179,12 +181,13 @@ public class S3Repository extends BlobStoreRepository {
* access_key
* @see Repositories#KEY_SETTING
*/
Setting<String> KEY_SETTING = Setting.simpleString("access_key");
SecureSetting<SecureString> KEY_SETTING = SecureSetting.secureString("access_key", null, true);
/**
* secret_key
* @see Repositories#SECRET_SETTING
*/
Setting<String> SECRET_SETTING = Setting.simpleString("secret_key");
SecureSetting<SecureString> SECRET_SETTING = SecureSetting.secureString("secret_key", null, true);
/**
* bucket
* @see Repositories#BUCKET_SETTING

View File

@ -24,6 +24,7 @@ import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.s3.S3Repository;
import org.elasticsearch.test.ESTestCase;
@ -41,23 +42,123 @@ public class AwsS3ServiceImplTests extends ESTestCase {
public void testAWSCredentialsWithElasticsearchAwsSettings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
.put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret")
.build();
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "aws_key", "aws_secret");
}
public void testAWSCredentialsWithElasticsearchS3Settings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key")
.put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret")
.build();
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key");
secureSettings.setString(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3Settings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
secureSettings.setString(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key");
secureSettings.setString(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret");
}
public void testAWSCredentialsWithElasticsearchRepositoriesSettings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
}
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
secureSettings.setString(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key");
secureSettings.setString(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret");
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
}
public void testAWSCredentialsWithElasticsearchRepositoriesSettingsAndRepositorySettings() {
Settings repositorySettings = generateSecureRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
}
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettingsAndRepositorySettings() {
Settings repositorySettings = generateSecureRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettingsAndRepositorySettings() {
Settings repositorySettings = generateSecureRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.KEY_SETTING.getKey(), "aws_key");
secureSettings.setString(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret");
secureSettings.setString(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key");
secureSettings.setString(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret");
secureSettings.setString(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key");
secureSettings.setString(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
}
public void testAWSCredentialsWithElasticsearchAwsSettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
.put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "aws_key", "aws_secret");
assertWarnings("[" + AwsS3Service.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + AwsS3Service.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchS3SettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key")
.put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret");
assertWarnings("[" + AwsS3Service.CLOUD_S3.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3SettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
@ -66,18 +167,22 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret");
assertWarnings("[" + AwsS3Service.CLOUD_S3.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchRepositoriesSettings() {
public void testAWSCredentialsWithElasticsearchRepositoriesSettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key")
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
assertWarnings("[" + S3Repository.Repositories.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repositories.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettings() {
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
@ -86,9 +191,11 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
assertWarnings("[" + S3Repository.Repositories.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repositories.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettings() {
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
@ -99,18 +206,22 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret");
assertWarnings("[" + S3Repository.Repositories.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repositories.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchRepositoriesSettingsAndRepositorySettings() {
public void testAWSCredentialsWithElasticsearchRepositoriesSettingsAndRepositorySettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
Settings settings = Settings.builder()
.put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key")
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
assertWarnings("[" + S3Repository.Repository.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repository.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettingsAndRepositorySettings() {
public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettingsAndRepositorySettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
@ -119,9 +230,11 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
assertWarnings("[" + S3Repository.Repository.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repository.SECRET_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettingsAndRepositorySettings() {
public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettingsAndRepositorySettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.KEY_SETTING.getKey(), "aws_key")
@ -132,6 +245,8 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret")
.build();
launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret");
assertWarnings("[" + S3Repository.Repository.KEY_SETTING.getKey() + "] setting was deprecated",
"[" + S3Repository.Repository.SECRET_SETTING.getKey() + "] setting was deprecated");
}
protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings singleRepositorySettings, Settings settings,
@ -148,6 +263,46 @@ public class AwsS3ServiceImplTests extends ESTestCase {
}
public void testAWSConfigurationWithAwsSettings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username");
secureSettings.setString(AwsS3Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password");
Settings settings = Settings.builder()
.setSecureSettings(secureSettings)
.put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http")
.put(AwsS3Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host")
.put(AwsS3Service.PROXY_PORT_SETTING.getKey(), 8080)
.put(AwsS3Service.SIGNER_SETTING.getKey(), "AWS3SignerType")
.put(AwsS3Service.READ_TIMEOUT.getKey(), "10s")
.build();
launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username",
"aws_proxy_password", "AWS3SignerType", 3, false, 10000);
}
public void testAWSConfigurationWithAwsAndS3Settings() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(AwsS3Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username");
secureSettings.setString(AwsS3Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password");
secureSettings.setString(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.getKey(), "s3_proxy_username");
secureSettings.setString(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.getKey(), "s3_proxy_password");
Settings settings = Settings.builder()
.setSecureSettings(secureSettings)
.put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http")
.put(AwsS3Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host")
.put(AwsS3Service.PROXY_PORT_SETTING.getKey(), 8080)
.put(AwsS3Service.SIGNER_SETTING.getKey(), "AWS3SignerType")
.put(AwsS3Service.CLOUD_S3.PROTOCOL_SETTING.getKey(), "https")
.put(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.getKey(), "s3_proxy_host")
.put(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.getKey(), 8081)
.put(AwsS3Service.CLOUD_S3.SIGNER_SETTING.getKey(), "NoOpSignerType")
.put(AwsS3Service.CLOUD_S3.READ_TIMEOUT.getKey(), "10s")
.build();
launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTPS, "s3_proxy_host", 8081, "s3_proxy_username",
"s3_proxy_password", "NoOpSignerType", 3, false, 10000);
}
public void testAWSConfigurationWithAwsSettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http")
@ -160,9 +315,11 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.build();
launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username",
"aws_proxy_password", "AWS3SignerType", 3, false, 10000);
assertWarnings("[" + AwsS3Service.PROXY_USERNAME_SETTING.getKey() + "] setting was deprecated",
"[" + AwsS3Service.PROXY_PASSWORD_SETTING.getKey() + "] setting was deprecated");
}
public void testAWSConfigurationWithAwsAndS3Settings() {
public void testAWSConfigurationWithAwsAndS3SettingsBackcompat() {
Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null);
Settings settings = Settings.builder()
.put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http")
@ -181,6 +338,8 @@ public class AwsS3ServiceImplTests extends ESTestCase {
.build();
launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTPS, "s3_proxy_host", 8081, "s3_proxy_username",
"s3_proxy_password", "NoOpSignerType", 3, false, 10000);
assertWarnings("[" + AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.getKey() + "] setting was deprecated",
"[" + AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.getKey() + "] setting was deprecated");
}
protected void launchAWSConfigurationTest(Settings settings,
@ -236,6 +395,15 @@ public class AwsS3ServiceImplTests extends ESTestCase {
return builder.build();
}
private static Settings generateSecureRepositorySettings(String key, String secret, String region, String endpoint,
Integer maxRetries) {
Settings settings = generateRepositorySettings(null, null, region, endpoint, maxRetries);
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(S3Repository.Repository.KEY_SETTING.getKey(), key);
secureSettings.setString(S3Repository.Repository.SECRET_SETTING.getKey(), secret);
return Settings.builder().put(settings).setSecureSettings(secureSettings).build();
}
public void testDefaultEndpoint() {
launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, null, null), Settings.EMPTY, "");
launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), Settings.EMPTY,

View File

@ -25,6 +25,8 @@ import com.amazonaws.services.s3.AmazonS3;
import org.elasticsearch.cloud.aws.AwsS3Service;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -67,13 +69,21 @@ public class S3RepositoryTests extends ESTestCase {
}
public void testSettingsResolution() throws Exception {
Settings localSettings = Settings.builder().put(Repository.KEY_SETTING.getKey(), "key1").build();
Settings globalSettings = Settings.builder().put(Repositories.KEY_SETTING.getKey(), "key2").build();
MockSecureSettings secureSettings1 = new MockSecureSettings();
secureSettings1.setString(Repository.KEY_SETTING.getKey(), "key1");
Settings localSettings = Settings.builder().setSecureSettings(secureSettings1).build();
MockSecureSettings secureSettings2 = new MockSecureSettings();
secureSettings2.setString(Repositories.KEY_SETTING.getKey(), "key2");
Settings globalSettings = Settings.builder().setSecureSettings(secureSettings2).build();
assertEquals("key1", getValue(localSettings, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals("key1", getValue(localSettings, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals("key2", getValue(Settings.EMPTY, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals("", getValue(Settings.EMPTY, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals(new SecureString("key1".toCharArray()),
getValue(localSettings, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals(new SecureString("key1".toCharArray()),
getValue(localSettings, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals(new SecureString("key2".toCharArray()),
getValue(Settings.EMPTY, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING));
assertEquals(new SecureString("".toCharArray()),
getValue(Settings.EMPTY, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING));
}
public void testInvalidChunkBufferSizeSettings() throws IOException {

View File

@ -1,7 +1,12 @@
# Integration tests for Repository S3 component
#
"S3 repository can be registered":
- skip:
features: warnings
- do:
warnings:
- "[access_key] setting was deprecated in Elasticsearch and it will be removed in a future release! See the breaking changes lists in the documentation for details"
- "[secret_key] setting was deprecated in Elasticsearch and it will be removed in a future release! See the breaking changes lists in the documentation for details"
snapshot.create_repository:
repository: test_repo_s3_1
verify: false

View File

@ -26,6 +26,13 @@
- '{"index": {"_index": "test_index", "_type": "test_type"}}'
- '{"f1": "v5_mixed", "f2": 9}'
- do:
index:
index: test_index
type: test_type
id: d10
body: {"f1": "v6_mixed", "f2": 10}
- do:
indices.flush:
index: test_index
@ -34,7 +41,23 @@
search:
index: test_index
- match: { hits.total: 10 } # 5 docs from old cluster, 5 docs from mixed cluster
- match: { hits.total: 11 } # 5 docs from old cluster, 6 docs from mixed cluster
- do:
delete:
index: test_index
type: test_type
id: d10
- do:
indices.flush:
index: test_index
- do:
search:
index: test_index
- match: { hits.total: 10 }
---
"Verify custom cluster metadata still exists during upgrade":

View File

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.settings;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* A mock implementation of secure settings for tests to use.
*/
public class MockSecureSettings implements SecureSettings {
private Map<String, SecureString> secureStrings = new HashMap<>();
@Override
public boolean isLoaded() {
return true;
}
@Override
public boolean hasSetting(String setting) {
return secureStrings.containsKey(setting);
}
@Override
public SecureString getString(String setting) {
return secureStrings.get(setting);
}
public void setString(String setting, String value) {
secureStrings.put(setting, new SecureString(value.toCharArray()));
}
@Override
public void close() throws IOException {}
}
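For reference, a minimal sketch of how a test might wire these mock secure settings into a Settings instance, mirroring the plugin tests above (the setting key and value are illustrative placeholders):
// Illustrative usage in a test; "cloud.aws.access_key" and "aws_key" are placeholder values.
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("cloud.aws.access_key", "aws_key");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();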

View File

@ -130,6 +130,7 @@ import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
@ -308,7 +309,7 @@ public abstract class ESTestCase extends LuceneTestCase {
+ Arrays.asList(expectedWarnings) + "\nActual: " + actualWarnings,
expectedWarnings.length, actualWarnings.size());
for (String msg : expectedWarnings) {
assertThat(actualWarnings, hasItem(equalTo(msg)));
assertThat(actualWarnings, hasItem(containsString(msg)));
}
} finally {
resetDeprecationLogger();

View File

@ -400,23 +400,14 @@ public class MockTcpTransport extends TcpTransport<MockTcpTransport.MockChannel>
@Override
protected void stopInternal() {
ThreadPool.terminate(executor, 10, TimeUnit.SECONDS);
synchronized (openChannels) {
assert openChannels.isEmpty() : "there are still open channels: " + openChannels;
}
}
@Override
protected Version getCurrentVersion() {
return mockVersion;
}
@Override
protected void doClose() {
if (Thread.currentThread().isInterrupted() == false) {
// TCPTransport might be interrupted due to a timeout waiting for connections to be closed.
// In this case the thread is interrupted and we can't tell if we really missed something or if we are
// still closing connections. In such a case we don't assert on the open channels.
synchronized (openChannels) {
assert openChannels.isEmpty() : "there are still open channels: " + openChannels;
}
}
}
}