Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-09 14:34:43 +00:00)

Commit 8be3250601: Merge remote-tracking branch 'origin/master' into json_strict_duplicate_checks

Original commit: elastic/x-pack-elasticsearch@a010b1b4e9

@@ -52,6 +52,7 @@ import org.elasticsearch.xpack.action.XPackInfoAction;
import org.elasticsearch.xpack.action.XPackUsageAction;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.common.http.HttpSettings;
import org.elasticsearch.xpack.common.http.auth.HttpAuthFactory;
import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth;
@@ -316,7 +317,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
    @Override
    public List<Setting<?>> getSettings() {
        ArrayList<Setting<?>> settings = new ArrayList<>();
        settings.addAll(Security.getSettings(transportClientMode));
        settings.addAll(Security.getSettings(transportClientMode, extensionsService));
        settings.addAll(MonitoringSettings.getSettings());
        settings.addAll(watcher.getSettings());
        settings.addAll(licensing.getSettings());
@@ -335,10 +336,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
        settings.add(ReportingAttachmentParser.INTERVAL_SETTING);

        // http settings
        settings.add(Setting.simpleString("xpack.http.default_read_timeout", Setting.Property.NodeScope));
        settings.add(Setting.simpleString("xpack.http.default_connection_timeout", Setting.Property.NodeScope));
        settings.add(Setting.groupSetting("xpack.http.ssl.", Setting.Property.NodeScope));
        settings.add(Setting.groupSetting("xpack.http.proxy.", Setting.Property.NodeScope));
        settings.addAll(HttpSettings.getSettings());
        return settings;
    }

@@ -5,24 +5,19 @@
 */
package org.elasticsearch.xpack;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.ssl.SSLClientAuth;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.ssl.VerificationMode;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ssl.SSLClientAuth;
import org.elasticsearch.xpack.ssl.VerificationMode;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;

import static java.util.Collections.emptyList;

/**
 * A container for xpack setting constants.
 */
@@ -74,170 +69,23 @@ public class XPackSettings {
    public static final VerificationMode VERIFICATION_MODE_DEFAULT = VerificationMode.FULL;

    // global settings that apply to everything!
    private static final Setting<List<String>> CIPHERS_SETTING = Setting.listSetting("xpack.ssl.cipher_suites", DEFAULT_CIPHERS,
            Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("xpack.ssl.supported_protocols",
            DEFAULT_SUPPORTED_PROTOCOLS, Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<SSLClientAuth> CLIENT_AUTH_SETTING = new Setting<>("xpack.ssl.client_authentication",
            CLIENT_AUTH_DEFAULT.name(), SSLClientAuth::parse, Property.NodeScope, Property.Filtered);
    private static final Setting<VerificationMode> VERIFICATION_MODE_SETTING = new Setting<>("xpack.ssl.verification_mode",
            VERIFICATION_MODE_DEFAULT.name(), VerificationMode::parse, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> KEYSTORE_PATH_SETTING = new Setting<>("xpack.ssl.keystore.path",
            s -> System.getProperty("javax.net.ssl.keyStore"), Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> KEYSTORE_PASSWORD_SETTING = new Setting<>("xpack.ssl.keystore.password",
            s -> System.getProperty("javax.net.ssl.keyStorePassword"), Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<String> KEYSTORE_ALGORITHM_SETTING = new Setting<>("xpack.ssl.keystore.algorithm",
            s -> System.getProperty("ssl.KeyManagerFactory.algorithm", KeyManagerFactory.getDefaultAlgorithm()),
            Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> KEYSTORE_KEY_PASSWORD_SETTING =
            new Setting<>("xpack.ssl.keystore.key_password", KEYSTORE_PASSWORD_SETTING, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRUSTSTORE_PATH_SETTING = new Setting<>("xpack.ssl.truststore.path",
            s -> System.getProperty("javax.net.ssl.trustStore"), Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRUSTSTORE_PASSWORD_SETTING = new Setting<>("xpack.ssl.truststore.password",
            s -> System.getProperty("javax.net.ssl.trustStorePassword"), Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<String> TRUSTSTORE_ALGORITHM_SETTING = new Setting<>("xpack.ssl.truststore.algorithm",
            s -> System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm()),
            Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> KEY_PATH_SETTING =
            new Setting<>("xpack.ssl.key", (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> KEY_PASSWORD_SETTING =
            new Setting<>("xpack.ssl.key_passphrase", (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> CERT_SETTING =
            new Setting<>("xpack.ssl.certificate", (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> CA_PATHS_SETTING = Setting.listSetting("xpack.ssl.certificate_authorities",
            Collections.emptyList(), s -> s, Property.NodeScope, Property.Filtered);
    public static final String GLOBAL_SSL_PREFIX = "xpack.ssl.";
    private static final SSLConfigurationSettings GLOBAL_SSL = SSLConfigurationSettings.withPrefix(GLOBAL_SSL_PREFIX);

    // http specific settings
    private static final Setting<List<String>> HTTP_CIPHERS_SETTING = Setting.listSetting("xpack.security.http.ssl.cipher_suites",
            DEFAULT_CIPHERS, Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> HTTP_SUPPORTED_PROTOCOLS_SETTING =
            Setting.listSetting("xpack.security.http.ssl.supported_protocols", emptyList(), Function.identity(),
                    Property.NodeScope, Property.Filtered);
    private static final Setting<SSLClientAuth> HTTP_CLIENT_AUTH_SETTING = new Setting<>("xpack.security.http.ssl.client_authentication",
            CLIENT_AUTH_DEFAULT.name(), SSLClientAuth::parse, Property.NodeScope, Property.Filtered);
    private static final Setting<VerificationMode> HTTP_VERIFICATION_MODE_SETTING =
            new Setting<>("xpack.security.http.ssl.verification_mode", VERIFICATION_MODE_DEFAULT.name(), VerificationMode::parse,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_KEYSTORE_PATH_SETTING = new Setting<>("xpack.security.http.ssl.keystore.path",
            (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_KEYSTORE_PASSWORD_SETTING =
            new Setting<>("xpack.security.http.ssl.keystore.password", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<String> HTTP_KEYSTORE_ALGORITHM_SETTING = new Setting<>("xpack.security.http.ssl.keystore.algorithm",
            "", Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_KEYSTORE_KEY_PASSWORD_SETTING =
            new Setting<>("xpack.security.http.ssl.keystore.key_password", HTTP_KEYSTORE_PASSWORD_SETTING, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_TRUSTSTORE_PATH_SETTING = new Setting<>("xpack.security.http.ssl.truststore.path",
            (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_TRUSTSTORE_PASSWORD_SETTING =
            new Setting<>("xpack.security.http.ssl.truststore.password", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<String> HTTP_TRUSTSTORE_ALGORITHM_SETTING = new Setting<>("xpack.security.http.ssl.truststore.algorithm",
            "", Function.identity(), Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_KEY_PATH_SETTING =
            new Setting<>("xpack.security.http.ssl.key", (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_KEY_PASSWORD_SETTING = new Setting<>("xpack.security.http.ssl.key_passphrase",
            (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> HTTP_CERT_SETTING = new Setting<>("xpack.security.http.ssl.certificate",
            (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> HTTP_CA_PATHS_SETTING =
            Setting.listSetting("xpack.security.http.ssl.certificate_authorities", emptyList(), s -> s,
                    Property.NodeScope, Property.Filtered);
    public static final String HTTP_SSL_PREFIX = Security.setting("http.ssl.");
    private static final SSLConfigurationSettings HTTP_SSL = SSLConfigurationSettings.withPrefix(HTTP_SSL_PREFIX);

    // transport specific settings
    private static final Setting<List<String>> TRANSPORT_CIPHERS_SETTING =
            Setting.listSetting("xpack.security.transport.ssl.cipher_suites", DEFAULT_CIPHERS, Function.identity(),
                    Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> TRANSPORT_SUPPORTED_PROTOCOLS_SETTING =
            Setting.listSetting("xpack.security.transport.ssl.supported_protocols", emptyList(), Function.identity(),
                    Property.NodeScope, Property.Filtered);
    private static final Setting<SSLClientAuth> TRANSPORT_CLIENT_AUTH_SETTING =
            new Setting<>("xpack.security.transport.ssl.client_authentication", CLIENT_AUTH_DEFAULT.name(), SSLClientAuth::parse,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<VerificationMode> TRANSPORT_VERIFICATION_MODE_SETTING =
            new Setting<>("xpack.security.transport.ssl.verification_mode", VERIFICATION_MODE_DEFAULT.name(), VerificationMode::parse,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_KEYSTORE_PATH_SETTING =
            new Setting<>("xpack.security.transport.ssl.keystore.path", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_KEYSTORE_PASSWORD_SETTING =
            new Setting<>("xpack.security.transport.ssl.keystore.password", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<String> TRANSPORT_KEYSTORE_ALGORITHM_SETTING =
            new Setting<>("xpack.security.transport.ssl.keystore.algorithm", "", Function.identity(), Property.NodeScope,
                    Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_KEYSTORE_KEY_PASSWORD_SETTING =
            new Setting<>("xpack.security.transport.ssl.keystore.key_password", TRANSPORT_KEYSTORE_PASSWORD_SETTING, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_TRUSTSTORE_PATH_SETTING =
            new Setting<>("xpack.security.transport.ssl.truststore.path", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_TRUSTSTORE_PASSWORD_SETTING =
            new Setting<>("xpack.security.transport.ssl.truststore.password", (String) null, Optional::ofNullable,
                    Property.NodeScope, Property.Filtered);
    private static final Setting<String> TRANSPORT_TRUSTSTORE_ALGORITHM_SETTING =
            new Setting<>("xpack.security.transport.ssl.truststore.algorithm", "", Function.identity(),
                    Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_KEY_PATH_SETTING =
            new Setting<>("xpack.security.transport.ssl.key", (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_KEY_PASSWORD_SETTING =
            new Setting<>("xpack.security.transport.ssl.key_passphrase", (String) null, Optional::ofNullable, Property.NodeScope,
                    Property.Filtered);
    private static final Setting<Optional<String>> TRANSPORT_CERT_SETTING = new Setting<>("xpack.security.transport.ssl.certificate",
            (String) null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    private static final Setting<List<String>> TRANSPORT_CA_PATHS_SETTING =
            Setting.listSetting("xpack.security.transport.ssl.certificate_authorities", emptyList(), s -> s,
                    Property.NodeScope, Property.Filtered);
    public static final String TRANSPORT_SSL_PREFIX = Security.setting("transport.ssl.");
    private static final SSLConfigurationSettings TRANSPORT_SSL = SSLConfigurationSettings.withPrefix(TRANSPORT_SSL_PREFIX);

    /* End SSL settings */

    static {
        ALL_SETTINGS.add(CIPHERS_SETTING);
        ALL_SETTINGS.add(SUPPORTED_PROTOCOLS_SETTING);
        ALL_SETTINGS.add(KEYSTORE_PATH_SETTING);
        ALL_SETTINGS.add(KEYSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(KEYSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(KEYSTORE_KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(KEY_PATH_SETTING);
        ALL_SETTINGS.add(KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(CERT_SETTING);
        ALL_SETTINGS.add(TRUSTSTORE_PATH_SETTING);
        ALL_SETTINGS.add(TRUSTSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(TRUSTSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(CA_PATHS_SETTING);
        ALL_SETTINGS.add(VERIFICATION_MODE_SETTING);
        ALL_SETTINGS.add(CLIENT_AUTH_SETTING);
        ALL_SETTINGS.add(HTTP_CIPHERS_SETTING);
        ALL_SETTINGS.add(HTTP_SUPPORTED_PROTOCOLS_SETTING);
        ALL_SETTINGS.add(HTTP_KEYSTORE_PATH_SETTING);
        ALL_SETTINGS.add(HTTP_KEYSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(HTTP_KEYSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(HTTP_KEYSTORE_KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(HTTP_KEY_PATH_SETTING);
        ALL_SETTINGS.add(HTTP_KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(HTTP_CERT_SETTING);
        ALL_SETTINGS.add(HTTP_TRUSTSTORE_PATH_SETTING);
        ALL_SETTINGS.add(HTTP_TRUSTSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(HTTP_TRUSTSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(HTTP_CA_PATHS_SETTING);
        ALL_SETTINGS.add(HTTP_VERIFICATION_MODE_SETTING);
        ALL_SETTINGS.add(HTTP_CLIENT_AUTH_SETTING);
        ALL_SETTINGS.add(TRANSPORT_CIPHERS_SETTING);
        ALL_SETTINGS.add(TRANSPORT_SUPPORTED_PROTOCOLS_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEYSTORE_PATH_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEYSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEYSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEYSTORE_KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEY_PATH_SETTING);
        ALL_SETTINGS.add(TRANSPORT_KEY_PASSWORD_SETTING);
        ALL_SETTINGS.add(TRANSPORT_CERT_SETTING);
        ALL_SETTINGS.add(TRANSPORT_TRUSTSTORE_PATH_SETTING);
        ALL_SETTINGS.add(TRANSPORT_TRUSTSTORE_PASSWORD_SETTING);
        ALL_SETTINGS.add(TRANSPORT_TRUSTSTORE_ALGORITHM_SETTING);
        ALL_SETTINGS.add(TRANSPORT_CA_PATHS_SETTING);
        ALL_SETTINGS.add(TRANSPORT_VERIFICATION_MODE_SETTING);
        ALL_SETTINGS.add(TRANSPORT_CLIENT_AUTH_SETTING);
        ALL_SETTINGS.addAll(GLOBAL_SSL.getAllSettings());
        ALL_SETTINGS.addAll(HTTP_SSL.getAllSettings());
        ALL_SETTINGS.addAll(TRANSPORT_SSL.getAllSettings());
    }

    /**

@@ -39,12 +39,6 @@ import java.util.Map;
 */
public class HttpClient extends AbstractComponent {

    static final String SETTINGS_SSL_PREFIX = "xpack.http.ssl.";
    static final String SETTINGS_PROXY_PREFIX = "xpack.http.proxy.";

    static final String SETTINGS_PROXY_HOST = SETTINGS_PROXY_PREFIX + "host";
    static final String SETTINGS_PROXY_PORT = SETTINGS_PROXY_PREFIX + "port";

    private final HttpAuthRegistry httpAuthRegistry;
    private final TimeValue defaultConnectionTimeout;
    private final TimeValue defaultReadTimeout;
@@ -55,21 +49,27 @@ public class HttpClient extends AbstractComponent {
    public HttpClient(Settings settings, HttpAuthRegistry httpAuthRegistry, SSLService sslService) {
        super(settings);
        this.httpAuthRegistry = httpAuthRegistry;
        this.defaultConnectionTimeout = settings.getAsTime("xpack.http.default_connection_timeout", TimeValue.timeValueSeconds(10));
        this.defaultReadTimeout = settings.getAsTime("xpack.http.default_read_timeout", TimeValue.timeValueSeconds(10));
        Integer proxyPort = settings.getAsInt(SETTINGS_PROXY_PORT, null);
        String proxyHost = settings.get(SETTINGS_PROXY_HOST, null);
        this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings);
        this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings);

        final Integer proxyPort;
        if (HttpSettings.PROXY_HOST.exists(settings)) {
            proxyPort = HttpSettings.PROXY_PORT.get(settings);
        } else {
            proxyPort = null;
        }
        final String proxyHost = HttpSettings.PROXY_HOST.get(settings);
        if (proxyPort != null && Strings.hasText(proxyHost)) {
            this.proxy = new HttpProxy(proxyHost, proxyPort);
            logger.info("Using default proxy for http input and slack/hipchat/pagerduty/webhook actions [{}:{}]", proxyHost, proxyPort);
        } else if (proxyPort == null && Strings.hasText(proxyHost) == false) {
            this.proxy = HttpProxy.NO_PROXY;
        } else {
            throw new IllegalArgumentException("HTTP Proxy requires both settings: [" + SETTINGS_PROXY_HOST + "] and [" +
                    SETTINGS_PROXY_PORT + "]");
            throw new IllegalArgumentException("HTTP Proxy requires both settings: [" + HttpSettings.PROXY_HOST_KEY + "] and [" +
                    HttpSettings.PROXY_PORT_KEY + "]");
        }
        Settings sslSettings = settings.getByPrefix(SETTINGS_SSL_PREFIX);
        this.sslSocketFactory = sslService.sslSocketFactory(settings.getByPrefix(SETTINGS_SSL_PREFIX));
        Settings sslSettings = settings.getByPrefix(HttpSettings.SSL_KEY_PREFIX);
        this.sslSocketFactory = sslService.sslSocketFactory(settings.getByPrefix(HttpSettings.SSL_KEY_PREFIX));
        this.isHostnameVerificationEnabled = sslService.getVerificationMode(sslSettings, Settings.EMPTY).isHostnameVerificationEnabled();
    }

@@ -0,0 +1,49 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.common.http;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;

import java.util.ArrayList;
import java.util.List;

/**
 * Handles the configuration and parsing of settings for the <code>xpack.http.</code> prefix
 */
public class HttpSettings {

    private static final TimeValue DEFAULT_READ_TIMEOUT = TimeValue.timeValueSeconds(10);
    private static final TimeValue DEFAULT_CONNECTION_TIMEOUT = DEFAULT_READ_TIMEOUT;

    static final Setting<TimeValue> READ_TIMEOUT = Setting.timeSetting("xpack.http.default_read_timeout",
            DEFAULT_READ_TIMEOUT, Setting.Property.NodeScope);
    static final Setting<TimeValue> CONNECTION_TIMEOUT = Setting.timeSetting("xpack.http.default_connection_timeout",
            DEFAULT_CONNECTION_TIMEOUT, Setting.Property.NodeScope);

    static final String PROXY_HOST_KEY = "xpack.http.proxy.host";
    static final String PROXY_PORT_KEY = "xpack.http.proxy.port";
    static final String SSL_KEY_PREFIX = "xpack.http.ssl.";

    static final Setting<String> PROXY_HOST = Setting.simpleString(PROXY_HOST_KEY, Setting.Property.NodeScope);
    static final Setting<Integer> PROXY_PORT = Setting.intSetting(PROXY_PORT_KEY, 0, 0, 0xFFFF, Setting.Property.NodeScope);

    private static final SSLConfigurationSettings SSL = SSLConfigurationSettings.withPrefix(SSL_KEY_PREFIX);

    public static List<? extends Setting<?>> getSettings() {
        final ArrayList<Setting<?>> settings = new ArrayList<>();
        settings.addAll(SSL.getAllSettings());
        settings.add(READ_TIMEOUT);
        settings.add(CONNECTION_TIMEOUT);
        settings.add(PROXY_HOST);
        settings.add(PROXY_PORT);
        return settings;
    }

    private HttpSettings() {
    }
}

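The new HttpSettings class replaces the ad hoc string lookups that HttpClient performed previously (see the HttpClient hunk above) with typed Setting constants that carry their own defaults and are registered once through HttpSettings.getSettings(). A minimal sketch of the difference is below; it assumes same-package access to the package-private constants, and the builder key/value shown are illustrative only, not part of this commit.

    // Sketch: reading a timeout the old way versus through the typed Setting.
    Settings settings = Settings.builder()
            .put("xpack.http.default_read_timeout", "30s")   // example value
            .build();

    // Old style: untyped lookup, with the default repeated at every call site.
    TimeValue oldStyle = settings.getAsTime("xpack.http.default_read_timeout", TimeValue.timeValueSeconds(10));

    // New style: the default lives on the Setting constant, and the key is
    // registered (and therefore validated) via HttpSettings.getSettings().
    TimeValue newStyle = HttpSettings.READ_TIMEOUT.get(settings);
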
@@ -9,10 +9,13 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.authc.AuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.RealmConfig;


/**
@@ -49,6 +52,16 @@ public abstract class XPackExtension {
        return Collections.emptyMap();
    }

    /**
     * Returns the set of {@link Setting settings} that may be configured for each type of realm.
     *
     * Each <em>setting key</em> must be unqualified and is in the same format as will be provided via {@link RealmConfig#settings()}.
     * If a given realm-type is not present in the returned map, then it will be treated as if it supported <em>all</em> possible settings.
     *
     * The life-cycle of an extension dictates that this method will be called before {@link #getRealms(ResourceWatcherService)}
     */
    public Map<String, Set<Setting<?>>> getRealmSettings() { return Collections.emptyMap(); }

    /**
     * Returns a handler for authentication failures, or null to use the default handler.
     *

@@ -124,7 +124,7 @@ public class RestGraphAction extends XPackRestHandler {
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (context.getParseFieldMatcher().match(fieldName, QUERY_FIELD)) {
                    context.parseInnerQueryBuilder().ifPresent(currentHop::guidingQuery);
                    currentHop.guidingQuery(context.parseInnerQueryBuilder());
                } else if (context.getParseFieldMatcher().match(fieldName, CONNECTIONS_FIELD)) {
                    parseHop(parser, context, graphRequest.createNextHop(null), graphRequest);
                } else if (context.getParseFieldMatcher().match(fieldName, CONTROLS_FIELD)) {

@@ -13,6 +13,7 @@ import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.util.Providers;
@@ -47,6 +48,7 @@ import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.extensions.XPackExtension;
import org.elasticsearch.xpack.extensions.XPackExtensionsService;
import org.elasticsearch.xpack.security.action.SecurityActionModule;
import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter;
import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheAction;
@@ -79,15 +81,14 @@ import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail;
import org.elasticsearch.xpack.security.authc.AuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.InternalRealms;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.RealmSettings;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.authc.file.FileRealm;
import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
@@ -246,13 +247,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
        final AnonymousUser anonymousUser = new AnonymousUser(settings);
        final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore, anonymousUser);
        Map<String, Realm.Factory> realmFactories = new HashMap<>();
        realmFactories.put(FileRealm.TYPE, config -> new FileRealm(config, resourceWatcherService));
        realmFactories.put(NativeRealm.TYPE, config -> new NativeRealm(config, nativeUsersStore));
        realmFactories.put(LdapRealm.AD_TYPE,
                config -> new LdapRealm(LdapRealm.AD_TYPE, config, resourceWatcherService, sslService, threadPool));
        realmFactories.put(LdapRealm.LDAP_TYPE,
                config -> new LdapRealm(LdapRealm.LDAP_TYPE, config, resourceWatcherService, sslService, threadPool));
        realmFactories.put(PkiRealm.TYPE, config -> new PkiRealm(config, resourceWatcherService, sslService));
        realmFactories.putAll(InternalRealms.getFactories(threadPool, resourceWatcherService, sslService, nativeUsersStore));
        for (XPackExtension extension : extensions) {
            Map<String, Realm.Factory> newRealms = extension.getRealms(resourceWatcherService);
            for (Map.Entry<String, Realm.Factory> entry : newRealms.entrySet()) {
@@ -380,7 +375,10 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
        return settingsBuilder.build();
    }

    public static List<Setting<?>> getSettings(boolean transportClientMode) {
    /**
     * Get the {@link Setting setting configuration} for all security components, including those defined in extensions.
     */
    public static List<Setting<?>> getSettings(boolean transportClientMode, @Nullable XPackExtensionsService extensionsService) {
        List<Setting<?>> settingsList = new ArrayList<>();
        // always register for both client and node modes
        settingsList.add(USER_SETTING);
@@ -401,7 +399,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {

        // authentication settings
        AnonymousUser.addSettings(settingsList);
        Realms.addSettings(settingsList);
        RealmSettings.addSettings(settingsList, extensionsService == null ? null : extensionsService.getExtensions());
        NativeRolesStore.addSettings(settingsList);
        AuthenticationService.addSettings(settingsList);
        AuthorizationService.addSettings(settingsList);

@@ -152,6 +152,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
                destructiveOperations.failDestructive(indicesRequest.indices());
            } catch(IllegalArgumentException e) {
                listener.onFailure(e);
                return;
            }
        }

@@ -0,0 +1,91 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.authc.file.FileRealm;
import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.xpack.ssl.SSLService;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Provides a single entry point into dealing with all standard XPack security {@link Realm realms}.
 * This class does not handle extensions.
 * @see Realms for the component that manages configured realms (including custom extension realms)
 */
public class InternalRealms {

    /**
     * The list of all <em>internal</em> realm types, excluding {@link ReservedRealm#TYPE}.
     */
    private static final Set<String> TYPES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
            NativeRealm.TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, LdapRealm.LDAP_TYPE, PkiRealm.TYPE
    )));

    /**
     * Determines whether <code>type</code> is an internal realm-type, optionally considering
     * the {@link ReservedRealm}.
     */
    public static boolean isInternalRealm(String type, boolean includeReservedRealm) {
        if (TYPES.contains(type)) {
            return true;
        }
        if (includeReservedRealm && ReservedRealm.TYPE.equals(type)) {
            return true;
        }
        return false;
    }

    /**
     * Creates {@link Realm.Factory factories} for each <em>internal</em> realm type.
     * This excludes the {@link ReservedRealm}, as it cannot be created dynamically.
     * @return A map from <em>realm-type</em> to <code>Factory</code>
     */
    public static Map<String, Realm.Factory> getFactories(ThreadPool threadPool, ResourceWatcherService resourceWatcherService,
                                                          SSLService sslService, NativeUsersStore nativeUsersStore){
        Map<String, Realm.Factory> map = new HashMap<>();
        map.put(FileRealm.TYPE, config -> new FileRealm(config, resourceWatcherService));
        map.put(NativeRealm.TYPE, config -> new NativeRealm(config, nativeUsersStore));
        map.put(LdapRealm.AD_TYPE,
                config -> new LdapRealm(LdapRealm.AD_TYPE, config, resourceWatcherService, sslService, threadPool));
        map.put(LdapRealm.LDAP_TYPE,
                config -> new LdapRealm(LdapRealm.LDAP_TYPE, config, resourceWatcherService, sslService, threadPool));
        map.put(PkiRealm.TYPE, config -> new PkiRealm(config, resourceWatcherService, sslService));
        return Collections.unmodifiableMap(map);
    }

    /**
     * Provides the {@link Setting setting configuration} for each <em>internal</em> realm type.
     * This excludes the {@link ReservedRealm}, as it cannot be configured dynamically.
     * @return A map from <em>realm-type</em> to a collection of <code>Setting</code> objects.
     */
    public static Map<String,Set<Setting<?>>> getSettings() {
        Map<String, Set<Setting<?>>> map = new HashMap<>();
        map.put(FileRealm.TYPE, FileRealm.getSettings());
        map.put(NativeRealm.TYPE, NativeRealm.getSettings());
        map.put(LdapRealm.AD_TYPE, LdapRealm.getSettings(LdapRealm.AD_TYPE));
        map.put(LdapRealm.LDAP_TYPE, LdapRealm.getSettings(LdapRealm.LDAP_TYPE));
        map.put(PkiRealm.TYPE, PkiRealm.getSettings());
        return Collections.unmodifiableMap(map);
    }

    private InternalRealms() {
    }

}

@@ -29,8 +29,8 @@ public class RealmConfig {
        this.settings = settings;
        this.globalSettings = globalSettings;
        this.env = env;
        enabled = settings.getAsBoolean("enabled", true);
        order = settings.getAsInt("order", Integer.MAX_VALUE);
        enabled = RealmSettings.ENABLED_SETTING.get(settings);
        order = RealmSettings.ORDER_SETTING.get(settings);
    }

    public String name() {

@@ -0,0 +1,154 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc;

import org.elasticsearch.common.settings.AbstractScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.extensions.XPackExtension;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;

import static org.elasticsearch.common.Strings.isNullOrEmpty;
import static org.elasticsearch.xpack.security.Security.setting;

/**
 * Configures the {@link Setting#groupSetting(String, Consumer, Setting.Property...) group setting} for security
 * {@link Realm realms}, with validation according to the realm type.
 * <p>
 * The allowable settings for a given realm are dependent on the {@link Realm#type() realm type}, so it is not possible
 * to simply provide a list of {@link Setting} objects and rely on the global setting validation (e.g. A custom realm-type might
 * define a setting with the same logical key as an internal realm-type, but a different data type).
 * </p> <p>
 * Instead, realm configuration relies on the <code>validator</code> parameter to
 * {@link Setting#groupSetting(String, Consumer, Setting.Property...)} in order to validate each realm in a way that respects the
 * declared <code>type</code>.
 * Internally, this validation delegates to {@link AbstractScopedSettings#validate(Settings)} so that validation is reasonably aligned
 * with the way we validate settings globally.
 * </p>
 * <p>
 * The allowable settings for each realm-type are determined by calls to {@link InternalRealms#getSettings()} and
 * {@link XPackExtension#getRealmSettings()}
 */
public class RealmSettings {

    public static final String PREFIX = setting("authc.realms.");

    static final Setting<String> TYPE_SETTING = Setting.simpleString("type", Setting.Property.NodeScope);
    static final Setting<Boolean> ENABLED_SETTING = Setting.boolSetting("enabled", true, Setting.Property.NodeScope);
    static final Setting<Integer> ORDER_SETTING = Setting.intSetting("order", Integer.MAX_VALUE, Setting.Property.NodeScope);

    /**
     * Add the {@link Setting} configuration for <em>all</em> realms to the provided list.
     */
    public static void addSettings(List<Setting<?>> settingsList, List<XPackExtension> extensions) {
        settingsList.add(getGroupSetting(extensions));
    }

    /**
     * Extract the child {@link Settings} for the {@link #PREFIX realms prefix}.
     * The top level names in the returned <code>Settings</code> will be the names of the configured realms.
     */
    public static Settings get(Settings settings) {
        return settings.getByPrefix(RealmSettings.PREFIX);
    }

    /**
     * Convert the child {@link Setting} for the provided realm into a fully scoped key for use in an error message.
     * @see #PREFIX
     */
    public static String getFullSettingKey(RealmConfig realm, Setting<?> setting) {
        return getFullSettingKey(realm.name(), setting);
    }

    /**
     * @see #getFullSettingKey(RealmConfig, Setting)
     */
    public static String getFullSettingKey(RealmConfig realm, String subKey) {
        return getFullSettingKey(realm.name(), subKey);
    }

    private static String getFullSettingKey(String name, Setting<?> setting) {
        return getFullSettingKey(name, setting.getKey());
    }

    private static String getFullSettingKey(String name, String subKey) {
        return PREFIX + name + "." + subKey;
    }

    private static Setting<Settings> getGroupSetting(List<XPackExtension> extensions) {
        return Setting.groupSetting(PREFIX, getSettingsValidator(extensions), Setting.Property.NodeScope);
    }

    private static Consumer<Settings> getSettingsValidator(List<XPackExtension> extensions) {
        final Map<String, Set<Setting<?>>> childSettings = new HashMap<>(InternalRealms.getSettings());
        if (extensions != null) {
            extensions.forEach(ext -> {
                final Map<String, Set<Setting<?>>> extSettings = ext.getRealmSettings();
                extSettings.keySet().stream().filter(childSettings::containsKey).forEach(type -> {
                    throw new IllegalArgumentException("duplicate realm type " + type);
                });
                childSettings.putAll(extSettings);
            });
        }
        childSettings.forEach(RealmSettings::verify);
        return validator(childSettings);
    }

    private static void verify(String type, Set<Setting<?>> settings) {
        Set<String> keys = new HashSet<>();
        settings.forEach(setting -> {
            final String key = setting.getKey();
            if (keys.contains(key)) {
                throw new IllegalArgumentException("duplicate setting for key " + key + " in realm type " + type);
            }
            keys.add(key);
            if (setting.getProperties().contains(Setting.Property.NodeScope) == false) {
                throw new IllegalArgumentException("setting " + key + " in realm type " + type + " does not have NodeScope");
            }
        });
    }

    private static Consumer<Settings> validator(Map<String, Set<Setting<?>>> validSettings) {
        return (settings) -> settings.names().forEach(n -> validateRealm(n, settings.getAsSettings(n), validSettings));
    }

    private static void validateRealm(String name, Settings settings, Map<String, Set<Setting<?>>> validSettings) {
        final String type = TYPE_SETTING.get(settings);
        if (isNullOrEmpty(type)) {
            throw new IllegalArgumentException("missing realm type [" + getFullSettingKey(name, TYPE_SETTING) + "] for realm");
        }
        validateRealm(name, type, settings, validSettings.get(type));
    }

    private static void validateRealm(String name, String type, Settings settings, Set<Setting<?>> validSettings) {
        if (validSettings == null) {
            // For backwards compatibility, we assume that if we don't know the valid settings for a realm type then everything
            // is valid. Ideally we would reject these, but XPackExtension doesn't enforce that realm-factories and realm-settings
            // are perfectly aligned.
            return;
        }
        Set<Setting<?>> settingSet = new HashSet<>(validSettings);
        settingSet.add(TYPE_SETTING);
        settingSet.add(ENABLED_SETTING);
        settingSet.add(ORDER_SETTING);
        final AbstractScopedSettings validator = new AbstractScopedSettings(settings, settingSet, Setting.Property.NodeScope) { };
        try {
            validator.validate(settings);
        } catch (RuntimeException e) {
            throw new IllegalArgumentException("incorrect configuration for realm [" + getFullSettingKey(name, "")
                    + "] of type " + type, e);
        }
    }

    private RealmSettings() {
    }
}

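The validator wired into the realms group setting above checks every configured realm against the settings declared for its type by InternalRealms.getSettings() and XPackExtension.getRealmSettings(). A rough sketch of the input it operates on follows; it assumes Security.setting("authc.realms.") resolves PREFIX to "xpack.security.authc.realms.", and the realm names and values are made up for illustration only.

    // Sketch: node settings as the group-setting validator would see them.
    Settings nodeSettings = Settings.builder()
            .put("xpack.security.authc.realms.file1.type", "file")   // has a type, so its child keys get validated
            .put("xpack.security.authc.realms.file1.order", "0")
            .put("xpack.security.authc.realms.broken.order", "1")    // no type declared for "broken"
            .build();

    // RealmSettings.get(...) strips the prefix; the top-level names of the result
    // are the realm names, each with its own child settings.
    Settings realms = RealmSettings.get(nodeSettings);
    for (String realmName : realms.names()) {
        Settings realmSettings = realms.getAsSettings(realmName);
        // For "broken", validateRealm(realmName, realmSettings, ...) would throw:
        //   missing realm type [xpack.security.authc.realms.broken.type] for realm
        // For "file1", its keys are validated against the file realm's declared settings.
    }
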
@@ -18,8 +18,6 @@ import java.util.Set;

import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.license.XPackLicenseState;
@@ -27,21 +25,13 @@ import org.elasticsearch.license.XPackLicenseState.AllowedRealmType;
import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.authc.file.FileRealm;
import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;

import static org.elasticsearch.xpack.security.Security.setting;

/**
 * Serves as a realms registry (also responsible for ordering the realms appropriately)
 */
public class Realms extends AbstractComponent implements Iterable<Realm> {

    static final List<String> INTERNAL_REALM_TYPES =
            Arrays.asList(ReservedRealm.TYPE, NativeRealm.TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, LdapRealm.LDAP_TYPE, PkiRealm.TYPE);

    public static final Setting<Settings> REALMS_GROUPS_SETTINGS = Setting.groupSetting(setting("authc.realms."), Property.NodeScope);

    private final Environment env;
    private final Map<String, Realm.Factory> factories;
    private final XPackLicenseState licenseState;
@@ -68,7 +58,7 @@ public class Realms extends AbstractComponent implements Iterable<Realm> {
        List<Realm> nativeRealms = new ArrayList<>();
        for (Realm realm : realms) {
            // don't add the reserved realm here otherwise we end up with only this realm...
            if (INTERNAL_REALM_TYPES.contains(realm.type()) && ReservedRealm.TYPE.equals(realm.type()) == false) {
            if (InternalRealms.isInternalRealm(realm.type(), false)) {
                internalRealms.add(realm);
            }

@@ -142,7 +132,7 @@ public class Realms extends AbstractComponent implements Iterable<Realm> {
    }

    protected List<Realm> initRealms() throws Exception {
        Settings realmsSettings = REALMS_GROUPS_SETTINGS.get(settings);
        Settings realmsSettings = RealmSettings.get(settings);
        Set<String> internalTypes = new HashSet<>();
        List<Realm> realms = new ArrayList<>();
        for (String name : realmsSettings.names()) {
@@ -239,10 +229,6 @@ public class Realms extends AbstractComponent implements Iterable<Realm> {
        }
    }

    public static void addSettings(List<Setting<?>> settingsModule) {
        settingsModule.add(REALMS_GROUPS_SETTINGS);
    }

    private static void combineMaps(Map<String, Object> mapA, Map<String, Object> mapB) {
        for (Entry<String, Object> entry : mapB.entrySet()) {
            mapA.compute(entry.getKey(), (key, value) -> {
@@ -274,9 +260,10 @@ public class Realms extends AbstractComponent implements Iterable<Realm> {
            case NATIVE:
                return FileRealm.TYPE.equals(type) || NativeRealm.TYPE.equals(type);
            case DEFAULT:
                return INTERNAL_REALM_TYPES.contains(type);
                return InternalRealms.isInternalRealm(type, true);
            default:
                throw new IllegalStateException("unknown enabled realm type [" + enabledRealmType + "]");
        }
    }

}

@@ -6,11 +6,15 @@
package org.elasticsearch.xpack.security.authc.esnative;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.user.User;

import java.util.HashSet;
import java.util.Set;

/**
 * User/password realm that is backed by an Elasticsearch index
 */
@@ -34,4 +38,11 @@ public class NativeRealm extends CachingUsernamePasswordRealm {
    protected void doAuthenticate(UsernamePasswordToken token, ActionListener<User> listener) {
        userStore.verifyPassword(token.principal(), token.credentials(), listener);
    }

    /**
     * @return The {@link Setting setting configuration} for this realm type
     */
    public static Set<Setting<?>> getSettings() {
        return CachingUsernamePasswordRealm.getCachingSettings();
    }
}

@@ -5,9 +5,12 @@
 */
package org.elasticsearch.xpack.security.authc.file;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
@@ -61,4 +64,11 @@ public class FileRealm extends CachingUsernamePasswordRealm {
        stats.put("size", userPasswdStore.usersCount());
        return stats;
    }

    /**
     * @return The {@link Setting setting configuration} for this realm type
     */
    public static Set<Setting<?>> getSettings() {
        return CachingUsernamePasswordRealm.getCachingSettings();
    }
}

@@ -16,6 +16,7 @@ import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@@ -26,8 +27,10 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.ssl.SSLService;

import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.attributesToSearchFor;
@@ -107,6 +110,18 @@ class ActiveDirectorySessionFactory extends SessionFactory {
        return "DC=" + domain.replace(".", ",DC=");
    }

    public static Set<Setting<?>> getSettings() {
        Set<Setting<?>> settings = new HashSet<>();
        settings.addAll(SessionFactory.getSettings());
        settings.add(Setting.simpleString(AD_DOMAIN_NAME_SETTING, Setting.Property.NodeScope));
        settings.add(Setting.simpleString(AD_GROUP_SEARCH_BASEDN_SETTING, Setting.Property.NodeScope));
        settings.add(Setting.simpleString(AD_GROUP_SEARCH_SCOPE_SETTING, Setting.Property.NodeScope));
        settings.add(Setting.simpleString(AD_USER_SEARCH_BASEDN_SETTING, Setting.Property.NodeScope));
        settings.add(Setting.simpleString(AD_USER_SEARCH_FILTER_SETTING, Setting.Property.NodeScope));
        settings.add(Setting.simpleString(AD_USER_SEARCH_SCOPE_SETTING, Setting.Property.NodeScope));
        return settings;
    }

    ADAuthenticator getADAuthenticator(String username) {
        if (username.indexOf('\\') > 0) {
            return downLevelADAuthenticator;

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.xpack.security.authc.ldap;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
@@ -13,12 +14,13 @@ import com.unboundid.ldap.sdk.LDAPException;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapLoadBalancing;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
import org.elasticsearch.xpack.security.authc.RealmSettings;
import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
@@ -59,13 +61,27 @@ public final class LdapRealm extends CachingUsernamePasswordRealm {
            sessionFactory = new ActiveDirectorySessionFactory(config, sslService);
        } else {
            assert LDAP_TYPE.equals(type) : "type [" + type + "] is unknown. expected one of [" + AD_TYPE + ", " + LDAP_TYPE + "]";
            Settings searchSettings = userSearchSettings(config);
            if (searchSettings.names().isEmpty()) {
                sessionFactory = new LdapSessionFactory(config, sslService);
            } else if (config.settings().getAsArray(LdapSessionFactory.USER_DN_TEMPLATES_SETTING).length > 0) {
                throw new IllegalArgumentException("settings were found for both user search and user template modes of operation. " +
                        "Please remove the settings for the mode you do not wish to use. For more details refer to the ldap " +
            final boolean hasSearchSettings = LdapUserSearchSessionFactory.hasUserSearchSettings(config);
            final boolean hasTemplates = LdapSessionFactory.USER_DN_TEMPLATES_SETTING.exists(config.settings());
            if (hasSearchSettings == false) {
                if(hasTemplates == false) {
                    throw new IllegalArgumentException("settings were not found for either user search [" +
                            RealmSettings.getFullSettingKey(config, LdapUserSearchSessionFactory.SEARCH_PREFIX) +
                            "] or user template [" +
                            RealmSettings.getFullSettingKey(config, LdapSessionFactory.USER_DN_TEMPLATES_SETTING) +
                            "] modes of operation. " +
                            "Please provide the settings for the mode you wish to use. For more details refer to the ldap " +
                            "authentication section of the X-Pack guide.");
                }
                sessionFactory = new LdapSessionFactory(config, sslService);
            } else if (hasTemplates) {
                throw new IllegalArgumentException("settings were found for both user search [" +
                        RealmSettings.getFullSettingKey(config, LdapUserSearchSessionFactory.SEARCH_PREFIX) +
                        "] and user template [" +
                        RealmSettings.getFullSettingKey(config, LdapSessionFactory.USER_DN_TEMPLATES_SETTING) +
                        "] modes of operation. " +
                        "Please remove the settings for the mode you do not wish to use. For more details refer to the ldap " +
                        "authentication section of the X-Pack guide.");
            } else {
                sessionFactory = new LdapUserSearchSessionFactory(config, sslService);
            }
@@ -73,8 +89,22 @@ public final class LdapRealm extends CachingUsernamePasswordRealm {
        return sessionFactory;
    }

    static Settings userSearchSettings(RealmConfig config) {
        return config.settings().getAsSettings("user_search");
    /**
     * @return The {@link Setting setting configuration} for this realm type
     * @param type Either {@link #AD_TYPE} or {@link #LDAP_TYPE}
     */
    public static Set<Setting<?>> getSettings(String type) {
        Set<Setting<?>> settings = new HashSet<>();
        settings.addAll(CachingUsernamePasswordRealm.getCachingSettings());
        DnRoleMapper.getSettings(settings);
        if (AD_TYPE.equals(type)) {
            settings.addAll(ActiveDirectorySessionFactory.getSettings());
        } else {
            assert LDAP_TYPE.equals(type) : "type [" + type + "] is unknown. expected one of [" + AD_TYPE + ", " + LDAP_TYPE + "]";
            settings.addAll(LdapSessionFactory.getSettings());
            settings.addAll(LdapUserSearchSessionFactory.getSettings());
        }
        return settings;
    }

    /**
@@ -106,7 +136,7 @@ public final class LdapRealm extends CachingUsernamePasswordRealm {
        Map<String, Object> usage = super.usageStats();
        usage.put("load_balance_type", LdapLoadBalancing.resolve(config.settings()).toString());
        usage.put("ssl", sessionFactory.isSslUsed());
        usage.put("user_search", userSearchSettings(config).isEmpty() == false);
        usage.put("user_search", LdapUserSearchSessionFactory.hasUserSearchSettings(config));
        return usage;
    }

@ -12,8 +12,11 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.RealmSettings;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver;
import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
@ -23,7 +26,12 @@ import org.elasticsearch.xpack.ssl.SSLService;

import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.function.Function;

import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.escapedRDNValue;

@ -35,7 +43,8 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.esca
*/
public class LdapSessionFactory extends SessionFactory {

public static final String USER_DN_TEMPLATES_SETTING = "user_dn_templates";
public static final Setting<List<String>> USER_DN_TEMPLATES_SETTING = Setting.listSetting("user_dn_templates",
Collections.emptyList(), Function.identity(), Setting.Property.NodeScope);

private final String[] userDnTemplates;
private final GroupsResolver groupResolver;
@ -43,9 +52,10 @@ public class LdapSessionFactory extends SessionFactory {
public LdapSessionFactory(RealmConfig config, SSLService sslService) {
super(config, sslService);
Settings settings = config.settings();
userDnTemplates = settings.getAsArray(USER_DN_TEMPLATES_SETTING);
if (userDnTemplates == null) {
throw new IllegalArgumentException("missing required LDAP setting [" + USER_DN_TEMPLATES_SETTING + "]");
userDnTemplates = USER_DN_TEMPLATES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
if (userDnTemplates.length == 0) {
throw new IllegalArgumentException("missing required LDAP setting ["
+ RealmSettings.getFullSettingKey(config, USER_DN_TEMPLATES_SETTING) + "]");
}
groupResolver = groupResolver(settings);
}
@ -116,10 +126,16 @@ public class LdapSessionFactory extends SessionFactory {
}

static GroupsResolver groupResolver(Settings settings) {
Settings searchSettings = settings.getAsSettings("group_search");
if (!searchSettings.names().isEmpty()) {
return new SearchGroupsResolver(searchSettings);
if (SearchGroupsResolver.BASE_DN.exists(settings)) {
return new SearchGroupsResolver(settings);
}
return new UserAttributeGroupsResolver(settings);
}

public static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.addAll(SessionFactory.getSettings());
settings.add(USER_DN_TEMPLATES_SETTING);
return settings;
}
}

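An illustrative sketch of the required-setting style adopted above for user_dn_templates: a typed list setting whose absence is reported with the setting key. The key below is hypothetical; the realm code additionally prefixes it with the realm's full key via RealmSettings.getFullSettingKey.

import java.util.Collections;
import java.util.List;
import java.util.function.Function;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

// Illustrative only: a required list setting read through a typed Setting.
class RequiredListSettingSketch {
    static final Setting<List<String>> TEMPLATES = Setting.listSetting(
            "example.user_dn_templates", Collections.emptyList(), Function.identity(), Setting.Property.NodeScope);

    static List<String> templates(Settings settings) {
        List<String> values = TEMPLATES.get(settings);
        if (values.isEmpty()) {
            // report the key the user would need to set
            throw new IllegalArgumentException("missing required setting [" + TEMPLATES.getKey() + "]");
        }
        return values;
    }
}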
@ -17,9 +17,11 @@ import com.unboundid.ldap.sdk.SimpleBindRequest;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.RealmSettings;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver;
@ -30,6 +32,11 @@ import org.elasticsearch.xpack.ssl.SSLService;

import java.util.Arrays;

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

import static com.unboundid.ldap.sdk.Filter.createEqualityFilter;
import static com.unboundid.ldap.sdk.Filter.encodeValue;
import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.attributesToSearchFor;
@ -42,6 +49,30 @@ class LdapUserSearchSessionFactory extends SessionFactory {
static final String DEFAULT_USERNAME_ATTRIBUTE = "uid";
static final TimeValue DEFAULT_HEALTH_CHECK_INTERVAL = TimeValue.timeValueSeconds(60L);

static final String SEARCH_PREFIX = "user_search.";

private static final Setting<String> SEARCH_BASE_DN = Setting.simpleString("user_search.base_dn", Setting.Property.NodeScope);
private static final Setting<String> SEARCH_ATTRIBUTE = new Setting<>("user_search.attribute", DEFAULT_USERNAME_ATTRIBUTE,
Function.identity(), Setting.Property.NodeScope);
private static final Setting<LdapSearchScope> SEARCH_SCOPE = new Setting<>("user_search.scope", (String) null,
s -> LdapSearchScope.resolve(s, LdapSearchScope.SUB_TREE), Setting.Property.NodeScope);

private static final Setting<Boolean> POOL_ENABLED = Setting.boolSetting("user_search.pool.enabled",
true, Setting.Property.NodeScope);
private static final Setting<Integer> POOL_INITIAL_SIZE = Setting.intSetting("user_search.pool.initial_size",
DEFAULT_CONNECTION_POOL_INITIAL_SIZE, 0, Setting.Property.NodeScope);
private static final Setting<Integer> POOL_SIZE = Setting.intSetting("user_search.pool.size",
DEFAULT_CONNECTION_POOL_SIZE, 1, Setting.Property.NodeScope);
private static final Setting<TimeValue> HEALTH_CHECK_INTERVAL = Setting.timeSetting("user_search.pool.health_check.interval",
DEFAULT_HEALTH_CHECK_INTERVAL, Setting.Property.NodeScope);
private static final Setting<Boolean> HEALTH_CHECK_ENABLED = Setting.boolSetting("user_search.pool.health_check.enabled",
true, Setting.Property.NodeScope);
private static final Setting<Optional<String>> HEALTH_CHECK_DN = new Setting<>("user_search.pool.health_check.dn", (String) null,
Optional::ofNullable, Setting.Property.NodeScope);

private static final Setting<String> BIND_DN = Setting.simpleString("bind_dn", Setting.Property.NodeScope);
private static final Setting<String> BIND_PASSWORD = Setting.simpleString("bind_password", Setting.Property.NodeScope);

private final String userSearchBaseDn;
private final LdapSearchScope scope;
private final String userAttribute;
@ -53,14 +84,15 @@ class LdapUserSearchSessionFactory extends SessionFactory {
LdapUserSearchSessionFactory(RealmConfig config, SSLService sslService) throws LDAPException {
super(config, sslService);
Settings settings = config.settings();
userSearchBaseDn = settings.get("user_search.base_dn");
if (userSearchBaseDn == null) {
throw new IllegalArgumentException("user_search base_dn must be specified");
if (SEARCH_BASE_DN.exists(settings)) {
userSearchBaseDn = SEARCH_BASE_DN.get(settings);
} else {
throw new IllegalArgumentException("[" + RealmSettings.getFullSettingKey(config, SEARCH_BASE_DN) + "] must be specified");
}
scope = LdapSearchScope.resolve(settings.get("user_search.scope"), LdapSearchScope.SUB_TREE);
userAttribute = settings.get("user_search.attribute", DEFAULT_USERNAME_ATTRIBUTE);
groupResolver = groupResolver(config.settings());
useConnectionPool = settings.getAsBoolean("user_search.pool.enabled", true);
scope = SEARCH_SCOPE.get(settings);
userAttribute = SEARCH_ATTRIBUTE.get(settings);
groupResolver = groupResolver(settings);
useConnectionPool = POOL_ENABLED.get(settings);
if (useConnectionPool) {
connectionPool = createConnectionPool(config, serverSet, timeout, logger);
} else {
@ -72,17 +104,16 @@ class LdapUserSearchSessionFactory extends SessionFactory {
throws LDAPException {
Settings settings = config.settings();
SimpleBindRequest bindRequest = bindRequest(settings);
final int initialSize = settings.getAsInt("user_search.pool.initial_size", DEFAULT_CONNECTION_POOL_INITIAL_SIZE);
final int size = settings.getAsInt("user_search.pool.size", DEFAULT_CONNECTION_POOL_SIZE);
final int initialSize = POOL_INITIAL_SIZE.get(settings);
final int size = POOL_SIZE.get(settings);
LDAPConnectionPool pool = null;
boolean success = false;
try {
pool = new LDAPConnectionPool(serverSet, bindRequest, initialSize, size);
pool.setRetryFailedOperationsDueToInvalidConnections(true);
if (settings.getAsBoolean("user_search.pool.health_check.enabled", true)) {
String entryDn = settings.get("user_search.pool.health_check.dn", (bindRequest == null) ? null : bindRequest.getBindDN());
final long healthCheckInterval =
settings.getAsTime("user_search.pool.health_check.interval", DEFAULT_HEALTH_CHECK_INTERVAL).millis();
if (HEALTH_CHECK_ENABLED.get(settings)) {
String entryDn = HEALTH_CHECK_DN.get(settings).orElseGet(() -> bindRequest == null ? null : bindRequest.getBindDN());
final long healthCheckInterval = HEALTH_CHECK_INTERVAL.get(settings).millis();
if (entryDn != null) {
// Checks the status of the LDAP connection at a specified interval in the background. We do not check on
// on create as the LDAP server may require authentication to get an entry and a bind request has not been executed
@ -93,7 +124,8 @@ class LdapUserSearchSessionFactory extends SessionFactory {
pool.setHealthCheck(healthCheck);
pool.setHealthCheckIntervalMillis(healthCheckInterval);
} else {
logger.warn("[bind_dn] and [user_search.pool.health_check.dn] have not been specified so no " +
logger.warn("[" + RealmSettings.getFullSettingKey(config, BIND_DN) + "] and [" +
RealmSettings.getFullSettingKey(config, HEALTH_CHECK_DN) + "] have not been specified so no " +
"ldap query will be run as a health check");
}
}
@ -109,13 +141,16 @@ class LdapUserSearchSessionFactory extends SessionFactory {

static SimpleBindRequest bindRequest(Settings settings) {
SimpleBindRequest request = null;
String bindDn = settings.get("bind_dn");
if (bindDn != null) {
request = new SimpleBindRequest(bindDn, settings.get("bind_password"));
if (BIND_DN.exists(settings)) {
request = new SimpleBindRequest(BIND_DN.get(settings), BIND_PASSWORD.get(settings));
}
return request;
}

public static boolean hasUserSearchSettings(RealmConfig config) {
return config.settings().getByPrefix("user_search.").isEmpty() == false;
}

@Override
public void session(String user, SecuredString password, ActionListener<LdapSession> listener) {
if (useConnectionPool) {
@ -268,10 +303,30 @@ class LdapUserSearchSessionFactory extends SessionFactory {
}

static GroupsResolver groupResolver(Settings settings) {
Settings searchSettings = settings.getAsSettings("group_search");
if (!searchSettings.names().isEmpty()) {
return new SearchGroupsResolver(searchSettings);
if (SearchGroupsResolver.BASE_DN.exists(settings)) {
return new SearchGroupsResolver(settings);
}
return new UserAttributeGroupsResolver(settings);
}

public static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.addAll(SessionFactory.getSettings());
settings.add(SEARCH_BASE_DN);
settings.add(SEARCH_SCOPE);
settings.add(SEARCH_ATTRIBUTE);
settings.add(POOL_ENABLED);
settings.add(POOL_INITIAL_SIZE);
settings.add(POOL_SIZE);
settings.add(HEALTH_CHECK_ENABLED);
settings.add(HEALTH_CHECK_DN);
settings.add(HEALTH_CHECK_INTERVAL);
settings.add(BIND_DN);
settings.add(BIND_PASSWORD);

settings.addAll(SearchGroupsResolver.getSettings());
settings.addAll(UserAttributeGroupsResolver.getSettings());

return settings;
}
}

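An illustrative sketch of the two optional-value styles used in this factory: Setting#exists for plain strings that must be configured, and an Optional-valued Setting for values that may legitimately be absent. The keys below are hypothetical.

import java.util.Optional;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

// Illustrative only: exists() for a mandatory value, Optional for a truly optional one.
class OptionalSettingSketch {
    static final Setting<String> BASE_DN =
            Setting.simpleString("example.base_dn", Setting.Property.NodeScope);
    static final Setting<Optional<String>> HEALTH_CHECK_DN =
            new Setting<>("example.health_check.dn", (String) null, Optional::ofNullable, Setting.Property.NodeScope);

    static String resolveBaseDn(Settings settings) {
        if (BASE_DN.exists(settings)) {
            return BASE_DN.get(settings);
        }
        throw new IllegalArgumentException("[" + BASE_DN.getKey() + "] must be specified");
    }

    static String resolveHealthCheckDn(Settings settings, String fallbackDn) {
        // absent values fall back without null checks scattered through the caller
        return HEALTH_CHECK_DN.get(settings).orElse(fallbackDn);
    }
}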
@ -13,6 +13,8 @@ import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchScope;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
@ -20,9 +22,13 @@ import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsRes

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.Set;
import java.util.function.Function;

import static org.elasticsearch.common.Strings.isNullOrEmpty;
import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.OBJECT_CLASS_PRESENCE_FILTER;
import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.createFilter;
import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.search;
@ -38,19 +44,28 @@ class SearchGroupsResolver implements GroupsResolver {
"(|(objectclass=groupOfNames)(objectclass=groupOfUniqueNames)(objectclass=group)(objectclass=posixGroup))" +
"(|(uniqueMember={0})(member={0})(memberUid={0})))";

static final Setting<String> BASE_DN = Setting.simpleString("group_search.base_dn", Setting.Property.NodeScope);
static final Setting<LdapSearchScope> SCOPE = new Setting<>("group_search.scope", (String) null,
s -> LdapSearchScope.resolve(s, LdapSearchScope.SUB_TREE), Setting.Property.NodeScope);
static final Setting<String> USER_ATTRIBUTE = Setting.simpleString("group_search.user_attribute", Setting.Property.NodeScope);

static final Setting<String> FILTER = new Setting<>("group_search.filter", GROUP_SEARCH_DEFAULT_FILTER,
Function.identity(), Setting.Property.NodeScope);

private final String baseDn;
private final String filter;
private final String userAttribute;
private final LdapSearchScope scope;

SearchGroupsResolver(Settings settings) {
baseDn = settings.get("base_dn");
if (baseDn == null) {
if (BASE_DN.exists(settings)) {
baseDn = BASE_DN.get(settings);
} else {
throw new IllegalArgumentException("base_dn must be specified");
}
filter = settings.get("filter", GROUP_SEARCH_DEFAULT_FILTER);
userAttribute = settings.get("user_attribute");
scope = LdapSearchScope.resolve(settings.get("scope"), LdapSearchScope.SUB_TREE);
filter = FILTER.get(settings);
userAttribute = USER_ATTRIBUTE.get(settings);
scope = SCOPE.get(settings);
}

@Override
@ -75,7 +90,7 @@ class SearchGroupsResolver implements GroupsResolver {
}

public String[] attributes() {
if (userAttribute != null) {
if (Strings.hasLength(userAttribute)) {
return new String[] { userAttribute };
}
return null;
@ -83,7 +98,7 @@ class SearchGroupsResolver implements GroupsResolver {

private void getUserId(String dn, Collection<Attribute> attributes, LDAPInterface connection, TimeValue timeout,
ActionListener<String> listener) {
if (userAttribute == null) {
if (isNullOrEmpty(userAttribute)) {
listener.onResponse(dn);
} else if (attributes != null) {
final String value = attributes.stream().filter((attribute) -> attribute.getName().equals(userAttribute))
@ -107,4 +122,13 @@ class SearchGroupsResolver implements GroupsResolver {
}, listener::onFailure),
userAttribute);
}

public static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.add(BASE_DN);
settings.add(FILTER);
settings.add(USER_ATTRIBUTE);
settings.add(SCOPE);
return settings;
}
}

@ -10,6 +10,7 @@ import com.unboundid.ldap.sdk.LDAPInterface;
import com.unboundid.ldap.sdk.SearchScope;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver;
@ -20,6 +21,8 @@ import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.Set;
import java.util.function.Function;

import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.OBJECT_CLASS_PRESENCE_FILTER;
import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.searchForEntry;
@ -29,10 +32,12 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.sear
*/
class UserAttributeGroupsResolver implements GroupsResolver {

private static final Setting<String> ATTRIBUTE = new Setting<>("user_group_attribute", "memberOf",
Function.identity(), Setting.Property.NodeScope);
private final String attribute;

UserAttributeGroupsResolver(Settings settings) {
this(settings.get("user_group_attribute", "memberOf"));
this(ATTRIBUTE.get(settings));
}

private UserAttributeGroupsResolver(String attribute) {
@ -62,4 +67,8 @@ class UserAttributeGroupsResolver implements GroupsResolver {
public String[] attributes() {
return new String[] { attribute };
}

public static Set<Setting<?>> getSettings() {
return Collections.singleton(ATTRIBUTE);
}
}

@ -12,11 +12,14 @@ import com.unboundid.ldap.sdk.RoundRobinServerSet;
import com.unboundid.ldap.sdk.ServerSet;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

import javax.net.SocketFactory;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

/**
* Enumeration representing the various supported {@link ServerSet} types that can be used with out built in realms.
@ -51,7 +54,7 @@ public enum LdapLoadBalancing {
if (InetAddresses.isInetAddress(addresses[0])) {
throw new IllegalArgumentException(toString() + " can only be used with a DNS name");
}
TimeValue dnsTtl = settings.getAsTime("cache_ttl", TimeValue.timeValueHours(1L));
TimeValue dnsTtl = settings.getAsTime(CACHE_TTL_SETTING, CACHE_TTL_DEFAULT);
return new RoundRobinDNSServerSet(addresses[0], ports[0],
RoundRobinDNSServerSet.AddressSelectionMode.ROUND_ROBIN, dnsTtl.millis(), null, socketFactory, options);
}
@ -67,7 +70,7 @@ public enum LdapLoadBalancing {
if (InetAddresses.isInetAddress(addresses[0])) {
throw new IllegalArgumentException(toString() + " can only be used with a DNS name");
}
TimeValue dnsTtl = settings.getAsTime("cache_ttl", TimeValue.timeValueHours(1L));
TimeValue dnsTtl = settings.getAsTime(CACHE_TTL_SETTING, CACHE_TTL_DEFAULT);
return new RoundRobinDNSServerSet(addresses[0], ports[0],
RoundRobinDNSServerSet.AddressSelectionMode.FAILOVER, dnsTtl.millis(), null, socketFactory, options);
}
@ -76,6 +79,8 @@ public enum LdapLoadBalancing {
public static final String LOAD_BALANCE_SETTINGS = "load_balance";
public static final String LOAD_BALANCE_TYPE_SETTING = "type";
public static final String LOAD_BALANCE_TYPE_DEFAULT = LdapLoadBalancing.FAILOVER.toString();
public static final String CACHE_TTL_SETTING = "cache_ttl";
public static final TimeValue CACHE_TTL_DEFAULT = TimeValue.timeValueHours(1L);

abstract ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory,
@Nullable LDAPConnectionOptions options);
@ -101,4 +106,11 @@ public enum LdapLoadBalancing {
Settings loadBalanceSettings = settings.getAsSettings(LOAD_BALANCE_SETTINGS);
return loadBalancing.buildServerSet(addresses, ports, loadBalanceSettings, socketFactory, options);
}

public static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.add(Setting.simpleString(LOAD_BALANCE_SETTINGS + "." + LOAD_BALANCE_TYPE_SETTING, Setting.Property.NodeScope));
settings.add(Setting.simpleString(LOAD_BALANCE_SETTINGS + "." + CACHE_TTL_SETTING, Setting.Property.NodeScope));
return settings;
}
}

@ -13,14 +13,20 @@ import com.unboundid.util.ssl.HostNameSSLSocketVerifier;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.ssl.SSLService;

import javax.net.SocketFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Function;
import java.util.regex.Pattern;

/**
@ -149,6 +155,19 @@ public abstract class SessionFactory {
return sslUsed;
}

protected static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.addAll(LdapLoadBalancing.getSettings());
settings.add(Setting.listSetting(URLS_SETTING, Collections.emptyList(), Function.identity(), Setting.Property.NodeScope));
settings.add(Setting.timeSetting(TIMEOUT_TCP_CONNECTION_SETTING, TIMEOUT_DEFAULT, Setting.Property.NodeScope));
settings.add(Setting.timeSetting(TIMEOUT_TCP_READ_SETTING, TIMEOUT_DEFAULT, Setting.Property.NodeScope));
settings.add(Setting.timeSetting(TIMEOUT_LDAP_SETTING, TIMEOUT_DEFAULT, Setting.Property.NodeScope));
settings.add(Setting.boolSetting(HOSTNAME_VERIFICATION_SETTING, true, Setting.Property.NodeScope));
settings.add(Setting.boolSetting(FOLLOW_REFERRALS_SETTING, true, Setting.Property.NodeScope));
settings.addAll(SSLConfigurationSettings.withPrefix("ssl.").getAllSettings());
return settings;
}

public static class LDAPServers {

private final String[] addresses;

@ -11,42 +11,48 @@ import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4Transport;
import org.elasticsearch.xpack.ssl.CertUtils;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.ssl.SSLService;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.RealmSettings;
import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;

import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.XPackSettings.HTTP_SSL_ENABLED;
import static org.elasticsearch.xpack.XPackSettings.TRANSPORT_SSL_ENABLED;
import static org.elasticsearch.xpack.security.Security.setting;

public class PkiRealm extends Realm {

public static final String PKI_CERT_HEADER_NAME = "__SECURITY_CLIENT_CERTIFICATE";
public static final String TYPE = "pki";
public static final String DEFAULT_USERNAME_PATTERN = "CN=(.*?)(?:,|$)";

static final String DEFAULT_USERNAME_PATTERN = "CN=(.*?)(?:,|$)";
private static final Setting<Pattern> USERNAME_PATTERN_SETTING = new Setting<>("username_pattern", DEFAULT_USERNAME_PATTERN,
s -> Pattern.compile(s, Pattern.CASE_INSENSITIVE), Setting.Property.NodeScope);
private static final SSLConfigurationSettings SSL_SETTINGS = SSLConfigurationSettings.withoutPrefix();

// For client based cert validation, the auth type must be specified but UNKNOWN is an acceptable value
public static final String AUTH_TYPE = "UNKNOWN";
@ -64,8 +70,7 @@ public class PkiRealm extends Realm {
PkiRealm(RealmConfig config, DnRoleMapper roleMapper, SSLService sslService) {
super(TYPE, config);
this.trustManager = trustManagers(config);
this.principalPattern = Pattern.compile(config.settings().get("username_pattern", DEFAULT_USERNAME_PATTERN),
Pattern.CASE_INSENSITIVE);
this.principalPattern = USERNAME_PATTERN_SETTING.get(config.settings());
this.roleMapper = roleMapper;
checkSSLEnabled(config, sslService);
}
@ -149,31 +154,26 @@ public class PkiRealm extends Realm {
static X509TrustManager trustManagers(RealmConfig realmConfig) {
final Settings settings = realmConfig.settings();
final Environment env = realmConfig.env();
String[] certificateAuthorities = settings.getAsArray("certificate_authorities", null);
String truststorePath = settings.get("truststore.path");
String[] certificateAuthorities = settings.getAsArray(SSL_SETTINGS.caPaths.getKey(), null);
String truststorePath = SSL_SETTINGS.truststorePath.get(settings).orElse(null);
if (truststorePath == null && certificateAuthorities == null) {
return null;
} else if (truststorePath != null && certificateAuthorities != null) {
final String settingPrefix = Realms.REALMS_GROUPS_SETTINGS.getKey() + realmConfig.name() + ".";
throw new IllegalArgumentException("[" + settingPrefix + "truststore.path] and [" + settingPrefix + "certificate_authorities]" +
" cannot be used at the same time");
final String pathKey = RealmSettings.getFullSettingKey(realmConfig, SSL_SETTINGS.truststorePath);
final String caKey = RealmSettings.getFullSettingKey(realmConfig, SSL_SETTINGS.caPaths);
throw new IllegalArgumentException("[" + pathKey + "] and [" + caKey + "] cannot be used at the same time");
} else if (truststorePath != null) {
return trustManagersFromTruststore(realmConfig);
return trustManagersFromTruststore(truststorePath, realmConfig);
}
return trustManagersFromCAs(settings, env);
}

private static X509TrustManager trustManagersFromTruststore(RealmConfig realmConfig) {
private static X509TrustManager trustManagersFromTruststore(String truststorePath, RealmConfig realmConfig) {
final Settings settings = realmConfig.settings();
String truststorePath = settings.get("truststore.path");
String password = settings.get("truststore.password");
if (password == null) {
final String settingPrefix = Realms.REALMS_GROUPS_SETTINGS.getKey() + realmConfig.name() + ".";
throw new IllegalArgumentException("[" + settingPrefix + "truststore.password] is not configured");
}

String trustStoreAlgorithm = settings.get("truststore.algorithm", System.getProperty("ssl.TrustManagerFactory.algorithm",
TrustManagerFactory.getDefaultAlgorithm()));
String password = SSL_SETTINGS.truststorePassword.get(settings).orElseThrow(() -> new IllegalArgumentException(
"[" + RealmSettings.getFullSettingKey(realmConfig, SSL_SETTINGS.truststorePassword) + "] is not configured"
));
String trustStoreAlgorithm = SSL_SETTINGS.truststoreAlgorithm.get(settings);
try {
return CertUtils.trustManager(truststorePath, password, trustStoreAlgorithm, realmConfig.env());
} catch (Exception e) {
@ -182,7 +182,7 @@ public class PkiRealm extends Realm {
}

private static X509TrustManager trustManagersFromCAs(Settings settings, Environment env) {
String[] certificateAuthorities = settings.getAsArray("certificate_authorities", null);
String[] certificateAuthorities = settings.getAsArray(SSL_SETTINGS.caPaths.getKey(), null);
assert certificateAuthorities != null;
try {
Certificate[] certificates = CertUtils.readCertificates(Arrays.asList(certificateAuthorities), env);
@ -232,4 +232,21 @@ public class PkiRealm extends Realm {
throw new IllegalStateException("PKI realm [" + config.name() + "] is enabled but cannot be used as neither HTTP or Transport " +
"has SSL with client authentication enabled");
}

/**
* @return The {@link Setting setting configuration} for this realm type
*/
public static Set<Setting<?>> getSettings() {
Set<Setting<?>> settings = new HashSet<>();
settings.add(USERNAME_PATTERN_SETTING);

settings.add(SSL_SETTINGS.truststorePath);
settings.add(SSL_SETTINGS.truststorePassword);
settings.add(SSL_SETTINGS.truststoreAlgorithm);
settings.add(SSL_SETTINGS.caPaths);

DnRoleMapper.getSettings(settings);

return settings;
}
}

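An illustrative sketch of the parse-at-read style the new username_pattern setting above uses: the Setting owns the parser, so callers receive a compiled Pattern instead of a raw string. The key below is hypothetical; the default regex mirrors the one in the hunk.

import java.util.regex.Pattern;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

// Illustrative only: the value is parsed (and therefore validated) when read.
class PatternSettingSketch {
    static final Setting<Pattern> USERNAME_PATTERN = new Setting<>(
            "example.username_pattern", "CN=(.*?)(?:,|$)",
            s -> Pattern.compile(s, Pattern.CASE_INSENSITIVE), Setting.Property.NodeScope);

    static boolean matches(Settings settings, String dn) {
        return USERNAME_PATTERN.get(settings).matcher(dn).find();
    }
}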
@ -8,34 +8,41 @@ package org.elasticsearch.xpack.security.authc.support;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.user.User;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm {

public static final String CACHE_HASH_ALGO_SETTING = "cache.hash_algo";
public static final String CACHE_TTL_SETTING = "cache.ttl";
public static final String CACHE_MAX_USERS_SETTING = "cache.max_users";
public static final Setting<String> CACHE_HASH_ALGO_SETTING = Setting.simpleString("cache.hash_algo", Setting.Property.NodeScope);

private static final TimeValue DEFAULT_TTL = TimeValue.timeValueMinutes(20);
private static final int DEFAULT_MAX_USERS = 100000; //100k users
public static final Setting<TimeValue> CACHE_TTL_SETTING = Setting.timeSetting("cache.ttl", DEFAULT_TTL, Setting.Property.NodeScope);

private static final int DEFAULT_MAX_USERS = 100_000; //100k users
public static final Setting<Integer> CACHE_MAX_USERS_SETTING = Setting.intSetting("cache.max_users", DEFAULT_MAX_USERS,
Setting.Property.NodeScope);

private final Cache<String, UserWithHash> cache;
final Hasher hasher;

protected CachingUsernamePasswordRealm(String type, RealmConfig config) {
super(type, config);
hasher = Hasher.resolve(config.settings().get(CACHE_HASH_ALGO_SETTING, null), Hasher.SSHA256);
TimeValue ttl = config.settings().getAsTime(CACHE_TTL_SETTING, DEFAULT_TTL);
hasher = Hasher.resolve(CACHE_HASH_ALGO_SETTING.get(config.settings()), Hasher.SSHA256);
TimeValue ttl = CACHE_TTL_SETTING.get(config.settings());
if (ttl.getNanos() > 0) {
cache = CacheBuilder.<String, UserWithHash>builder()
.setExpireAfterAccess(ttl)
.setMaximumWeight(config.settings().getAsInt(CACHE_MAX_USERS_SETTING, DEFAULT_MAX_USERS))
.setMaximumWeight(CACHE_MAX_USERS_SETTING.get(config.settings()))
.build();
} else {
cache = null;
@ -172,6 +179,13 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm

protected abstract void doLookupUser(String username, ActionListener<User> listener);

/**
* Returns the {@link Setting setting configuration} that is common for all caching realms
*/
protected static Set<Setting<?>> getCachingSettings() {
return new HashSet<>(Arrays.asList(CACHE_HASH_ALGO_SETTING, CACHE_TTL_SETTING, CACHE_MAX_USERS_SETTING));
}

private static class UserWithHash {
User user;
char[] hash;

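An illustrative sketch of defaults baked into typed settings, mirroring the cache.ttl and cache.max_users conversion above; the keys and the configured value are hypothetical.

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

// Illustrative only: unset settings fall back to the default declared on the Setting itself.
class CacheSettingsSketch {
    static final Setting<TimeValue> TTL =
            Setting.timeSetting("example.cache.ttl", TimeValue.timeValueMinutes(20), Setting.Property.NodeScope);
    static final Setting<Integer> MAX_USERS =
            Setting.intSetting("example.cache.max_users", 100_000, Setting.Property.NodeScope);

    public static void main(String[] args) {
        Settings settings = Settings.builder().put("example.cache.ttl", "5m").build();
        // prints the configured ttl ("5m") and the default max_users (100000)
        System.out.println(TTL.get(settings) + " / " + MAX_USERS.get(settings));
    }
}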
@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
@ -30,6 +31,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;

import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
@ -41,9 +43,12 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.rela
*/
public class DnRoleMapper {

public static final String DEFAULT_FILE_NAME = "role_mapping.yml";
public static final String ROLE_MAPPING_FILE_SETTING = "files.role_mapping";
public static final String USE_UNMAPPED_GROUPS_AS_ROLES_SETTING = "unmapped_groups_as_roles";
private static final String DEFAULT_FILE_NAME = "role_mapping.yml";
public static final Setting<String> ROLE_MAPPING_FILE_SETTING = new Setting<>("files.role_mapping", DEFAULT_FILE_NAME,
Function.identity(), Setting.Property.NodeScope);

public static final Setting<Boolean> USE_UNMAPPED_GROUPS_AS_ROLES_SETTING = Setting.boolSetting("unmapped_groups_as_roles", false,
Setting.Property.NodeScope);

protected final Logger logger;
protected final RealmConfig config;
@ -61,7 +66,7 @@ public class DnRoleMapper {
this.logger = config.logger(getClass());
this.listeners = new CopyOnWriteArrayList<>(Collections.singleton(listener));

useUnmappedGroupsAsRoles = config.settings().getAsBoolean(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, false);
useUnmappedGroupsAsRoles = USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.get(config.settings());
file = resolveFile(config.settings(), config.env());
dnRoles = parseFileLenient(file, logger, realmType, config.name());
FileWatcher watcher = new FileWatcher(file.getParent());
@ -78,7 +83,7 @@ public class DnRoleMapper {
}

public static Path resolveFile(Settings settings, Environment env) {
String location = settings.get(ROLE_MAPPING_FILE_SETTING, DEFAULT_FILE_NAME);
String location = ROLE_MAPPING_FILE_SETTING.get(settings);
return XPackPlugin.resolveConfigFile(env, location);
}

@ -185,6 +190,11 @@ public class DnRoleMapper {
listeners.forEach(Runnable::run);
}

public static void getSettings(Set<Setting<?>> settings) {
settings.add(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING);
settings.add(ROLE_MAPPING_FILE_SETTING);
}

private class FileListener implements FileChangesListener {
@Override
public void onFileCreated(Path file) {

@ -80,7 +80,6 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

@ -158,13 +157,11 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
QueryShardContext queryShardContext = queryShardContextProvider.apply(shardId);
bytesReference = evaluateTemplate(bytesReference);
try (XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference)) {
Optional<QueryBuilder> queryBuilder = queryShardContext.newParseContext(parser).parseInnerQueryBuilder();
if (queryBuilder.isPresent()) {
verifyRoleQuery(queryBuilder.get());
failIfQueryUsesClient(scriptService, queryBuilder.get(), queryShardContext);
ParsedQuery parsedQuery = queryShardContext.toQuery(queryBuilder.get());
filter.add(parsedQuery.query(), SHOULD);
}
QueryBuilder queryBuilder = queryShardContext.newParseContext(parser).parseInnerQueryBuilder();
verifyRoleQuery(queryBuilder);
failIfQueryUsesClient(scriptService, queryBuilder, queryShardContext);
ParsedQuery parsedQuery = queryShardContext.toQuery(queryBuilder);
filter.add(parsedQuery.query(), SHOULD);
}
}
// at least one of the queries should match

@ -84,6 +84,7 @@ public interface ServerTransportFilter {
destructiveOperations.failDestructive(indicesRequest.indices());
} catch(IllegalArgumentException e) {
listener.onFailure(e);
return;
}
}
/*

@ -5,22 +5,19 @@
*/
package org.elasticsearch.xpack.ssl;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.XPackSettings;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
* Represents the configuration for an SSLContext
*/
@ -29,48 +26,7 @@ class SSLConfiguration {
// These settings are never registered, but they exist so that we can parse the values defined under grouped settings. Also, some are
// implemented as optional settings, which provides a declarative manner for fallback as we typically fallback to values from a
// different configuration
private static final Setting<List<String>> CIPHERS_SETTING = Setting.listSetting("cipher_suites", Collections.emptyList(), s -> s);
private static final Setting<List<String>> SUPPORTED_PROTOCOLS_SETTING =
Setting.listSetting("supported_protocols", Collections.emptyList(), s -> s);
private static final Setting<Optional<String>> KEYSTORE_PATH_SETTING =
new Setting<>("keystore.path", (String) null, Optional::ofNullable);
private static final Setting<Optional<String>> KEYSTORE_PASSWORD_SETTING =
new Setting<>("keystore.password", (String) null, Optional::ofNullable);
private static final Setting<String> KEYSTORE_ALGORITHM_SETTING = new Setting<>("keystore.algorithm",
s -> System.getProperty("ssl.KeyManagerFactory.algorithm", KeyManagerFactory.getDefaultAlgorithm()), Function.identity());
private static final Setting<Optional<String>> KEYSTORE_KEY_PASSWORD_SETTING =
new Setting<>("keystore.key_password", KEYSTORE_PASSWORD_SETTING, Optional::ofNullable);
private static final Setting<Optional<String>> TRUSTSTORE_PATH_SETTING =
new Setting<>("truststore.path", (String) null, Optional::ofNullable);
private static final Setting<Optional<String>> TRUSTSTORE_PASSWORD_SETTING =
new Setting<>("truststore.password", (String) null, Optional::ofNullable);
private static final Setting<String> TRUSTSTORE_ALGORITHM_SETTING = new Setting<>("truststore.algorithm",
s -> System.getProperty("ssl.TrustManagerFactory.algorithm",
TrustManagerFactory.getDefaultAlgorithm()), Function.identity());
private static final Setting<Optional<String>> KEY_PATH_SETTING =
new Setting<>("key", (String) null, Optional::ofNullable);
private static final Setting<Optional<String>> KEY_PASSWORD_SETTING =
new Setting<>("key_passphrase", (String) null, Optional::ofNullable);
private static final Setting<Optional<String>> CERT_SETTING =
new Setting<>("certificate", (String) null, Optional::ofNullable);
private static final Setting<List<String>> CA_PATHS_SETTING =
Setting.listSetting("certificate_authorities", Collections.emptyList(), s -> s);
private static final Setting<Optional<SSLClientAuth>> CLIENT_AUTH_SETTING =
new Setting<>("client_authentication", (String) null, s -> {
if (s == null) {
return Optional.ofNullable(null);
} else {
return Optional.of(SSLClientAuth.parse(s));
}
});
private static final Setting<Optional<VerificationMode>> VERIFICATION_MODE_SETTING = new Setting<>("verification_mode", (String) null,
s -> {
if (s == null) {
return Optional.ofNullable(null);
} else {
return Optional.of(VerificationMode.parse(s));
}
});
private static final SSLConfigurationSettings SETTINGS_PARSER = SSLConfigurationSettings.withoutPrefix();

private final KeyConfig keyConfig;
private final TrustConfig trustConfig;
@ -87,10 +43,10 @@ class SSLConfiguration {
SSLConfiguration(Settings settings) {
this.keyConfig = createKeyConfig(settings, null);
this.trustConfig = createTrustConfig(settings, keyConfig, null);
this.ciphers = getListOrDefault(CIPHERS_SETTING, settings, XPackSettings.DEFAULT_CIPHERS);
this.supportedProtocols = getListOrDefault(SUPPORTED_PROTOCOLS_SETTING, settings, XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS);
this.sslClientAuth = CLIENT_AUTH_SETTING.get(settings).orElse(XPackSettings.CLIENT_AUTH_DEFAULT);
this.verificationMode = VERIFICATION_MODE_SETTING.get(settings).orElse(XPackSettings.VERIFICATION_MODE_DEFAULT);
this.ciphers = getListOrDefault(SETTINGS_PARSER.ciphers, settings, XPackSettings.DEFAULT_CIPHERS);
this.supportedProtocols = getListOrDefault(SETTINGS_PARSER.supportedProtocols, settings, XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS);
this.sslClientAuth = SETTINGS_PARSER.clientAuth.get(settings).orElse(XPackSettings.CLIENT_AUTH_DEFAULT);
this.verificationMode = SETTINGS_PARSER.verificationMode.get(settings).orElse(XPackSettings.VERIFICATION_MODE_DEFAULT);
}

/**
@ -103,10 +59,11 @@ class SSLConfiguration {
Objects.requireNonNull(globalSSLConfiguration);
this.keyConfig = createKeyConfig(settings, globalSSLConfiguration);
this.trustConfig = createTrustConfig(settings, keyConfig, globalSSLConfiguration);
this.ciphers = getListOrDefault(CIPHERS_SETTING, settings, globalSSLConfiguration.cipherSuites());
this.supportedProtocols = getListOrDefault(SUPPORTED_PROTOCOLS_SETTING, settings, globalSSLConfiguration.supportedProtocols());
this.sslClientAuth = CLIENT_AUTH_SETTING.get(settings).orElse(globalSSLConfiguration.sslClientAuth());
this.verificationMode = VERIFICATION_MODE_SETTING.get(settings).orElse(globalSSLConfiguration.verificationMode());
this.ciphers = getListOrDefault(SETTINGS_PARSER.ciphers, settings, globalSSLConfiguration.cipherSuites());
this.supportedProtocols = getListOrDefault(SETTINGS_PARSER.supportedProtocols, settings,
globalSSLConfiguration.supportedProtocols());
this.sslClientAuth = SETTINGS_PARSER.clientAuth.get(settings).orElse(globalSSLConfiguration.sslClientAuth());
this.verificationMode = SETTINGS_PARSER.verificationMode.get(settings).orElse(globalSSLConfiguration.verificationMode());
}

/**
@ -216,8 +173,8 @@ class SSLConfiguration {
}

private static KeyConfig createKeyConfig(Settings settings, SSLConfiguration global) {
String keyStorePath = KEYSTORE_PATH_SETTING.get(settings).orElse(null);
String keyPath = KEY_PATH_SETTING.get(settings).orElse(null);
String keyStorePath = SETTINGS_PARSER.keystorePath.get(settings).orElse(null);
String keyPath = SETTINGS_PARSER.keyPath.get(settings).orElse(null);
if (keyPath != null && keyStorePath != null) {
throw new IllegalArgumentException("you cannot specify a keystore and key file");
} else if (keyStorePath == null && keyPath == null) {
@ -233,29 +190,29 @@ class SSLConfiguration {
}

if (keyPath != null) {
String keyPassword = KEY_PASSWORD_SETTING.get(settings).orElse(null);
String certPath = CERT_SETTING.get(settings).orElse(null);
String keyPassword = SETTINGS_PARSER.keyPassword.get(settings).orElse(null);
String certPath = SETTINGS_PARSER.cert.get(settings).orElse(null);
if (certPath == null) {
throw new IllegalArgumentException("you must specify the certificates to use with the key");
}
return new PEMKeyConfig(keyPath, keyPassword, certPath);
} else {
String keyStorePassword = KEYSTORE_PASSWORD_SETTING.get(settings).orElse(null);
String keyStoreAlgorithm = KEYSTORE_ALGORITHM_SETTING.get(settings);
String keyStoreKeyPassword = KEYSTORE_KEY_PASSWORD_SETTING.get(settings).orElse(keyStorePassword);
String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
String keyStorePassword = SETTINGS_PARSER.keystorePassword.get(settings).orElse(null);
String keyStoreAlgorithm = SETTINGS_PARSER.keystoreAlgorithm.get(settings);
String keyStoreKeyPassword = SETTINGS_PARSER.keystoreKeyPassword.get(settings).orElse(keyStorePassword);
String trustStoreAlgorithm = SETTINGS_PARSER.truststoreAlgorithm.get(settings);
return new StoreKeyConfig(keyStorePath, keyStorePassword, keyStoreKeyPassword, keyStoreAlgorithm, trustStoreAlgorithm);
}
}

private static TrustConfig createTrustConfig(Settings settings, KeyConfig keyConfig, SSLConfiguration global) {
String trustStorePath = TRUSTSTORE_PATH_SETTING.get(settings).orElse(null);
List<String> caPaths = getListOrNull(CA_PATHS_SETTING, settings);
String trustStorePath = SETTINGS_PARSER.truststorePath.get(settings).orElse(null);
List<String> caPaths = getListOrNull(SETTINGS_PARSER.caPaths, settings);
if (trustStorePath != null && caPaths != null) {
throw new IllegalArgumentException("you cannot specify a truststore and ca files");
}

VerificationMode verificationMode = VERIFICATION_MODE_SETTING.get(settings).orElseGet(() -> {
VerificationMode verificationMode = SETTINGS_PARSER.verificationMode.get(settings).orElseGet(() -> {
if (global != null) {
return global.verificationMode();
}
@ -266,8 +223,8 @@ class SSLConfiguration {
} else if (caPaths != null) {
return new PEMTrustConfig(caPaths);
} else if (trustStorePath != null) {
String trustStorePassword = TRUSTSTORE_PASSWORD_SETTING.get(settings).orElse(null);
String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
String trustStorePassword = SETTINGS_PARSER.truststorePassword.get(settings).orElse(null);
String trustStoreAlgorithm = SETTINGS_PARSER.truststoreAlgorithm.get(settings);
return new StoreTrustConfig(trustStorePath, trustStorePassword, trustStoreAlgorithm);
} else if (global == null && System.getProperty("javax.net.ssl.trustStore") != null) {
return new StoreTrustConfig(System.getProperty("javax.net.ssl.trustStore"),

@ -0,0 +1,135 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ssl;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;

/**
* Bridges {@link SSLConfiguration} into the {@link Settings} framework, using {@link Setting} objects.
*/
public class SSLConfigurationSettings {

private final String prefix;

public final Setting<List<String>> ciphers;
public final Setting<List<String>> supportedProtocols;
public final Setting<Optional<String>> keystorePath;
public final Setting<Optional<String>> keystorePassword;
public final Setting<String> keystoreAlgorithm;
public final Setting<Optional<String>> keystoreKeyPassword;
public final Setting<Optional<String>> truststorePath;
public final Setting<Optional<String>> truststorePassword;
public final Setting<String> truststoreAlgorithm;
public final Setting<Optional<String>> keyPath;
public final Setting<Optional<String>> keyPassword;
public final Setting<Optional<String>> cert;
public final Setting<List<String>> caPaths;
public final Setting<Optional<SSLClientAuth>> clientAuth;
public final Setting<Optional<VerificationMode>> verificationMode;

/**
* @see #withoutPrefix
* @see #withPrefix
* @param prefix The prefix under which each setting should be defined. Must be either the empty string (<code>""</code>) or a string
* ending in <code>"."</code>
*/
private SSLConfigurationSettings(String prefix) {
assert prefix != null : "Prefix cannot be null (but can be blank)";
this.prefix = prefix;

ciphers = list("cipher_suites", Collections.emptyList());
supportedProtocols = list("supported_protocols", Collections.emptyList());
keystorePath = optionalString("keystore.path");
keystorePassword = optionalString("keystore.password");
keystoreKeyPassword = optionalString("keystore.key_password", keystorePassword);
truststorePath = optionalString("truststore.path");
truststorePassword = optionalString("truststore.password");
keystoreAlgorithm = systemProperty("keystore.algorithm",
"ssl.KeyManagerFactory.algorithm", KeyManagerFactory.getDefaultAlgorithm());
truststoreAlgorithm = systemProperty("truststore.algorithm", "ssl.TrustManagerFactory.algorithm",
TrustManagerFactory.getDefaultAlgorithm());
keyPath = optionalString("key");
keyPassword = optionalString("key_passphrase");
cert = optionalString("certificate");
caPaths = list("certificate_authorities", Collections.emptyList());
clientAuth = optional("client_authentication", SSLClientAuth::parse);
verificationMode = optional("verification_mode", VerificationMode::parse);
}

public List<Setting<?>> getAllSettings() {
return Arrays.asList(ciphers, supportedProtocols,
keystorePath, keystorePassword, keystoreAlgorithm, keystoreKeyPassword,
truststorePath, truststorePassword, truststoreAlgorithm,
keyPath, keyPassword,
cert, caPaths, clientAuth, verificationMode);
}

private Setting<Optional<String>> optionalString(String keyPart) {
return optionalString(keyPart, (s) -> null);
}

private Setting<Optional<String>> optionalString(String keyPart, Function<Settings, String> defaultValue) {
return new Setting<>(prefix + keyPart, defaultValue, Optional::ofNullable,
Setting.Property.NodeScope, Setting.Property.Filtered);
}

private Setting<Optional<String>> optionalString(String keyPart, Setting<Optional<String>> fallback) {
return new Setting<>(prefix + keyPart, fallback, Optional::ofNullable,
Setting.Property.NodeScope, Setting.Property.Filtered);
}

private <T> Setting<Optional<T>> optional(String keyPart, Function<String, T> parserIfNotNull) {
Function<String,Optional<T>> parser = s -> {
if (s == null) {
return Optional.empty();
} else {
return Optional.of(parserIfNotNull.apply(s));
}
};
return new Setting<>(prefix + keyPart, (String) null, parser, Setting.Property.NodeScope, Setting.Property.Filtered);
}

private Setting<String> systemProperty(String keyPart, String systemProperty, String defaultValue) {
return string(keyPart, s -> System.getProperty(systemProperty, defaultValue));
}

private Setting<String> string(String keyPart, Function<Settings, String> defaultFunction) {
return new Setting<>(prefix + keyPart, defaultFunction, Function.identity(),
Setting.Property.NodeScope, Setting.Property.Filtered);
}

private Setting<List<String>> list(String keyPart, List<String> defaultValue) {
return Setting.listSetting(prefix + keyPart, defaultValue, Function.identity(),
Setting.Property.NodeScope, Setting.Property.Filtered);
}

/**
* Construct settings that are un-prefixed. That is, they can be used to read from a {@link Settings} object where the configuration
* keys are the root names of the <code>Settings</code>.
*/
public static SSLConfigurationSettings withoutPrefix() {
return new SSLConfigurationSettings("");
}

/**
* Construct settings that have a prefixed. That is, they can be used to read from a {@link Settings} object where the configuration
* keys are prefixed-children of the <code>Settings</code>.
* @param prefix A string that must end in <code>"ssl."</code>
*/
public static SSLConfigurationSettings withPrefix(String prefix) {
assert prefix.endsWith("ssl.") : "The ssl config prefix (" + prefix + ") should end in 'ssl.'";
return new SSLConfigurationSettings(prefix);
}
}
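A minimal usage sketch (not from the commit) for the new helper, based only on the factory methods and public fields defined above. The prefix below is hypothetical, chosen to satisfy the assertion that it must end in "ssl.".

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;

// Illustrative only: register every SSL setting under a prefix, then read
// individual values through the typed, Optional-aware fields.
class SslSettingsUsageSketch {
    static List<Setting<?>> register(Settings nodeSettings) {
        SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("xpack.example.ssl.");
        List<Setting<?>> toRegister = new ArrayList<>(ssl.getAllSettings());
        Optional<String> truststore = ssl.truststorePath.get(nodeSettings);
        truststore.ifPresent(path -> System.out.println("truststore path: " + path));
        return toRegister;
    }
}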
@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.transport.TransportSettings;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.Security;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
@ -62,7 +63,7 @@ public class SSLService extends AbstractComponent {
public SSLService(Settings settings, Environment environment) {
super(settings);
this.env = environment;
this.globalSSLConfiguration = new SSLConfiguration(settings.getByPrefix("xpack.ssl."));
this.globalSSLConfiguration = new SSLConfiguration(settings.getByPrefix(XPackSettings.GLOBAL_SSL_PREFIX));
this.sslContexts = loadSSLConfigurations();
}

@ -387,7 +388,7 @@ public class SSLService extends AbstractComponent {
Map<SSLConfiguration, SSLContextHolder> sslConfigurations = new HashMap<>();
sslConfigurations.put(globalSSLConfiguration, createSslContext(globalSSLConfiguration));

final Settings transportSSLSettings = settings.getByPrefix("xpack.security.transport.ssl.");
final Settings transportSSLSettings = settings.getByPrefix(XPackSettings.TRANSPORT_SSL_PREFIX);
List<Settings> sslSettingsList = new ArrayList<>();
sslSettingsList.add(transportSSLSettings);
sslSettingsList.add(getHttpTransportSSLSettings(settings));
@ -741,7 +742,7 @@ public class SSLService extends AbstractComponent {

private static List<Settings> getRealmsSSLSettings(Settings settings) {
List<Settings> sslSettings = new ArrayList<>();
Settings realmsSettings = settings.getByPrefix("xpack.security.authc.realms.");
Settings realmsSettings = settings.getByPrefix(Security.setting("authc.realms."));
for (String name : realmsSettings.names()) {
Settings realmSSLSettings = realmsSettings.getAsSettings(name).getByPrefix("ssl.");
if (realmSSLSettings.isEmpty() == false) {
@ -764,7 +765,7 @@ public class SSLService extends AbstractComponent {
}

public static Settings getHttpTransportSSLSettings(Settings settings) {
Settings httpSSLSettings = settings.getByPrefix("xpack.security.http.ssl.");
Settings httpSSLSettings = settings.getByPrefix(XPackSettings.HTTP_SSL_PREFIX);
if (httpSSLSettings.isEmpty()) {
return httpSSLSettings;
}

@ -142,7 +142,6 @@ import org.elasticsearch.xpack.watcher.trigger.schedule.engine.SchedulerSchedule
import org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleTriggerEngine;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchLockService;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

@ -255,13 +254,10 @@ public class Watcher implements ActionPlugin, ScriptPlugin {
final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories);
inputFactories.put(ChainInput.TYPE, new ChainInputFactory(settings, inputRegistry));

// TODO replace internal client where needed, so we can remove ctors
final WatcherClientProxy watcherClientProxy = new WatcherClientProxy(settings, internalClient);

final WatcherClient watcherClient = new WatcherClient(internalClient);

final HistoryStore historyStore = new HistoryStore(settings, watcherClientProxy);

final Set<Schedule.Parser> scheduleParsers = new HashSet<>();
scheduleParsers.add(new CronSchedule.Parser());
scheduleParsers.add(new DailySchedule.Parser());
@ -288,10 +284,9 @@ public class Watcher implements ActionPlugin, ScriptPlugin {
final WatchLockService watchLockService = new WatchLockService(settings);
final WatchExecutor watchExecutor = getWatchExecutor(threadPool);
final Watch.Parser watchParser = new Watch.Parser(settings, triggerService, registry, inputRegistry, cryptoService, clock);
final WatchStore watchStore = new WatchStore(settings, watcherClientProxy, watchParser);

final ExecutionService executionService = new ExecutionService(settings, historyStore, triggeredWatchStore, watchExecutor,
watchStore, watchLockService, clock, threadPool);
watchLockService, clock, threadPool, watchParser, watcherClientProxy);

final TriggerEngine.Listener triggerEngineListener = getTriggerEngineListener(executionService);
triggerService.register(triggerEngineListener);
@ -299,15 +294,15 @@ public class Watcher implements ActionPlugin, ScriptPlugin {
final WatcherIndexTemplateRegistry watcherIndexTemplateRegistry = new WatcherIndexTemplateRegistry(settings,
clusterService.getClusterSettings(), clusterService, threadPool, internalClient);

final WatcherService watcherService = new WatcherService(settings, clock, triggerService, watchStore,
watchParser, executionService, watchLockService, watcherIndexTemplateRegistry);
final WatcherService watcherService = new WatcherService(settings, triggerService, executionService, watchLockService,
watcherIndexTemplateRegistry, watchParser, watcherClientProxy);

final WatcherLifeCycleService watcherLifeCycleService =
new WatcherLifeCycleService(settings, threadPool, clusterService, watcherService);

return Arrays.asList(registry, watcherClient, inputRegistry, historyStore, triggerService, triggeredWatchParser,
watcherLifeCycleService, executionService, watchStore, triggerEngineListener, watcherService, watchParser,
configuredTriggerEngine, triggeredWatchStore, watcherSearchTemplateService);
watcherLifeCycleService, executionService, triggerEngineListener, watcherService, watchParser,
configuredTriggerEngine, triggeredWatchStore, watcherSearchTemplateService, watcherClientProxy);
}

protected TriggerEngine getTriggerEngine(Clock clock, ScheduleRegistry scheduleRegistry) {
@ -441,7 +436,7 @@ public class Watcher implements ActionPlugin, ScriptPlugin {

String errorMessage = LoggerMessageFormat.format("the [action.auto_create_index] setting value [{}] is too" +
" restrictive. disable [action.auto_create_index] or set it to " +
"[{}, {}, {}*]", (Object) value, WatchStore.INDEX, TriggeredWatchStore.INDEX_NAME, HistoryStore.INDEX_PREFIX);
"[{}, {}, {}*]", (Object) value, Watch.INDEX, TriggeredWatchStore.INDEX_NAME, HistoryStore.INDEX_PREFIX);
if (Booleans.isExplicitFalse(value)) {
throw new IllegalArgumentException(errorMessage);
}
@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;

import java.util.concurrent.CountDownLatch;

@ -130,15 +130,15 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste
threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> start(state, false));
} else {
boolean isWatchIndexDeleted = event.indicesDeleted().stream()
.filter(index -> WatchStore.INDEX.equals(index.getName()))
.filter(index -> Watch.INDEX.equals(index.getName()))
.findAny()
.isPresent();

boolean isWatchIndexOpenInPreviousClusterState = event.previousState().metaData().hasIndex(WatchStore.INDEX) &&
event.previousState().metaData().index(WatchStore.INDEX).getState() == IndexMetaData.State.OPEN;
boolean isWatchIndexClosedInCurrentClusterState = event.state().metaData().hasIndex(WatchStore.INDEX) &&
event.state().metaData().index(WatchStore.INDEX).getState() == IndexMetaData.State.CLOSE;
boolean hasWatcherIndexBeenClosed = isWatchIndexOpenInPreviousClusterState && isWatchIndexClosedInCurrentClusterState;
final boolean isWatchIndexOpenInPreviousClusterState = event.previousState().metaData().hasIndex(Watch.INDEX) &&
event.previousState().metaData().index(Watch.INDEX).getState() == IndexMetaData.State.OPEN;
final boolean isWatchIndexClosedInCurrentClusterState = event.state().metaData().hasIndex(Watch.INDEX) &&
event.state().metaData().index(Watch.INDEX).getState() == IndexMetaData.State.CLOSE;
final boolean hasWatcherIndexBeenClosed = isWatchIndexOpenInPreviousClusterState && isWatchIndexClosedInCurrentClusterState;

if (isWatchIndexDeleted || hasWatcherIndexBeenClosed) {
threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> watcherService.watchIndexDeletedOrClosed());
@ -6,57 +6,67 @@
package org.elasticsearch.xpack.watcher;


import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchLockService;
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.joda.time.DateTime;
import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils;

import java.io.IOException;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument;
import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState;
import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException;
import static org.joda.time.DateTimeZone.UTC;
import static org.elasticsearch.xpack.watcher.watch.Watch.DOC_TYPE;
import static org.elasticsearch.xpack.watcher.watch.Watch.INDEX;


public class WatcherService extends AbstractComponent {

private final Clock clock;
private final TriggerService triggerService;
private final Watch.Parser watchParser;
private final WatchStore watchStore;
private final WatchLockService watchLockService;
private final ExecutionService executionService;
private final WatcherIndexTemplateRegistry watcherIndexTemplateRegistry;
// package-private for testing
final AtomicReference<WatcherState> state = new AtomicReference<>(WatcherState.STOPPED);
private final TimeValue scrollTimeout;
private final int scrollSize;
private final Watch.Parser parser;
private final WatcherClientProxy client;

public WatcherService(Settings settings, Clock clock, TriggerService triggerService, WatchStore watchStore,
Watch.Parser watchParser, ExecutionService executionService, WatchLockService watchLockService,
WatcherIndexTemplateRegistry watcherIndexTemplateRegistry) {
public WatcherService(Settings settings, TriggerService triggerService,
ExecutionService executionService, WatchLockService watchLockService,
WatcherIndexTemplateRegistry watcherIndexTemplateRegistry, Watch.Parser parser, WatcherClientProxy client) {
super(settings);
this.clock = clock;
this.triggerService = triggerService;
this.watchStore = watchStore;
this.watchParser = watchParser;
this.watchLockService = watchLockService;
this.executionService = executionService;
this.watcherIndexTemplateRegistry = watcherIndexTemplateRegistry;
this.scrollTimeout = settings.getAsTime("xpack.watcher.watch.scroll.timeout", TimeValue.timeValueSeconds(30));
this.scrollSize = settings.getAsInt("xpack.watcher.watch.scroll.size", 100);
this.parser = parser;
this.client = client;
}

public void start(ClusterState clusterState) throws Exception {
@ -65,12 +75,9 @@ public class WatcherService extends AbstractComponent {
logger.debug("starting watch service...");
watcherIndexTemplateRegistry.addTemplatesIfMissing();
watchLockService.start();

// Try to load watch store before the execution service, b/c action depends on watch store
watchStore.start(clusterState);
executionService.start(clusterState);
triggerService.start(loadWatches(clusterState));

triggerService.start(watchStore.activeWatches());
state.set(WatcherState.STARTED);
logger.debug("watch service has started");
} catch (Exception e) {
@ -83,7 +90,7 @@ public class WatcherService extends AbstractComponent {
}

public boolean validate(ClusterState state) {
return watchStore.validate(state) && executionService.validate(state);
return executionService.validate(state);
}

public void stop() {
@ -96,7 +103,6 @@ public class WatcherService extends AbstractComponent {
} catch (ElasticsearchTimeoutException te) {
logger.warn("error stopping WatchLockService", te);
}
watchStore.stop();
state.set(WatcherState.STOPPED);
logger.debug("watch service has stopped");
} else {
@ -104,151 +110,74 @@ public class WatcherService extends AbstractComponent {
}
}

public WatchStore.WatchDelete deleteWatch(String id) {
ensureStarted();
WatchStore.WatchDelete delete = watchStore.delete(id);
if (delete.deleteResponse().getResult() == DocWriteResponse.Result.DELETED) {
triggerService.remove(id);
}
return delete;
}

public IndexResponse putWatch(String id, BytesReference watchSource, boolean active) throws IOException {
ensureStarted();
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = watchParser.parseWithSecrets(id, false, watchSource, now);
watch.setState(active, now);
WatchStore.WatchPut result = watchStore.put(watch);

if (result.previous() == null) {
// this is a newly created watch, so we only need to schedule it if it's active
if (result.current().status().state().isActive()) {
triggerService.add(result.current());
}

} else if (result.current().status().state().isActive()) {

if (!result.previous().status().state().isActive()) {
// the replaced watch was inactive, which means it wasn't scheduled. The new watch is active
// so we need to schedule it
triggerService.add(result.current());

} else if (!result.previous().trigger().equals(result.current().trigger())) {
// the previous watch was active and its schedule is different than the schedule of the
// new watch, so we need to
triggerService.add(result.current());
}
} else {
// if the current is inactive, we'll just remove it from the trigger service
// just to be safe
triggerService.remove(result.current().id());
}
return result.indexResponse();
}

/**
* TODO: add version, fields, etc support that the core get api has as well.
* This reads all watches from the .watches index/alias and puts them into memory for a short period of time,
* before they are fed into the trigger service.
*
* This is only invoked when a node becomes master, so either on start up or when a master node switches - while watcher is started up
*/
public Watch getWatch(String name) {
return watchStore.get(name);
private Collection<Watch> loadWatches(ClusterState clusterState) {
IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(INDEX, clusterState.metaData());

// no index exists, all good, we can start
if (indexMetaData == null) {
return Collections.emptyList();
}

RefreshResponse refreshResponse = client.refresh(new RefreshRequest(INDEX));
if (refreshResponse.getSuccessfulShards() < indexMetaData.getNumberOfShards()) {
throw illegalState("not all required shards have been refreshed");
}

List<Watch> watches = new ArrayList<>();
SearchRequest searchRequest = new SearchRequest(INDEX)
.types(DOC_TYPE)
.scroll(scrollTimeout)
.source(new SearchSourceBuilder()
.size(scrollSize)
.sort(SortBuilders.fieldSort("_doc"))
.version(true));
SearchResponse response = client.search(searchRequest, null);
try {
if (response.getTotalShards() != response.getSuccessfulShards()) {
throw new ElasticsearchException("Partial response while loading watches");
}

while (response.getHits().hits().length != 0) {
for (SearchHit hit : response.getHits()) {
String id = hit.getId();
try {
Watch watch = parser.parse(id, true, hit.getSourceRef());
watch.version(hit.version());
watches.add(watch);
} catch (Exception e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage("couldn't load watch [{}], ignoring it...", id), e);
}
}
response = client.searchScroll(response.getScrollId(), scrollTimeout);
}
} finally {
client.clearScroll(response.getScrollId());
}
return watches;
}



public WatcherState state() {
return state.get();
}

/**
* Acks the watch if needed
*/
public WatchStatus ackWatch(String id, String[] actionIds) throws IOException {
ensureStarted();
if (actionIds == null || actionIds.length == 0) {
actionIds = new String[] { Watch.ALL_ACTIONS_ID };
}
Watch watch = watchStore.get(id);
if (watch == null) {
throw illegalArgument("watch [{}] does not exist", id);
}
// we need to create a safe copy of the status
if (watch.ack(new DateTime(clock.millis(), UTC), actionIds)) {
try {
watchStore.updateStatus(watch);
} catch (IOException ioe) {
throw ioException("failed to update the watch [{}] on ack", ioe, watch.id());
} catch (VersionConflictEngineException vcee) {
throw illegalState("failed to update the watch [{}] on ack, perhaps it was force deleted", vcee, watch.id());
}
}
return new WatchStatus(watch.status());
}

public WatchStatus activateWatch(String id) throws IOException {
return setWatchState(id, true);
}

public WatchStatus deactivateWatch(String id) throws IOException {
return setWatchState(id, false);
}

WatchStatus setWatchState(String id, boolean active) throws IOException {
ensureStarted();
// for now, when a watch is deactivated we don't remove its runtime representation
// that is, the store will still keep the watch in memory. We only mark the watch
// as inactive (both in runtime and also update the watch in the watches index)
// and remove the watch from the trigger service, such that it will not be triggered
// nor its trigger be evaluated.
//
// later on we can consider removing the watch runtime representation from memory
// as well. This will mean that the in-memory loaded watches will no longer be a
// complete representation of the watches in the index. This requires careful thought
// to make sure, such incompleteness doesn't hurt any other part of watcher (we need
// to run this exercise anyway... and make sure that nothing in watcher relies on the
// fact that the watch store holds all watches in memory.

Watch watch = watchStore.get(id);
if (watch == null) {
throw illegalArgument("watch [{}] does not exist", id);
}
if (watch.setState(active, new DateTime(clock.millis(), UTC))) {
try {
watchStore.updateStatus(watch);
if (active) {
triggerService.add(watch);
} else {
triggerService.remove(watch.id());
}
} catch (IOException ioe) {
throw ioException("failed to update the watch [{}] on ack", ioe, watch.id());
} catch (VersionConflictEngineException vcee) {
throw illegalState("failed to update the watch [{}] on ack, perhaps it was force deleted", vcee, watch.id());
}
}
// we need to create a safe copy of the status
return new WatchStatus(watch.status());
}

public long watchesCount() {
return watchStore.watches().size();
}

private void ensureStarted() {
if (state.get() != WatcherState.STARTED) {
throw new IllegalStateException("not started");
}
}

public Map<String, Object> usageStats() {
Map<String, Object> innerMap = executionService.usageStats();
innerMap.putAll(watchStore.usageStats());
return innerMap;
}

/**
* Something deleted or closed the {@link WatchStore#INDEX} and thus we need to do some cleanup to prevent further execution of watches
* Something deleted or closed the {@link Watch#INDEX} and thus we need to do some cleanup to prevent further execution of watches
* as those watches cannot be updated anymore
*/
public void watchIndexDeletedOrClosed() {
watchStore.clearWatchesInMemory();
executionService.clearExecutions();
}
}
@ -74,11 +74,7 @@ public class ActionStatus implements ToXContent {

@Override
public int hashCode() {
int result = ackStatus.hashCode();
result = 31 * result + (lastExecution != null ? lastExecution.hashCode() : 0);
result = 31 * result + (lastSuccessfulExecution != null ? lastSuccessfulExecution.hashCode() : 0);
result = 31 * result + (lastThrottle != null ? lastThrottle.hashCode() : 0);
return result;
return Objects.hash(ackStatus, lastExecution, lastSuccessfulExecution, lastThrottle);
}

public void update(DateTime timestamp, Action.Result result) {
@ -238,15 +234,12 @@ public class ActionStatus implements ToXContent {

AckStatus ackStatus = (AckStatus) o;

if (!timestamp.equals(ackStatus.timestamp)) return false;
return state == ackStatus.state;
return Objects.equals(timestamp, ackStatus.timestamp) && Objects.equals(state, ackStatus.state);
}

@Override
public int hashCode() {
int result = timestamp.hashCode();
result = 31 * result + state.hashCode();
return result;
return Objects.hash(timestamp, state);
}

@Override
@ -11,10 +11,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.common.secret.Secret;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser;
import org.elasticsearch.xpack.notification.email.Authentication;
import org.elasticsearch.xpack.notification.email.DataAttachment;
import org.elasticsearch.xpack.notification.email.Email;
@ -22,6 +19,9 @@ import org.elasticsearch.xpack.notification.email.EmailTemplate;
import org.elasticsearch.xpack.notification.email.Profile;
import org.elasticsearch.xpack.notification.email.attachment.EmailAttachments;
import org.elasticsearch.xpack.notification.email.attachment.EmailAttachmentsParser;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser;

import java.io.IOException;
import java.util.Locale;
@ -105,7 +105,7 @@ public class EmailAction implements Action {
}
if (auth != null) {
builder.field(Field.USER.getPreferredName(), auth.user());
if (!WatcherParams.hideSecrets(params)) {
if (WatcherParams.hideSecrets(params) == false) {
builder.field(Field.PASSWORD.getPreferredName(), auth.password(), params);
}
}
@ -33,19 +33,20 @@ public final class ScriptCondition extends Condition {

private final ScriptService scriptService;
private final Script script;
private final CompiledScript compiledScript;

public ScriptCondition(Script script) {
super(TYPE);
this.script = script;
scriptService = null;
compiledScript = null;
}

ScriptCondition(Script script, ScriptService scriptService) {
super(TYPE);
this.scriptService = scriptService;
this.script = script;
// try to compile so we catch syntax errors early
scriptService.compile(script, Watcher.SCRIPT_CONTEXT, Collections.emptyMap());
compiledScript = scriptService.compile(script, Watcher.SCRIPT_CONTEXT, Collections.emptyMap());
}

public Script getScript() {
@ -72,7 +73,6 @@ public final class ScriptCondition extends Condition {
if (script.getParams() != null && !script.getParams().isEmpty()) {
parameters.putAll(script.getParams());
}
CompiledScript compiledScript = scriptService.compile(script, Watcher.SCRIPT_CONTEXT, Collections.emptyMap());
ExecutableScript executable = scriptService.executable(compiledScript, parameters);
Object value = executable.run();
if (value instanceof Boolean) {
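For illustration only, not part of the commit: because the constructor above now compiles the script eagerly, a syntactically broken condition script is rejected when the watch is parsed instead of when it first executes. A minimal sketch of that behaviour; the enclosing method and both arguments are assumed to come from surrounding test code.

// sketch: both arguments are assumed to be provided by the caller
void brokenScriptFailsAtParseTime(Script badScript, ScriptService scriptService) {
    try {
        // the package-private constructor from the diff above compiles the script immediately
        new ScriptCondition(badScript, scriptService);
    } catch (Exception e) {
        // the compile error surfaces here, at watch creation time, rather than on the first trigger
    }
}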
@ -7,9 +7,10 @@ package org.elasticsearch.xpack.watcher.execution;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.metrics.MeanMetric;
@ -25,13 +26,15 @@ import org.elasticsearch.xpack.watcher.condition.Condition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.input.Input;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchLockService;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils;
import org.joda.time.DateTime;

import java.io.IOException;
import java.time.Clock;
import java.util.ArrayList;
import java.util.BitSet;
@ -39,7 +42,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@ -58,28 +60,31 @@ public class ExecutionService extends AbstractComponent {
private final HistoryStore historyStore;
private final TriggeredWatchStore triggeredWatchStore;
private final WatchExecutor executor;
private final WatchStore watchStore;
private final WatchLockService watchLockService;
private final Clock clock;
private final TimeValue defaultThrottlePeriod;
private final TimeValue maxStopTimeout;
private final ThreadPool threadPool;
private final Watch.Parser parser;
private final WatcherClientProxy client;

private volatile CurrentExecutions currentExecutions;
private final AtomicBoolean started = new AtomicBoolean(false);

public ExecutionService(Settings settings, HistoryStore historyStore, TriggeredWatchStore triggeredWatchStore, WatchExecutor executor,
WatchStore watchStore, WatchLockService watchLockService, Clock clock, ThreadPool threadPool) {
WatchLockService watchLockService, Clock clock, ThreadPool threadPool, Watch.Parser parser,
WatcherClientProxy client) {
super(settings);
this.historyStore = historyStore;
this.triggeredWatchStore = triggeredWatchStore;
this.executor = executor;
this.watchStore = watchStore;
this.watchLockService = watchLockService;
this.clock = clock;
this.defaultThrottlePeriod = DEFAULT_THROTTLE_PERIOD_SETTING.get(settings);
this.maxStopTimeout = Watcher.MAX_STOP_TIMEOUT_SETTING.get(settings);
this.threadPool = threadPool;
this.parser = parser;
this.client = client;
}

public void start(ClusterState state) throws Exception {
@ -105,7 +110,16 @@
}

public boolean validate(ClusterState state) {
return triggeredWatchStore.validate(state);
boolean triggeredWatchStoreReady = triggeredWatchStore.validate(state);
try {
IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, state.metaData());
if (indexMetaData != null) {
return triggeredWatchStoreReady && state.routingTable().index(indexMetaData.getIndex()).allPrimaryShardsActive();
}
} catch (Exception e) {
return false;
}
return triggeredWatchStoreReady;
}

public void stop() {
@ -171,42 +185,40 @@
if (!started.get()) {
throw new IllegalStateException("not started");
}
final LinkedList<TriggeredWatch> triggeredWatches = new LinkedList<>();
final LinkedList<TriggeredExecutionContext> contexts = new LinkedList<>();

final List<TriggeredWatch> triggeredWatches = new ArrayList<>();
final List<TriggeredExecutionContext> contexts = new ArrayList<>();
DateTime now = new DateTime(clock.millis(), UTC);
for (TriggerEvent event : events) {
Watch watch = watchStore.get(event.jobName());
if (watch == null) {
logger.warn("unable to find watch [{}] in the watch store, perhaps it has been deleted", event.jobName());
continue;
}
TriggeredExecutionContext ctx = new TriggeredExecutionContext(watch, now, event, defaultThrottlePeriod);
contexts.add(ctx);
triggeredWatches.add(new TriggeredWatch(ctx.id(), event));
}

logger.debug("saving watch records [{}]", triggeredWatches.size());

triggeredWatchStore.putAll(triggeredWatches, new ActionListener<BitSet>() {
@Override
public void onResponse(BitSet slots) {
int slot = 0;
while ((slot = slots.nextSetBit(slot)) != -1) {
executeAsync(contexts.get(slot), triggeredWatches.get(slot));
slot++;
}
}

@Override
public void onFailure(Exception e) {
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause instanceof EsRejectedExecutionException) {
logger.debug("failed to store watch records due to overloaded threadpool [{}]", ExceptionsHelper.detailedMessage(e));
threadPool.generic().execute(() -> {
for (TriggerEvent event : events) {
GetResponse response = client.getWatch(event.jobName());
if (response.isExists() == false) {
logger.warn("unable to find watch [{}] in watch index, perhaps it has been deleted", event.jobName());
} else {
logger.warn("failed to store watch records", e);
try {
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
TriggeredExecutionContext ctx = new TriggeredExecutionContext(watch, now, event, defaultThrottlePeriod);
contexts.add(ctx);
triggeredWatches.add(new TriggeredWatch(ctx.id(), event));
} catch (IOException e) {
logger.warn("unable to parse watch [{}]", event.jobName());
}
}
}

if (triggeredWatches.isEmpty() == false) {
logger.debug("saving triggered [{}] watches", triggeredWatches.size());

triggeredWatchStore.putAll(triggeredWatches, ActionListener.wrap(
(slots) -> {
int slot = 0;
while ((slot = slots.nextSetBit(slot)) != -1) {
executeAsync(contexts.get(slot), triggeredWatches.get(slot));
slot++;
}
},
(e) -> logger.warn("failed to store watch [] records", e)));
}
});
}

@ -214,31 +226,30 @@
if (!started.get()) {
throw new IllegalStateException("not started");
}
final LinkedList<TriggeredWatch> triggeredWatches = new LinkedList<>();
final LinkedList<TriggeredExecutionContext> contexts = new LinkedList<>();
final List<TriggeredWatch> triggeredWatches = new ArrayList<>();
final List<TriggeredExecutionContext> contexts = new ArrayList<>();

DateTime now = new DateTime(clock.millis(), UTC);
for (TriggerEvent event : events) {
Watch watch = watchStore.get(event.jobName());
if (watch == null) {
logger.warn("unable to find watch [{}] in the watch store, perhaps it has been deleted", event.jobName());
GetResponse response = client.getWatch(event.jobName());
if (response.isExists() == false) {
logger.warn("unable to find watch [{}] in watch index, perhaps it has been deleted", event.jobName());
continue;
}
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
TriggeredExecutionContext ctx = new TriggeredExecutionContext(watch, now, event, defaultThrottlePeriod);
contexts.add(ctx);
triggeredWatches.add(new TriggeredWatch(ctx.id(), event));
}

logger.debug("saving watch records [{}]", triggeredWatches.size());
if (triggeredWatches.size() == 0) {
return;
}

BitSet slots = triggeredWatchStore.putAll(triggeredWatches);
int slot = 0;
while ((slot = slots.nextSetBit(slot)) != -1) {
executeAsync(contexts.get(slot), triggeredWatches.get(slot));
slot++;
if (triggeredWatches.isEmpty() == false) {
logger.debug("saving triggered [{}] watches", triggeredWatches.size());
BitSet slots = triggeredWatchStore.putAll(triggeredWatches);
int slot = 0;
while ((slot = slots.nextSetBit(slot)) != -1) {
executeAsync(contexts.get(slot), triggeredWatches.get(slot));
slot++;
}
}
}

@ -250,7 +261,10 @@
}
try {
currentExecutions.put(ctx.watch().id(), new WatchExecution(ctx, Thread.currentThread()));
if (ctx.knownWatch() && watchStore.get(ctx.watch().id()) == null) {
final AtomicBoolean watchExists = new AtomicBoolean(true);
client.getWatch(ctx.watch().id(), ActionListener.wrap((r) -> watchExists.set(r.isExists()), (e) -> watchExists.set(false)));

if (ctx.knownWatch() && watchExists.get() == false) {
// fail fast if we are trying to execute a deleted watch
String message = "unable to find watch for record [" + ctx.id() + "], perhaps it has been deleted, ignoring...";
logger.warn("{}", message);
@ -261,7 +275,7 @@

record = executeInner(ctx);
if (ctx.recordExecution()) {
watchStore.updateStatus(ctx.watch());
client.updateWatchStatus(ctx.watch());
}
}
} catch (Exception e) {
@ -330,33 +344,28 @@
try {
executor.execute(new WatchExecutionTask(ctx));
} catch (EsRejectedExecutionException e) {
// we are still in the transport thread here most likely, so we cannot run heavy operations
// this means some offloading needs to be done for indexing into the history and delete the triggered watches entry
threadPool.generic().execute(() -> {
String message = "failed to run triggered watch [" + triggeredWatch.id() + "] due to thread pool capacity";
logger.debug("{}", message);
WatchRecord record = ctx.abortBeforeExecution(ExecutionState.FAILED, message);
try {
if (ctx.overrideRecordOnConflict()) {
historyStore.forcePut(record);
} else {
historyStore.put(record);
}
} catch (Exception exc) {
logger.error((Supplier<?>) () ->
new ParameterizedMessage("Error storing watch history record for watch [{}] after thread pool rejection",
triggeredWatch.id()), exc);
String message = "failed to run triggered watch [" + triggeredWatch.id() + "] due to thread pool capacity";
WatchRecord record = ctx.abortBeforeExecution(ExecutionState.FAILED, message);
try {
if (ctx.overrideRecordOnConflict()) {
historyStore.forcePut(record);
} else {
historyStore.put(record);
}
} catch (Exception exc) {
logger.error((Supplier<?>) () ->
new ParameterizedMessage("Error storing watch history record for watch [{}] after thread pool rejection",
triggeredWatch.id()), exc);
}

try {
triggeredWatchStore.delete(triggeredWatch.id());
} catch (Exception exc) {
logger.error((Supplier<?>) () ->
new ParameterizedMessage("Error deleting triggered watch store record for watch [{}] after thread pool " +
"rejection", triggeredWatch.id()), exc);
}
});
}
try {
triggeredWatchStore.delete(triggeredWatch.id());
} catch (Exception exc) {
logger.error((Supplier<?>) () ->
new ParameterizedMessage("Error deleting triggered watch store record for watch [{}] after thread pool " +
"rejection", triggeredWatch.id()), exc);
}
};
}

WatchRecord executeInner(WatchExecutionContext ctx) {
@ -417,8 +426,8 @@
assert triggeredWatches != null;
int counter = 0;
for (TriggeredWatch triggeredWatch : triggeredWatches) {
Watch watch = watchStore.get(triggeredWatch.id().watchId());
if (watch == null) {
GetResponse response = client.getWatch(triggeredWatch.id().watchId());
if (response.isExists() == false) {
String message = "unable to find watch for record [" + triggeredWatch.id().watchId() + "]/[" + triggeredWatch.id() +
"], perhaps it has been deleted, ignoring...";
WatchRecord record = new WatchRecord.MessageWatchRecord(triggeredWatch.id(), triggeredWatch.triggerEvent(),
@ -426,8 +435,10 @@
historyStore.forcePut(record);
triggeredWatchStore.delete(triggeredWatch.id());
} else {
TriggeredExecutionContext ctx = new StartupExecutionContext(watch, new DateTime(clock.millis(), UTC),
triggeredWatch.triggerEvent(), defaultThrottlePeriod);
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
TriggeredExecutionContext ctx =
new StartupExecutionContext(watch, now, triggeredWatch.triggerEvent(), defaultThrottlePeriod);
executeAsync(ctx, triggeredWatch);
counter++;
}
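For illustration only, not part of the commit: a sketch of how the stricter validate() above is meant to gate startup, so that the execution service only starts once the cluster state says the watch indices' primary shards are available. The wrapper method is hypothetical; validate(ClusterState) and start(ClusterState) are the methods shown in the hunks above.

// hypothetical caller; in the real code path the watcher lifecycle code drives this
void startIfReady(ExecutionService executionService, ClusterState clusterState) throws Exception {
    if (executionService.validate(clusterState)) {
        // validation passed according to the checks in the hunks above
        executionService.start(clusterState);
    }
    // otherwise wait for a later cluster state update and try again
}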
@ -15,7 +15,6 @@ import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
@ -24,7 +23,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
@ -75,8 +73,9 @@ public class TriggeredWatchStore extends AbstractComponent {
public boolean validate(ClusterState state) {
try {
IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(INDEX_NAME, state.metaData());
return state.routingTable().index(indexMetaData.getIndex()).allPrimaryShardsActive();
} catch (IndexNotFoundException e) {
if (indexMetaData != null) {
return state.routingTable().index(indexMetaData.getIndex()).allPrimaryShardsActive();
}
} catch (IllegalStateException e) {
logger.trace((Supplier<?>) () -> new ParameterizedMessage("error getting index meta data [{}]: ", INDEX_NAME), e);
return false;
@ -108,29 +107,20 @@
}
}

public void put(final TriggeredWatch triggeredWatch, final ActionListener<Boolean> listener) throws Exception {
public void put(final TriggeredWatch triggeredWatch, final ActionListener<Boolean> listener) {
ensureStarted();
try {
IndexRequest request = new IndexRequest(INDEX_NAME, DOC_TYPE, triggeredWatch.id().value())
.source(XContentFactory.jsonBuilder().value(triggeredWatch))
.opType(IndexRequest.OpType.CREATE);
client.index(request, new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse response) {
listener.onResponse(true);
}

@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
client.index(request, ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure));
} catch (IOException e) {
throw ioException("failed to persist triggered watch [{}]", e, triggeredWatch);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("could not index triggered watch [{}], ignoring it...",
triggeredWatch.id()), e);
}
}

public void putAll(final List<TriggeredWatch> triggeredWatches, final ActionListener<BitSet> listener) throws Exception {
public void putAll(final List<TriggeredWatch> triggeredWatches, final ActionListener<BitSet> listener) {

if (triggeredWatches.isEmpty()) {
listener.onResponse(new BitSet(0));
@ -138,55 +128,39 @@
}

if (triggeredWatches.size() == 1) {
put(triggeredWatches.get(0), new ActionListener<Boolean>() {
@Override
public void onResponse(Boolean success) {
BitSet bitSet = new BitSet(1);
bitSet.set(0);
listener.onResponse(bitSet);
}

@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
put(triggeredWatches.get(0), ActionListener.wrap(success -> {
BitSet bitSet = new BitSet(1);
bitSet.set(0);
listener.onResponse(bitSet);
}, listener::onFailure));
return;
}

ensureStarted();
try {
BulkRequest request = new BulkRequest();
for (TriggeredWatch triggeredWatch : triggeredWatches) {
BulkRequest request = new BulkRequest();
for (TriggeredWatch triggeredWatch : triggeredWatches) {
try {
IndexRequest indexRequest = new IndexRequest(INDEX_NAME, DOC_TYPE, triggeredWatch.id().value());
indexRequest.source(XContentFactory.jsonBuilder().value(triggeredWatch));
indexRequest.opType(IndexRequest.OpType.CREATE);
request.add(indexRequest);
} catch (IOException e) {
logger.warn("could not create JSON to store triggered watch [{}]", triggeredWatch.id().value());
}
client.bulk(request, new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse response) {
BitSet successFullSlots = new BitSet(triggeredWatches.size());
for (int i = 0; i < response.getItems().length; i++) {
BulkItemResponse itemResponse = response.getItems()[i];
if (itemResponse.isFailed()) {
logger.error("could store triggered watch with id [{}], because failed [{}]", itemResponse.getId(),
itemResponse.getFailureMessage());
} else {
successFullSlots.set(i);
}
}
listener.onResponse(successFullSlots);
}

@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
} catch (IOException e) {
throw ioException("failed to persist triggered watches", e);
}
client.bulk(request, ActionListener.wrap(response -> {
BitSet successFullSlots = new BitSet(triggeredWatches.size());
for (int i = 0; i < response.getItems().length; i++) {
BulkItemResponse itemResponse = response.getItems()[i];
if (itemResponse.isFailed()) {
logger.error("could not store triggered watch with id [{}], failed [{}]", itemResponse.getId(),
itemResponse.getFailureMessage());
} else {
successFullSlots.set(i);
}
}
listener.onResponse(successFullSlots);
}, listener::onFailure));
}

public BitSet putAll(final List<TriggeredWatch> triggeredWatches) throws Exception {
@ -229,10 +203,8 @@
}

public Collection<TriggeredWatch> loadTriggeredWatches(ClusterState state) {
IndexMetaData indexMetaData;
try {
indexMetaData = WatchStoreUtils.getConcreteIndex(INDEX_NAME, state.metaData());
} catch (IndexNotFoundException e) {
IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(INDEX_NAME, state.metaData());
if (indexMetaData == null) {
return Collections.emptySet();
}

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.rest.action;
|
||||
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
@ -16,10 +17,8 @@ import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.rest.action.RestBuilderListener;
|
||||
import org.elasticsearch.xpack.watcher.client.WatcherClient;
|
||||
import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -48,11 +47,9 @@ public class RestGetWatchAction extends WatcherRestHandler {
|
||||
.field("found", response.isFound())
|
||||
.field("_id", response.getId());
|
||||
if (response.isFound()) {
|
||||
WatcherParams params = WatcherParams.builder(request)
|
||||
.put(WatchStatus.INCLUDE_VERSION_KEY, true)
|
||||
.build();
|
||||
builder.field("_status", response.getStatus(), params);
|
||||
builder.field("watch", response.getSource(), params);
|
||||
ToXContent.MapParams xContentParams = new ToXContent.MapParams(request.params());
|
||||
builder.field("_status", response.getStatus(), xContentParams);
|
||||
builder.field("watch", response.getSource(), xContentParams);
|
||||
}
|
||||
builder.endObject();
|
||||
|
||||
|
@ -15,7 +15,7 @@ import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.xpack.watcher.client.WatcherClient;
|
||||
import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -33,18 +33,18 @@ public class RestHijackOperationAction extends WatcherRestHandler {
|
||||
super(settings);
|
||||
if (!settings.getAsBoolean(ALLOW_DIRECT_ACCESS_TO_WATCH_INDEX_SETTING, false)) {
|
||||
WatcherRestHandler unsupportedHandler = new UnsupportedHandler(settings);
|
||||
controller.registerHandler(POST, WatchStore.INDEX + "/watch", this);
|
||||
controller.registerHandler(POST, WatchStore.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(PUT, WatchStore.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(POST, WatchStore.INDEX + "/watch/{id}/_update", this);
|
||||
controller.registerHandler(DELETE, WatchStore.INDEX + "/watch/_query", this);
|
||||
controller.registerHandler(DELETE, WatchStore.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(GET, WatchStore.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(POST, WatchStore.INDEX + "/watch/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(POST, WatchStore.INDEX + "/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(PUT, WatchStore.INDEX + "/watch/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(PUT, WatchStore.INDEX + "/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(DELETE, WatchStore.INDEX, unsupportedHandler);
|
||||
controller.registerHandler(POST, Watch.INDEX + "/watch", this);
|
||||
controller.registerHandler(POST, Watch.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(PUT, Watch.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(POST, Watch.INDEX + "/watch/{id}/_update", this);
|
||||
controller.registerHandler(DELETE, Watch.INDEX + "/watch/_query", this);
|
||||
controller.registerHandler(DELETE, Watch.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(GET, Watch.INDEX + "/watch/{id}", this);
|
||||
controller.registerHandler(POST, Watch.INDEX + "/watch/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(POST, Watch.INDEX + "/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(PUT, Watch.INDEX + "/watch/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(PUT, Watch.INDEX + "/_bulk", unsupportedHandler);
|
||||
controller.registerHandler(DELETE, Watch.INDEX, unsupportedHandler);
|
||||
}
|
||||
}
|
||||
|
||||
@ -56,7 +56,7 @@ public class RestHijackOperationAction extends WatcherRestHandler {
|
||||
}
|
||||
XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
|
||||
jsonBuilder.startObject().field("error", "This endpoint is not supported for " +
|
||||
request.method().name() + " on " + WatchStore.INDEX + " index. Please use " +
|
||||
request.method().name() + " on " + Watch.INDEX + " index. Please use " +
|
||||
request.method().name() + " " + URI_BASE + "/watch/<watch_id> instead");
|
||||
jsonBuilder.field("status", RestStatus.BAD_REQUEST.getStatus());
|
||||
jsonBuilder.endObject();
|
||||
@ -77,7 +77,7 @@ public class RestHijackOperationAction extends WatcherRestHandler {
|
||||
}
|
||||
XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
|
||||
jsonBuilder.startObject().field("error", "This endpoint is not supported for " +
|
||||
request.method().name() + " on " + WatchStore.INDEX + " index.");
|
||||
request.method().name() + " on " + Watch.INDEX + " index.");
|
||||
jsonBuilder.field("status", RestStatus.BAD_REQUEST.getStatus());
|
||||
jsonBuilder.endObject();
|
||||
return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.BAD_REQUEST, jsonBuilder));
|
||||
|
@ -14,6 +14,8 @@ import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.search.ClearScrollRequest;
|
||||
@ -21,13 +23,23 @@ import org.elasticsearch.action.search.ClearScrollResponse;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchScrollRequest;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.action.update.UpdateResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.routing.Preference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.engine.DocumentMissingException;
|
||||
import org.elasticsearch.xpack.common.init.proxy.ClientProxy;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
/**
|
||||
* A lazily initialized proxy to an elasticsearch {@link Client}. Inject this proxy whenever a client
|
||||
@ -65,6 +77,10 @@ public class WatcherClientProxy extends ClientProxy {
|
||||
return client.update(preProcess(request)).actionGet(defaultIndexTimeout);
|
||||
}
|
||||
|
||||
public void update(UpdateRequest request, ActionListener<UpdateResponse> listener) {
|
||||
client.update(preProcess(request), listener);
|
||||
}
|
||||
|
||||
public BulkResponse bulk(BulkRequest request, TimeValue timeout) {
|
||||
if (timeout == null) {
|
||||
timeout = defaultBulkTimeout;
|
||||
@ -110,4 +126,47 @@ public class WatcherClientProxy extends ClientProxy {
|
||||
preProcess(request);
|
||||
client.admin().indices().putTemplate(request, listener);
|
||||
}
|
||||
|
||||
public GetResponse getWatch(String id) {
|
||||
PlainActionFuture<GetResponse> future = PlainActionFuture.newFuture();
|
||||
getWatch(id, future);
|
||||
return future.actionGet();
|
||||
}
|
||||
|
||||
public void getWatch(String id, ActionListener<GetResponse> listener) {
|
||||
GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, id).preference(Preference.LOCAL.type()).realtime(true);
|
||||
client.get(preProcess(getRequest), listener);
|
||||
}
|
||||
|
||||
public void deleteWatch(String id, ActionListener<DeleteResponse> listener) {
|
||||
DeleteRequest request = new DeleteRequest(Watch.INDEX, Watch.DOC_TYPE, id);
|
||||
client.delete(preProcess(request), listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates and persists the status of the given watch
|
||||
*
|
||||
* If the watch is missing (because it might have been deleted by the user during an execution), then this method
|
||||
* does nothing and just returns without throwing an exception
|
||||
*/
|
||||
public void updateWatchStatus(Watch watch) throws IOException {
|
||||
// at the moment we store the status together with the watch,
|
||||
// so we just need to update the watch itself
|
||||
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(Watch.INCLUDE_STATUS_KEY, "true"));
|
||||
XContentBuilder source = JsonXContent.contentBuilder().
|
||||
startObject()
|
||||
.field(Watch.Field.STATUS.getPreferredName(), watch.status(), params)
|
||||
.endObject();
|
||||
|
||||
UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, watch.id());
|
||||
updateRequest.doc(source);
|
||||
updateRequest.version(watch.version());
|
||||
try {
|
||||
this.update(updateRequest);
|
||||
} catch (DocumentMissingException e) {
|
||||
// do not rethrow this exception, otherwise the watch history will contain an exception
|
||||
// even though the execution might have been fine
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
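A minimal usage sketch of the new getWatch/updateWatchStatus helpers shown above, assuming an injected WatcherClientProxy and Watch.Parser; the class, method, and parameter names below are hypothetical and not part of this commit:

import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;

import java.io.IOException;

// Hypothetical caller: acks all actions of a stored watch and persists the
// resulting status via the new updateWatchStatus(...) helper.
class AckAndPersistExample {
    static void ackAndPersist(WatcherClientProxy client, Watch.Parser parser, String watchId, DateTime now) throws IOException {
        GetResponse response = client.getWatch(watchId);          // realtime get, preference _local
        if (response.isExists() == false) {
            return;                                               // watch was deleted concurrently
        }
        Watch watch = parser.parseWithSecrets(watchId, true, response.getSourceAsBytesRef(), now);
        watch.version(response.getVersion());                     // carry the version for optimistic locking
        watch.status().version(response.getVersion());
        watch.ack(now, new String[]{Watch.ALL_ACTIONS_ID});       // ack every action
        client.updateWatchStatus(watch);                          // persists the status as part of the watch doc
    }
}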
@ -25,10 +25,6 @@ public class WatcherParams extends ToXContent.DelegatingMapParams {
|
||||
return wrap(params).hideSecrets();
|
||||
}
|
||||
|
||||
public static boolean collapseArrays(ToXContent.Params params) {
|
||||
return wrap(params).collapseArrays();
|
||||
}
|
||||
|
||||
public static boolean debug(ToXContent.Params params) {
|
||||
return wrap(params).debug();
|
||||
}
|
||||
@ -41,10 +37,6 @@ public class WatcherParams extends ToXContent.DelegatingMapParams {
|
||||
return paramAsBoolean(HIDE_SECRETS_KEY, false);
|
||||
}
|
||||
|
||||
public boolean collapseArrays() {
|
||||
return paramAsBoolean(COLLAPSE_ARRAYS_KEY, false);
|
||||
}
|
||||
|
||||
public boolean debug() {
|
||||
return paramAsBoolean(DEBUG_KEY, false);
|
||||
}
|
||||
@ -77,11 +69,6 @@ public class WatcherParams extends ToXContent.DelegatingMapParams {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder collapseArrays(boolean collapseArrays) {
|
||||
params.put(COLLAPSE_ARRAYS_KEY, String.valueOf(collapseArrays));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder debug(boolean debug) {
|
||||
params.put(DEBUG_KEY, String.valueOf(debug));
|
||||
return this;
|
||||
|
@ -6,8 +6,10 @@
|
||||
package org.elasticsearch.xpack.watcher.transport.actions.ack;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
@ -15,29 +17,43 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.time.Clock;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
* Performs the ack operation.
|
||||
*/
|
||||
public class TransportAckWatchAction extends WatcherTransportAction<AckWatchRequest, AckWatchResponse> {
|
||||
|
||||
private final WatcherService watcherService;
|
||||
private final Clock clock;
|
||||
private final Watch.Parser parser;
|
||||
private final WatcherClientProxy client;
|
||||
|
||||
@Inject
|
||||
public TransportAckWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, XPackLicenseState licenseState,
|
||||
Watch.Parser parser, WatcherClientProxy client) {
|
||||
super(settings, AckWatchAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
|
||||
licenseState, AckWatchRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
this.clock = clock;
|
||||
this.parser = parser;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -53,19 +69,56 @@ public class TransportAckWatchAction extends WatcherTransportAction<AckWatchRequ
|
||||
@Override
|
||||
protected void masterOperation(AckWatchRequest request, ClusterState state, ActionListener<AckWatchResponse> listener) throws
|
||||
ElasticsearchException {
|
||||
try {
|
||||
WatchStatus watchStatus = watcherService.ackWatch(request.getWatchId(), request.getActionIds());
|
||||
AckWatchResponse response = new AckWatchResponse(watchStatus);
|
||||
listener.onResponse(response);
|
||||
} catch (Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
client.getWatch(request.getWatchId(), ActionListener.wrap((response) -> {
|
||||
if (response.isExists() == false) {
|
||||
listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId()));
|
||||
} else {
|
||||
DateTime now = new DateTime(clock.millis(), UTC);
|
||||
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, response.getSourceAsBytesRef(), now);
|
||||
watch.version(response.getVersion());
|
||||
watch.status().version(response.getVersion());
|
||||
String[] actionIds = request.getActionIds();
|
||||
if (actionIds == null || actionIds.length == 0) {
|
||||
actionIds = new String[]{Watch.ALL_ACTIONS_ID};
|
||||
}
|
||||
|
||||
// exit early in case nothing changes
|
||||
boolean isChanged = watch.ack(now, actionIds);
|
||||
if (isChanged == false) {
|
||||
listener.onResponse(new AckWatchResponse(watch.status()));
|
||||
return;
|
||||
}
|
||||
|
||||
UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId());
|
||||
// this may get rejected with a version conflict, but prevents concurrent updates from a watch execution
|
||||
updateRequest.version(response.getVersion());
|
||||
XContentBuilder builder = jsonBuilder();
|
||||
builder.startObject()
|
||||
.startObject(Watch.Field.STATUS.getPreferredName())
|
||||
.startObject("actions");
|
||||
|
||||
List<String> actionIdsAsList = Arrays.asList(actionIds);
|
||||
boolean updateAll = actionIdsAsList.contains("_all");
|
||||
for (ActionWrapper actionWrapper : watch.actions()) {
|
||||
if (updateAll || actionIdsAsList.contains(actionWrapper.id())) {
|
||||
builder.startObject(actionWrapper.id())
|
||||
.field("ack", watch.status().actionStatus(actionWrapper.id()).ackStatus(), ToXContent.EMPTY_PARAMS)
|
||||
.endObject();
|
||||
}
|
||||
}
|
||||
|
||||
builder.endObject().endObject().endObject();
|
||||
updateRequest.doc(builder);
|
||||
|
||||
client.update(updateRequest, ActionListener.wrap(
|
||||
(updateResponse) -> listener.onResponse(new AckWatchResponse(watch.status())),
|
||||
listener::onFailure));
|
||||
}
|
||||
}, listener::onFailure));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(AckWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, WatchStore.INDEX);
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
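The ack action above sets the expected version on the UpdateRequest so that a concurrent watch execution cannot silently overwrite the acked status. A minimal, self-contained sketch of that versioned partial update; the helper class name is illustrative:

import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.watch.Watch;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

// Sketch of an optimistically locked partial update against the .watches index:
// if another writer bumped the document version in the meantime, the update
// fails with a version conflict instead of losing the ack.
class VersionedStatusUpdateExample {
    static UpdateRequest statusUpdate(String watchId, long expectedVersion) throws IOException {
        XContentBuilder doc = jsonBuilder()
                .startObject()
                    .startObject(Watch.Field.STATUS.getPreferredName())
                        .startObject("actions")
                            // per-action ack state would be rendered here
                        .endObject()
                    .endObject()
                .endObject();
        UpdateRequest request = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, watchId);
        request.version(expectedVersion); // reject the update if the stored version has moved on
        request.doc(doc);
        return request;
    }
}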
@ -6,8 +6,10 @@
|
||||
package org.elasticsearch.xpack.watcher.transport.actions.activate;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
@ -15,29 +17,46 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.writeDate;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
* Performs the watch de/activation operation.
|
||||
*/
|
||||
public class TransportActivateWatchAction extends WatcherTransportAction<ActivateWatchRequest, ActivateWatchResponse> {
|
||||
|
||||
private final WatcherService watcherService;
|
||||
private final Clock clock;
|
||||
private final TriggerService triggerService;
|
||||
private final Watch.Parser parser;
|
||||
private final WatcherClientProxy client;
|
||||
|
||||
@Inject
|
||||
public TransportActivateWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, Clock clock,
|
||||
XPackLicenseState licenseState, TriggerService triggerService, Watch.Parser parser,
|
||||
WatcherClientProxy client) {
|
||||
super(settings, ActivateWatchAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
|
||||
licenseState, ActivateWatchRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
this.clock = clock;
|
||||
this.triggerService = triggerService;
|
||||
this.parser = parser;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -54,20 +73,50 @@ public class TransportActivateWatchAction extends WatcherTransportAction<Activat
|
||||
protected void masterOperation(ActivateWatchRequest request, ClusterState state, ActionListener<ActivateWatchResponse> listener)
|
||||
throws ElasticsearchException {
|
||||
try {
|
||||
WatchStatus watchStatus = request.isActivate() ?
|
||||
watcherService.activateWatch(request.getWatchId()) :
|
||||
watcherService.deactivateWatch(request.getWatchId());
|
||||
ActivateWatchResponse response = new ActivateWatchResponse(watchStatus);
|
||||
listener.onResponse(response);
|
||||
} catch (Exception e) {
|
||||
// if this is a deactivation, remove the watch from the trigger service immediately; no need to wait for all the async calls
|
||||
if (request.isActivate() == false) {
|
||||
triggerService.remove(request.getWatchId());
|
||||
}
|
||||
|
||||
DateTime now = new DateTime(clock.millis(), UTC);
|
||||
UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId());
|
||||
XContentBuilder builder = activateWatchBuilder(request.isActivate(), now);
|
||||
updateRequest.doc(builder);
|
||||
|
||||
client.update(updateRequest, ActionListener.wrap(updateResponse -> {
|
||||
client.getWatch(request.getWatchId(), ActionListener.wrap(getResponse -> {
|
||||
if (getResponse.isExists()) {
|
||||
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now);
|
||||
watch.version(getResponse.getVersion());
|
||||
watch.status().version(getResponse.getVersion());
|
||||
|
||||
if (request.isActivate()) {
|
||||
triggerService.add(watch);
|
||||
}
|
||||
listener.onResponse(new ActivateWatchResponse(watch.status()));
|
||||
} else {
|
||||
listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId()));
|
||||
}
|
||||
}, listener::onFailure));
|
||||
}, listener::onFailure));
|
||||
} catch (IOException e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(ActivateWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, WatchStore.INDEX);
|
||||
private XContentBuilder activateWatchBuilder(boolean active, DateTime now) throws IOException {
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startObject(Watch.Field.STATUS.getPreferredName())
|
||||
.startObject(WatchStatus.Field.STATE.getPreferredName())
|
||||
.field(WatchStatus.Field.ACTIVE.getPreferredName(), active);
|
||||
|
||||
writeDate(WatchStatus.Field.TIMESTAMP.getPreferredName(), builder, now);
|
||||
builder.endObject().endObject().endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(ActivateWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
|
||||
}
|
||||
}
|
||||
|
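These transport actions consistently replace blocking WatcherService calls with client calls chained through ActionListener.wrap, as in the activate action above. A minimal sketch of that chaining pattern, with hypothetical method and parameter names:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;

// Sketch: update the stored document first, then re-read it, forwarding any
// failure straight to the caller's listener. Nothing blocks on a future.
class ListenerChainingExample {
    static void updateThenGet(WatcherClientProxy client, UpdateRequest update, String watchId,
                              ActionListener<GetResponse> listener) {
        client.update(update, ActionListener.wrap(
                updateResponse -> client.getWatch(watchId, listener),
                listener::onFailure));
    }
}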
@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.transport.actions.delete;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
@ -20,25 +19,28 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
|
||||
/**
|
||||
* Performs the delete operation.
|
||||
*/
|
||||
public class TransportDeleteWatchAction extends WatcherTransportAction<DeleteWatchRequest, DeleteWatchResponse> {
|
||||
|
||||
private final WatcherService watcherService;
|
||||
private final WatcherClientProxy client;
|
||||
private final TriggerService triggerService;
|
||||
|
||||
@Inject
|
||||
public TransportDeleteWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherClientProxy client,
|
||||
XPackLicenseState licenseState, TriggerService triggerService) {
|
||||
super(settings, DeleteWatchAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
|
||||
licenseState, DeleteWatchRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
this.client = client;
|
||||
this.triggerService = triggerService;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -54,20 +56,19 @@ public class TransportDeleteWatchAction extends WatcherTransportAction<DeleteWat
|
||||
@Override
|
||||
protected void masterOperation(DeleteWatchRequest request, ClusterState state, ActionListener<DeleteWatchResponse> listener) throws
|
||||
ElasticsearchException {
|
||||
try {
|
||||
DeleteResponse deleteResponse = watcherService.deleteWatch(request.getId()).deleteResponse();
|
||||
boolean deleted = deleteResponse.getResult() == DocWriteResponse.Result.DELETED;
|
||||
DeleteWatchResponse response = new DeleteWatchResponse(deleteResponse.getId(), deleteResponse.getVersion(), deleted);
|
||||
listener.onResponse(response);
|
||||
} catch (Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
client.deleteWatch(request.getId(), ActionListener.wrap(deleteResponse -> {
|
||||
boolean deleted = deleteResponse.getResult() == DocWriteResponse.Result.DELETED;
|
||||
DeleteWatchResponse response = new DeleteWatchResponse(deleteResponse.getId(), deleteResponse.getVersion(), deleted);
|
||||
if (deleted) {
|
||||
triggerService.remove(request.getId());
|
||||
}
|
||||
listener.onResponse(response);
|
||||
},
|
||||
listener::onFailure));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(DeleteWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, WatchStore.INDEX);
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -9,6 +9,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.apache.logging.log4j.util.Supplier;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
@ -29,6 +30,7 @@ import org.elasticsearch.xpack.watcher.execution.ExecutionService;
|
||||
import org.elasticsearch.xpack.watcher.execution.ManualExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.history.WatchRecord;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
|
||||
@ -36,13 +38,12 @@ import org.elasticsearch.xpack.watcher.trigger.TriggerService;
|
||||
import org.elasticsearch.xpack.watcher.trigger.manual.ManualTriggerEvent;
|
||||
import org.elasticsearch.xpack.watcher.watch.Payload;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
@ -51,24 +52,24 @@ import static org.joda.time.DateTimeZone.UTC;
|
||||
public class TransportExecuteWatchAction extends WatcherTransportAction<ExecuteWatchRequest, ExecuteWatchResponse> {
|
||||
|
||||
private final ExecutionService executionService;
|
||||
private final WatchStore watchStore;
|
||||
private final Clock clock;
|
||||
private final TriggerService triggerService;
|
||||
private final Watch.Parser watchParser;
|
||||
private final WatcherClientProxy client;
|
||||
|
||||
@Inject
|
||||
public TransportExecuteWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, ExecutionService executionService,
|
||||
Clock clock, XPackLicenseState licenseState, WatchStore watchStore, TriggerService triggerService,
|
||||
Watch.Parser watchParser) {
|
||||
Clock clock, XPackLicenseState licenseState, TriggerService triggerService,
|
||||
Watch.Parser watchParser, WatcherClientProxy client) {
|
||||
super(settings, ExecuteWatchAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
|
||||
licenseState, ExecuteWatchRequest::new);
|
||||
this.executionService = executionService;
|
||||
this.watchStore = watchStore;
|
||||
this.clock = clock;
|
||||
this.triggerService = triggerService;
|
||||
this.watchParser = watchParser;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -84,57 +85,61 @@ public class TransportExecuteWatchAction extends WatcherTransportAction<ExecuteW
|
||||
@Override
|
||||
protected void masterOperation(ExecuteWatchRequest request, ClusterState state, ActionListener<ExecuteWatchResponse> listener)
|
||||
throws ElasticsearchException {
|
||||
try {
|
||||
Watch watch;
|
||||
boolean knownWatch;
|
||||
if (request.getId() != null) {
|
||||
watch = watchStore.get(request.getId());
|
||||
if (watch == null) {
|
||||
//todo we need to find a better std exception for this one
|
||||
throw new ElasticsearchException("watch [{}] does not exist", request.getId());
|
||||
}
|
||||
knownWatch = true;
|
||||
} else if (request.getWatchSource() != null) {
|
||||
if (request.getId() != null) {
|
||||
try {
|
||||
// should be executed async in the future
|
||||
GetResponse getResponse = client.getWatch(request.getId());
|
||||
Watch watch = watchParser.parse(request.getId(), true, getResponse.getSourceAsBytesRef());
|
||||
ExecuteWatchResponse executeWatchResponse = executeWatch(request, watch, true);
|
||||
listener.onResponse(executeWatchResponse);
|
||||
} catch (IOException e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
} else if (request.getWatchSource() != null) {
|
||||
try {
|
||||
assert !request.isRecordExecution();
|
||||
watch = watchParser.parse(ExecuteWatchRequest.INLINE_WATCH_ID, false, request.getWatchSource());
|
||||
knownWatch = false;
|
||||
} else {
|
||||
throw illegalArgument("no watch provided");
|
||||
Watch watch = watchParser.parse(ExecuteWatchRequest.INLINE_WATCH_ID, true, request.getWatchSource());
|
||||
ExecuteWatchResponse response = executeWatch(request, watch, false);
|
||||
listener.onResponse(response);
|
||||
} catch (Exception e) {
|
||||
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}]", request.getId()), e);
|
||||
listener.onFailure(e);
|
||||
}
|
||||
|
||||
String triggerType = watch.trigger().type();
|
||||
TriggerEvent triggerEvent = triggerService.simulateEvent(triggerType, watch.id(), request.getTriggerData());
|
||||
|
||||
ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watch, knownWatch,
|
||||
new ManualTriggerEvent(triggerEvent.jobName(), triggerEvent), executionService.defaultThrottlePeriod());
|
||||
|
||||
DateTime executionTime = new DateTime(clock.millis(), UTC);
|
||||
ctxBuilder.executionTime(executionTime);
|
||||
for (Map.Entry<String, ActionExecutionMode> entry : request.getActionModes().entrySet()) {
|
||||
ctxBuilder.actionMode(entry.getKey(), entry.getValue());
|
||||
}
|
||||
if (request.getAlternativeInput() != null) {
|
||||
ctxBuilder.withInput(new SimpleInput.Result(new Payload.Simple(request.getAlternativeInput())));
|
||||
}
|
||||
if (request.isIgnoreCondition()) {
|
||||
ctxBuilder.withCondition(AlwaysCondition.RESULT_INSTANCE);
|
||||
}
|
||||
ctxBuilder.recordExecution(request.isRecordExecution());
|
||||
|
||||
WatchRecord record = executionService.execute(ctxBuilder.build());
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
record.toXContent(builder, WatcherParams.builder().hideSecrets(true).debug(request.isDebug()).build());
|
||||
ExecuteWatchResponse response = new ExecuteWatchResponse(record.id().value(), builder.bytes(), XContentType.JSON);
|
||||
listener.onResponse(response);
|
||||
} catch (Exception e) {
|
||||
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}]", request.getId()), e);
|
||||
listener.onFailure(e);
|
||||
} else {
|
||||
listener.onFailure(new IllegalArgumentException("no watch provided"));
|
||||
}
|
||||
}
|
||||
|
||||
private ExecuteWatchResponse executeWatch(ExecuteWatchRequest request, Watch watch, boolean knownWatch) throws IOException {
|
||||
String triggerType = watch.trigger().type();
|
||||
TriggerEvent triggerEvent = triggerService.simulateEvent(triggerType, watch.id(), request.getTriggerData());
|
||||
|
||||
ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watch, knownWatch,
|
||||
new ManualTriggerEvent(triggerEvent.jobName(), triggerEvent), executionService.defaultThrottlePeriod());
|
||||
|
||||
DateTime executionTime = new DateTime(clock.millis(), UTC);
|
||||
ctxBuilder.executionTime(executionTime);
|
||||
for (Map.Entry<String, ActionExecutionMode> entry : request.getActionModes().entrySet()) {
|
||||
ctxBuilder.actionMode(entry.getKey(), entry.getValue());
|
||||
}
|
||||
if (request.getAlternativeInput() != null) {
|
||||
ctxBuilder.withInput(new SimpleInput.Result(new Payload.Simple(request.getAlternativeInput())));
|
||||
}
|
||||
if (request.isIgnoreCondition()) {
|
||||
ctxBuilder.withCondition(AlwaysCondition.RESULT_INSTANCE);
|
||||
}
|
||||
ctxBuilder.recordExecution(request.isRecordExecution());
|
||||
|
||||
WatchRecord record = executionService.execute(ctxBuilder.build());
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
|
||||
record.toXContent(builder, WatcherParams.builder().hideSecrets(true).debug(request.isDebug()).build());
|
||||
return new ExecuteWatchResponse(record.id().value(), builder.bytes(), XContentType.JSON);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(ExecuteWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, WatchStore.INDEX);
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
|
||||
}
|
||||
|
||||
|
||||
|
@ -5,8 +5,6 @@
|
||||
*/
|
||||
package org.elasticsearch.xpack.watcher.transport.actions.get;
|
||||
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.apache.logging.log4j.util.Supplier;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
@ -15,7 +13,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
@ -25,36 +22,38 @@ import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
* Performs the get operation.
|
||||
*/
|
||||
public class TransportGetWatchAction extends WatcherTransportAction<GetWatchRequest, GetWatchResponse> {
|
||||
|
||||
private final WatcherService watcherService;
|
||||
private final Watch.Parser parser;
|
||||
private final Clock clock;
|
||||
private final WatcherClientProxy client;
|
||||
|
||||
@Inject
|
||||
public TransportGetWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, XPackLicenseState licenseState,
|
||||
Watch.Parser parser, Clock clock, WatcherClientProxy client) {
|
||||
super(settings, GetWatchAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
indexNameExpressionResolver, licenseState, GetWatchRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
indexNameExpressionResolver, licenseState, GetWatchRequest::new);
|
||||
this.parser = parser;
|
||||
this.clock = clock;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String executor() {
|
||||
return ThreadPool.Names.SAME; // Super lightweight operation, so don't fork
|
||||
return ThreadPool.Names.MANAGEMENT;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -70,32 +69,30 @@ public class TransportGetWatchAction extends WatcherTransportAction<GetWatchRequ
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
Watch watch = watcherService.getWatch(request.getId());
|
||||
if (watch == null) {
|
||||
client.getWatch(request.getId(), ActionListener.wrap(getResponse -> {
|
||||
if (getResponse.isExists() == false) {
|
||||
listener.onResponse(new GetWatchResponse(request.getId()));
|
||||
return;
|
||||
}
|
||||
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
// When we return the watch via the get api, we want to return the watch as was specified in the put api,
|
||||
// When we return the watch via the Get Watch REST API, we want to return the watch as it was specified in the put api,
|
||||
// we don't include the status in the watch source itself, but as a separate top level field, so that
|
||||
// it indicates that the status is managed by watcher itself.
|
||||
watch.toXContent(builder, WatcherParams.builder().hideSecrets(true).build());
|
||||
BytesReference watchSource = builder.bytes();
|
||||
listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), watchSource, XContentType.JSON));
|
||||
} catch (IOException e) {
|
||||
listener.onFailure(e);
|
||||
DateTime now = new DateTime(clock.millis(), UTC);
|
||||
Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now);
|
||||
watch.toXContent(builder, WatcherParams.builder()
|
||||
.hideSecrets(true)
|
||||
.put(Watch.INCLUDE_STATUS_KEY, false)
|
||||
.build());
|
||||
watch.version(getResponse.getVersion());
|
||||
watch.status().version(getResponse.getVersion());
|
||||
listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), builder.bytes(), XContentType.JSON));
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to get watch [{}]", request.getId()), e);
|
||||
throw e;
|
||||
}
|
||||
}, listener::onFailure));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(GetWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, WatchStore.INDEX);
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, Watch.INDEX);
|
||||
}
|
||||
}
|
||||
|
@ -8,36 +8,53 @@ package org.elasticsearch.xpack.watcher.transport.actions.put;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.LicenseUtils;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStore;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
|
||||
import org.elasticsearch.xpack.watcher.watch.Payload;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.time.Clock;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
public class TransportPutWatchAction extends WatcherTransportAction<PutWatchRequest, PutWatchResponse> {
|
||||
|
||||
private final WatcherService watcherService;
|
||||
private final Clock clock;
|
||||
private final TriggerService triggerService;
|
||||
private final Watch.Parser parser;
|
||||
private final InternalClient client;
|
||||
|
||||
@Inject
|
||||
public TransportPutWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, XPackLicenseState licenseState,
|
||||
TriggerService triggerService, Watch.Parser parser, InternalClient client) {
|
||||
super(settings, PutWatchAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
indexNameExpressionResolver, licenseState, PutWatchRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
indexNameExpressionResolver, licenseState, PutWatchRequest::new);
|
||||
this.clock = clock;
|
||||
this.triggerService = triggerService;
|
||||
this.parser = parser;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -53,15 +70,30 @@ public class TransportPutWatchAction extends WatcherTransportAction<PutWatchRequ
|
||||
@Override
|
||||
protected void masterOperation(PutWatchRequest request, ClusterState state, ActionListener<PutWatchResponse> listener) throws
|
||||
ElasticsearchException {
|
||||
if (licenseState.isWatcherAllowed() == false) {
|
||||
listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.WATCHER));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
IndexResponse indexResponse = watcherService.putWatch(request.getId(), request.getSource(), request.isActive());
|
||||
boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;
|
||||
listener.onResponse(new PutWatchResponse(indexResponse.getId(), indexResponse.getVersion(), created));
|
||||
DateTime now = new DateTime(clock.millis(), UTC);
|
||||
Watch watch = parser.parseWithSecrets(request.getId(), false, request.getSource(), now);
|
||||
watch.setState(request.isActive(), now);
|
||||
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
Payload.XContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(Watch.INCLUDE_STATUS_KEY, "true"));
|
||||
watch.toXContent(builder, params);
|
||||
BytesReference bytesReference = builder.bytes();
|
||||
|
||||
IndexRequest indexRequest = new IndexRequest(Watch.INDEX).type(Watch.DOC_TYPE).id(request.getId());
|
||||
indexRequest.source(bytesReference);
|
||||
|
||||
client.index(indexRequest, ActionListener.wrap(indexResponse -> {
|
||||
boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;
|
||||
if (request.isActive()) {
|
||||
triggerService.add(watch);
|
||||
} else {
|
||||
triggerService.remove(request.getId());
|
||||
}
|
||||
listener.onResponse(new PutWatchResponse(indexResponse.getId(), indexResponse.getVersion(), created));
|
||||
}, listener::onFailure));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
@ -69,7 +101,7 @@ public class TransportPutWatchAction extends WatcherTransportAction<PutWatchRequ
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(PutWatchRequest request, ClusterState state) {
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, WatchStore.INDEX);
|
||||
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
|
||||
}
|
||||
|
||||
}
|
||||
|
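The put action above serializes the watch together with its status by passing the include_status parameter. A minimal sketch of that serialization step, assuming an already parsed Watch; the helper class name is hypothetical:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.watch.Watch;

import java.io.IOException;
import java.util.Collections;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

// Sketch: render a watch including its status, so the status travels with the
// document that gets indexed into the .watches index.
class WatchSourceExample {
    static BytesReference toSourceWithStatus(Watch watch) throws IOException {
        try (XContentBuilder builder = jsonBuilder()) {
            ToXContent.MapParams params =
                    new ToXContent.MapParams(Collections.singletonMap(Watch.INCLUDE_STATUS_KEY, "true"));
            watch.toXContent(builder, params);
            return builder.bytes();
        }
    }
}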
@ -7,7 +7,9 @@ package org.elasticsearch.xpack.watcher.transport.actions.stats;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
@ -16,12 +18,15 @@ import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.watcher.WatcherLifeCycleService;
|
||||
import org.elasticsearch.xpack.watcher.WatcherService;
|
||||
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
|
||||
import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
|
||||
/**
|
||||
* Performs the stats operation.
|
||||
@ -31,23 +36,24 @@ public class TransportWatcherStatsAction extends WatcherTransportAction<WatcherS
|
||||
private final WatcherService watcherService;
|
||||
private final ExecutionService executionService;
|
||||
private final WatcherLifeCycleService lifeCycleService;
|
||||
private final InternalClient client;
|
||||
|
||||
@Inject
|
||||
public TransportWatcherStatsAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, WatcherService watcherService,
|
||||
ExecutionService executionService, XPackLicenseState licenseState,
|
||||
WatcherLifeCycleService lifeCycleService) {
|
||||
WatcherLifeCycleService lifeCycleService, InternalClient client) {
|
||||
super(settings, WatcherStatsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
|
||||
licenseState, WatcherStatsRequest::new);
|
||||
this.watcherService = watcherService;
|
||||
this.executionService = executionService;
|
||||
this.lifeCycleService = lifeCycleService;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String executor() {
|
||||
// cheap operation, no need to fork into another thread
|
||||
return ThreadPool.Names.SAME;
|
||||
}
|
||||
|
||||
@ -59,13 +65,12 @@ public class TransportWatcherStatsAction extends WatcherTransportAction<WatcherS
|
||||
@Override
|
||||
protected void masterOperation(WatcherStatsRequest request, ClusterState state, ActionListener<WatcherStatsResponse> listener) throws
|
||||
ElasticsearchException {
|
||||
|
||||
WatcherStatsResponse statsResponse = new WatcherStatsResponse();
|
||||
statsResponse.setWatcherState(watcherService.state());
|
||||
statsResponse.setThreadPoolQueueSize(executionService.executionThreadPoolQueueSize());
|
||||
statsResponse.setWatchesCount(watcherService.watchesCount());
|
||||
statsResponse.setThreadPoolMaxSize(executionService.executionThreadPoolMaxSize());
|
||||
statsResponse.setWatcherMetaData(lifeCycleService.watcherMetaData());
|
||||
|
||||
if (request.includeCurrentWatches()) {
|
||||
statsResponse.setSnapshots(executionService.currentExecutions());
|
||||
}
|
||||
@ -73,7 +78,13 @@ public class TransportWatcherStatsAction extends WatcherTransportAction<WatcherS
|
||||
statsResponse.setQueuedWatches(executionService.queuedWatches());
|
||||
}
|
||||
|
||||
listener.onResponse(statsResponse);
|
||||
SearchRequest searchRequest =
|
||||
Requests.searchRequest(Watch.INDEX).types(Watch.DOC_TYPE).source(new SearchSourceBuilder().size(0));
|
||||
client.search(searchRequest, ActionListener.wrap(searchResponse -> {
|
||||
statsResponse.setWatchesCount(searchResponse.getHits().totalHits());
|
||||
listener.onResponse(statsResponse);
|
||||
},
|
||||
e -> listener.onResponse(statsResponse)));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
@ -37,9 +38,9 @@ public class TriggerService extends AbstractComponent {
|
||||
this.engines = unmodifiableMap(builder);
|
||||
}
|
||||
|
||||
public synchronized void start(Collection<? extends TriggerEngine.Job> jobs) throws Exception {
|
||||
public synchronized void start(Collection<Watch> watches) throws Exception {
|
||||
for (TriggerEngine engine : engines.values()) {
|
||||
engine.start(jobs);
|
||||
engine.start(watches);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.engine;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEngine;
|
||||
@ -72,7 +71,7 @@ public class SchedulerScheduleTriggerEngine extends ScheduleTriggerEngine {
|
||||
final ScheduleTriggerEvent event = new ScheduleTriggerEvent(name, new DateTime(triggeredTime, UTC),
|
||||
new DateTime(scheduledTime, UTC));
|
||||
for (Listener listener : listeners) {
|
||||
listener.triggered(Collections.<TriggerEvent>singletonList(event));
|
||||
listener.triggered(Collections.singletonList(event));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -59,6 +59,8 @@ public class Watch implements TriggerEngine.Job, ToXContent {
|
||||
|
||||
public static final String ALL_ACTIONS_ID = "_all";
|
||||
public static final String INCLUDE_STATUS_KEY = "include_status";
|
||||
public static final String INDEX = ".watches";
|
||||
public static final String DOC_TYPE = "watch";
|
||||
|
||||
private final String id;
|
||||
private final Trigger trigger;
|
||||
@ -250,7 +252,6 @@ public class Watch implements TriggerEngine.Job, ToXContent {
|
||||
* This method is only called once - when the user adds a new watch. From that moment on, all representations
|
||||
* of the watch in the system will use secrets for sensitive data.
|
||||
*
|
||||
* @see org.elasticsearch.xpack.watcher.WatcherService#putWatch(String, BytesReference, boolean)
|
||||
*/
|
||||
public Watch parseWithSecrets(String id, boolean includeStatus, BytesReference source, DateTime now) throws IOException {
|
||||
return parse(id, includeStatus, true, source, now);
|
||||
@ -317,7 +318,7 @@ public class Watch implements TriggerEngine.Job, ToXContent {
|
||||
metatdata = parser.map();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.STATUS)) {
|
||||
if (includeStatus) {
|
||||
status = WatchStatus.parse(id, parser);
|
||||
status = WatchStatus.parse(id, parser, clock);
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
|
@ -25,6 +25,7 @@ import java.io.IOException;
|
||||
import java.time.Clock;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.unmodifiableMap;
|
||||
@ -37,18 +38,13 @@ import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
public class WatchStatus implements ToXContent, Streamable {
|
||||
|
||||
public static final String INCLUDE_VERSION_KEY = "include_version";
|
||||
|
||||
private transient long version;
|
||||
|
||||
private State state;
|
||||
|
||||
@Nullable private DateTime lastChecked;
|
||||
@Nullable private DateTime lastMetCondition;
|
||||
@Nullable private long version;
|
||||
private Map<String, ActionStatus> actions;
|
||||
|
||||
private volatile boolean dirty = false;
|
||||
|
||||
// for serialization
|
||||
private WatchStatus() {
|
||||
}
|
||||
@ -73,14 +69,6 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
return state;
|
||||
}
|
||||
|
||||
public long version() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public void version(long version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public boolean checked() {
|
||||
return lastChecked != null;
|
||||
}
|
||||
@ -93,19 +81,12 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
return actions.get(actionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* marks this status as non-dirty. this should only be done when the current state of the status is in sync with
|
||||
* the persisted state.
|
||||
*/
|
||||
public void resetDirty() {
|
||||
this.dirty = false;
|
||||
public long version() {
|
||||
return version;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return does this Watch.Status needs to be persisted to the index
|
||||
*/
|
||||
public boolean dirty() {
|
||||
return dirty;
|
||||
public void version(long version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -115,20 +96,15 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
|
||||
WatchStatus that = (WatchStatus) o;
|
||||
|
||||
if (version != that.version) return false;
|
||||
if (lastChecked != null ? !lastChecked.equals(that.lastChecked) : that.lastChecked != null) return false;
|
||||
if (lastMetCondition != null ? !lastMetCondition.equals(that.lastMetCondition) : that.lastMetCondition != null)
|
||||
return false;
|
||||
return !(actions != null ? !actions.equals(that.actions) : that.actions != null);
|
||||
return Objects.equals(lastChecked, that.lastChecked) &&
|
||||
Objects.equals(lastMetCondition, that.lastMetCondition) &&
|
||||
Objects.equals(version, that.version) &&
|
||||
Objects.equals(actions, that.actions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = (int) (version ^ (version >>> 32));
|
||||
result = 31 * result + (lastChecked != null ? lastChecked.hashCode() : 0);
|
||||
result = 31 * result + (lastMetCondition != null ? lastMetCondition.hashCode() : 0);
|
||||
result = 31 * result + (actions != null ? actions.hashCode() : 0);
|
||||
return result;
|
||||
return Objects.hash(lastChecked, lastMetCondition, actions, version);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -139,7 +115,6 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
*/
|
||||
public void onCheck(boolean metCondition, DateTime timestamp) {
|
||||
lastChecked = timestamp;
|
||||
dirty = true;
|
||||
if (metCondition) {
|
||||
lastMetCondition = timestamp;
|
||||
}
|
||||
@ -148,7 +123,6 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
public void onActionResult(String actionId, DateTime timestamp, Action.Result result) {
|
||||
ActionStatus status = actions.get(actionId);
|
||||
status.update(timestamp, result);
|
||||
dirty = true;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -173,7 +147,6 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
for (ActionStatus status : actions.values()) {
|
||||
changed |= status.onAck(timestamp);
|
||||
}
|
||||
dirty |= changed;
|
||||
return changed;
|
||||
}
|
||||
|
||||
@ -183,14 +156,13 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
changed |= status.onAck(timestamp);
|
||||
}
|
||||
}
|
||||
dirty |= changed;
|
||||
|
||||
return changed;
|
||||
}
|
||||
|
||||
boolean setActive(boolean active, DateTime now) {
|
||||
boolean change = this.state.active != active;
|
||||
if (change) {
|
||||
this.dirty = true;
|
||||
this.state = new State(active, now);
|
||||
}
|
||||
return change;
|
||||
@ -233,9 +205,6 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
if (params.paramAsBoolean(INCLUDE_VERSION_KEY, false)) {
|
||||
builder.field(Field.VERSION.getPreferredName(), version);
|
||||
}
|
||||
builder.field(Field.STATE.getPreferredName(), state, params);
|
||||
if (lastChecked != null) {
|
||||
builder.field(Field.LAST_CHECKED.getPreferredName(), lastChecked);
|
||||
@ -250,14 +219,16 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.field(Field.VERSION.getPreferredName(), version);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static WatchStatus parse(String watchId, XContentParser parser) throws IOException {
|
||||
public static WatchStatus parse(String watchId, XContentParser parser, Clock clock) throws IOException {
|
||||
State state = null;
|
||||
DateTime lastChecked = null;
|
||||
DateTime lastMetCondition = null;
|
||||
Map<String, ActionStatus> actions = null;
|
||||
long version = -1;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
@ -266,11 +237,18 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.STATE)) {
|
||||
try {
|
||||
state = State.parse(parser);
|
||||
state = State.parse(parser, clock);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse watch status for [{}]. failed to parse field [{}]",
|
||||
e, watchId, currentFieldName);
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.VERSION)) {
|
||||
if (token.isValue()) {
|
||||
version = parser.longValue();
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse watch status for [{}]. expecting field [{}] to hold a long " +
|
||||
"value, found [{}] instead", watchId, currentFieldName, token);
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_CHECKED)) {
|
||||
if (token.isValue()) {
|
||||
lastChecked = parseDate(currentFieldName, parser, UTC);
|
||||
@ -311,7 +289,7 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
}
|
||||
actions = actions == null ? emptyMap() : unmodifiableMap(actions);
|
||||
|
||||
return new WatchStatus(-1, state, lastChecked, lastMetCondition, actions);
|
||||
return new WatchStatus(version, state, lastChecked, lastMetCondition, actions);
|
||||
}
|
||||
|
||||
public static class State implements ToXContent {
|
||||
@ -340,12 +318,12 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static State parse(XContentParser parser) throws IOException {
|
||||
public static State parse(XContentParser parser, Clock clock) throws IOException {
|
||||
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
|
||||
throw new ElasticsearchParseException("expected an object but found [{}] instead", parser.currentToken());
|
||||
}
|
||||
boolean active = true;
|
||||
DateTime timestamp = new DateTime(Clock.systemUTC().millis(), UTC);
|
||||
DateTime timestamp = new DateTime(clock.millis(), UTC);
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
@ -361,13 +339,13 @@ public class WatchStatus implements ToXContent, Streamable {
|
||||
}
|
||||
}
|
||||
|
||||
interface Field {
|
||||
ParseField VERSION = new ParseField("version");
|
||||
public interface Field {
|
||||
ParseField STATE = new ParseField("state");
|
||||
ParseField ACTIVE = new ParseField("active");
|
||||
ParseField TIMESTAMP = new ParseField("timestamp");
|
||||
ParseField LAST_CHECKED = new ParseField("last_checked");
|
||||
ParseField LAST_MET_CONDITION = new ParseField("last_met_condition");
|
||||
ParseField ACTIONS = new ParseField("actions");
|
||||
ParseField VERSION = new ParseField("version");
|
||||
}
|
||||
}
|
||||
|
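Passing a Clock into WatchStatus.parse (and State.parse) makes the default state timestamp deterministic, which is mainly useful for tests. A minimal sketch, assuming an XContentParser already positioned on a status object; the helper class and method names are hypothetical:

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.watch.WatchStatus;

import java.io.IOException;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneOffset;

// Sketch: with a fixed clock, a status document without an explicit state.timestamp
// always parses to the same timestamp, which keeps test assertions stable.
class FixedClockParseExample {
    static WatchStatus parseAt(String watchId, XContentParser parser, String isoInstant) throws IOException {
        Clock fixed = Clock.fixed(Instant.parse(isoInstant), ZoneOffset.UTC);
        return WatchStatus.parse(watchId, parser, fixed);
    }
}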
@ -1,368 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.watch;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.xpack.common.stats.Counters;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState;

public class WatchStore extends AbstractComponent {

public static final String INDEX = ".watches";
public static final String DOC_TYPE = "watch";

private final WatcherClientProxy client;
private final Watch.Parser watchParser;

private final ConcurrentMap<String, Watch> watches;
private final AtomicBoolean started = new AtomicBoolean(false);

private final int scrollSize;
private final TimeValue scrollTimeout;

public WatchStore(Settings settings, WatcherClientProxy client, Watch.Parser watchParser) {
super(settings);
this.client = client;
this.watchParser = watchParser;
this.watches = ConcurrentCollections.newConcurrentMap();

this.scrollTimeout = settings.getAsTime("xpack.watcher.watch.scroll.timeout", TimeValue.timeValueSeconds(30));
this.scrollSize = settings.getAsInt("xpack.watcher.watch.scroll.size", 100);
}

public void start(ClusterState state) throws Exception {
if (started.get()) {
logger.debug("watch store already started");
return;
}

try {
IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(INDEX, state.metaData());
int count = loadWatches(indexMetaData.getNumberOfShards());
logger.debug("loaded [{}] watches from the watches index [{}]", count, indexMetaData.getIndex().getName());
} catch (IndexNotFoundException e) {
} catch (Exception e) {
logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to load watches for watch index [{}]", INDEX), e);
watches.clear();
throw e;
}

started.set(true);
}

public boolean validate(ClusterState state) {
IndexMetaData watchesIndexMetaData;
try {
watchesIndexMetaData = WatchStoreUtils.getConcreteIndex(INDEX, state.metaData());
} catch (IndexNotFoundException e) {
return true;
} catch (IllegalStateException e) {
logger.trace((Supplier<?>) () -> new ParameterizedMessage("error getting index meta data [{}]: ", INDEX), e);
return false;
}

return state.routingTable().index(watchesIndexMetaData.getIndex().getName()).allPrimaryShardsActive();
}

public boolean started() {
return started.get();
}

public void stop() {
if (started.compareAndSet(true, false)) {
watches.clear();
logger.info("stopped watch store");
}
}

/**
* Returns the watch with the specified id, otherwise <code>null</code> is returned.
*/
public Watch get(String id) {
ensureStarted();
return watches.get(id);
}

/**
* Creates a watch. If the watch already exists it will be overwritten.
*/
public WatchPut put(Watch watch) throws IOException {
ensureStarted();
IndexRequest indexRequest = createIndexRequest(watch.id(), watch.getAsBytes(), Versions.MATCH_ANY);
IndexResponse response = client.index(indexRequest, (TimeValue) null);
watch.status().version(response.getVersion());
watch.version(response.getVersion());
Watch previous = watches.put(watch.id(), watch);
return new WatchPut(previous, watch, response);
}

/**
* Updates and persists the status of the given watch
*/
public void updateStatus(Watch watch) throws IOException {
ensureStarted();
if (!watch.status().dirty()) {
return;
}

// at the moment we store the status together with the watch,
// so we just need to update the watch itself
XContentBuilder source = JsonXContent.contentBuilder().
startObject()
.field(Watch.Field.STATUS.getPreferredName(), watch.status(), ToXContent.EMPTY_PARAMS)
.endObject();

UpdateRequest updateRequest = new UpdateRequest(INDEX, DOC_TYPE, watch.id());
updateRequest.doc(source);
updateRequest.version(watch.version());
try {
UpdateResponse response = client.update(updateRequest);
watch.status().version(response.getVersion());
watch.version(response.getVersion());
watch.status().resetDirty();
} catch (DocumentMissingException e) {
// do not rethrow an exception, otherwise the watch history will contain an exception
// even though the execution might have been fine
logger.warn("Watch [{}] was deleted during watch execution, not updating watch status", watch.id());
}
}

/**
* Deletes the watch with the specified id, if it exists.
*/
public WatchDelete delete(String id) {
ensureStarted();
Watch watch = watches.remove(id);
// even if the watch was not found in the watch map, we should still try to delete it
// from the index, just to make sure we don't leave traces of it
DeleteRequest request = new DeleteRequest(INDEX, DOC_TYPE, id);
DeleteResponse response = client.delete(request);
// Another operation may hold the Watch instance, so let's set the version for consistency:
if (watch != null) {
watch.version(response.getVersion());
}
return new WatchDelete(response);
}

public Collection<Watch> watches() {
return watches.values();
}

public Collection<Watch> activeWatches() {
Set<Watch> watches = new HashSet<>();
for (Watch watch : watches()) {
if (watch.status().state().isActive()) {
watches.add(watch);
}
}
return watches;
}

public Map<String, Object> usageStats() {
Counters counters = new Counters("count.total", "count.active");
for (Watch watch : watches.values()) {
boolean isActive = watch.status().state().isActive();
addToCounters("count", isActive, counters);

// schedule
if (watch.trigger() != null) {
addToCounters("watch.trigger._all", isActive, counters);
if ("schedule".equals(watch.trigger().type())) {
Schedule schedule = ((ScheduleTrigger) watch.trigger()).getSchedule();
addToCounters("watch.trigger.schedule._all", isActive, counters);
addToCounters("watch.trigger.schedule." + schedule.type(), isActive, counters);
}
}

// input
if (watch.input() != null) {
String type = watch.input().type();
addToCounters("watch.input._all", isActive, counters);
addToCounters("watch.input." + type, isActive, counters);
}

// condition
if (watch.condition() != null) {
String type = watch.condition().type();
addToCounters("watch.condition._all", isActive, counters);
addToCounters("watch.condition." + type, isActive, counters);
}

// actions
for (ActionWrapper actionWrapper : watch.actions()) {
String type = actionWrapper.action().type();
addToCounters("watch.action." + type, isActive, counters);
if (actionWrapper.transform() != null) {
String transformType = actionWrapper.transform().type();
addToCounters("watch.transform." + transformType, isActive, counters);
}
}

// transform
if (watch.transform() != null) {
String type = watch.transform().type();
addToCounters("watch.transform." + type, isActive, counters);
}

// metadata
if (watch.metadata() != null && watch.metadata().size() > 0) {
addToCounters("watch.metadata", isActive, counters);
}
}

return counters.toMap();
}

private void addToCounters(String name, boolean isActive, Counters counters) {
counters.inc(name + ".total");
if (isActive) {
counters.inc(name + ".active");
}
}

IndexRequest createIndexRequest(String id, BytesReference source, long version) {
IndexRequest indexRequest = new IndexRequest(INDEX, DOC_TYPE, id);
indexRequest.source(BytesReference.toBytes(source));
indexRequest.version(version);
return indexRequest;
}

/**
* scrolls all the watch documents in the watches index, parses them, and loads them into
* the given map.
*/
int loadWatches(int numPrimaryShards) {
assert watches.isEmpty() : "no watches should reside, but there are [" + watches.size() + "] watches.";
RefreshResponse refreshResponse = client.refresh(new RefreshRequest(INDEX));
if (refreshResponse.getSuccessfulShards() < numPrimaryShards) {
throw illegalState("not all required shards have been refreshed");
}

int count = 0;
SearchRequest searchRequest = new SearchRequest(INDEX)
.types(DOC_TYPE)
.preference("_primary")
.scroll(scrollTimeout)
.source(new SearchSourceBuilder()
.size(scrollSize)
.sort(SortBuilders.fieldSort("_doc"))
.version(true));
SearchResponse response = client.search(searchRequest, null);
try {
if (response.getTotalShards() != response.getSuccessfulShards()) {
throw new ElasticsearchException("Partial response while loading watches");
}

while (response.getHits().hits().length != 0) {
for (SearchHit hit : response.getHits()) {
String id = hit.getId();
try {
Watch watch = watchParser.parse(id, true, hit.getSourceRef());
watch.status().version(hit.version());
watch.version(hit.version());
watches.put(id, watch);
count++;
} catch (Exception e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage("couldn't load watch [{}], ignoring it...", id), e);
}
}
response = client.searchScroll(response.getScrollId(), scrollTimeout);
}
} finally {
client.clearScroll(response.getScrollId());
}
return count;
}

private void ensureStarted() {
if (!started.get()) {
throw new IllegalStateException("watch store not started");
}
}

public void clearWatchesInMemory() {
watches.clear();
}

public class WatchPut {

private final Watch previous;
private final Watch current;
private final IndexResponse response;

public WatchPut(Watch previous, Watch current, IndexResponse response) {
this.current = current;
this.previous = previous;
this.response = response;
}

public Watch current() {
return current;
}

public Watch previous() {
return previous;
}

public IndexResponse indexResponse() {
return response;
}
}

public class WatchDelete {

private final DeleteResponse response;

public WatchDelete(DeleteResponse response) {
this.response = response;
}

public DeleteResponse deleteResponse() {
return response;
}
}
}
@ -24,7 +24,7 @@ public class WatchStoreUtils {
public static IndexMetaData getConcreteIndex(String name, MetaData metaData) {
AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(name);
if (aliasOrIndex == null) {
throw new IndexNotFoundException(name);
return null;
}

if (aliasOrIndex.isAlias() && aliasOrIndex.getIndices().size() > 1) {
@ -95,6 +95,7 @@ public class LatchScriptEngine implements ScriptEngineService {
}

public static class LatchScriptPlugin extends Plugin implements ScriptPlugin {

@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return INSTANCE;
@ -39,7 +39,7 @@ public class SettingsFilterTests extends ESTestCase {
configureUnfilteredSetting("xpack.security.authc.realms.ldap1.type", "ldap");
configureUnfilteredSetting("xpack.security.authc.realms.ldap1.enabled", "false");
configureUnfilteredSetting("xpack.security.authc.realms.ldap1.url", "ldap://host.domain");
configureFilteredSetting("xpack.security.authc.realms.ldap1.hostname_verification", randomAsciiOfLength(5));
configureFilteredSetting("xpack.security.authc.realms.ldap1.hostname_verification", randomBooleanSetting());
configureFilteredSetting("xpack.security.authc.realms.ldap1.bind_dn", randomAsciiOfLength(5));
configureFilteredSetting("xpack.security.authc.realms.ldap1.bind_password", randomAsciiOfLength(5));

@ -47,7 +47,7 @@ public class SettingsFilterTests extends ESTestCase {
configureUnfilteredSetting("xpack.security.authc.realms.ad1.type", "active_directory");
configureUnfilteredSetting("xpack.security.authc.realms.ad1.enabled", "false");
configureUnfilteredSetting("xpack.security.authc.realms.ad1.url", "ldap://host.domain");
configureFilteredSetting("xpack.security.authc.realms.ad1.hostname_verification", randomAsciiOfLength(5));
configureFilteredSetting("xpack.security.authc.realms.ad1.hostname_verification", randomBooleanSetting());

// pki filtering
configureUnfilteredSetting("xpack.security.authc.realms.pki1.type", "pki");
@ -115,6 +115,10 @@ public class SettingsFilterTests extends ESTestCase {
}
}

private String randomBooleanSetting() {
return randomFrom("true", "1", "on", "yes", "false", "0", "off", "no");
}

private void configureUnfilteredSetting(String settingName, String value) {
configureSetting(settingName, value, is(value));
}
@ -284,8 +284,8 @@ public class HttpClientTests extends ESTestCase {
proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent"));
proxyServer.start();
Settings settings = Settings.builder()
.put(HttpClient.SETTINGS_PROXY_HOST, "localhost")
.put(HttpClient.SETTINGS_PROXY_PORT, proxyServer.getPort())
.put(HttpSettings.PROXY_HOST.getKey(), "localhost")
.put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort())
.build();
HttpClient httpClient = new HttpClient(settings, authRegistry, new SSLService(settings, environment));

@ -309,8 +309,8 @@ public class HttpClientTests extends ESTestCase {
proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent"));
proxyServer.start();
Settings settings = Settings.builder()
.put(HttpClient.SETTINGS_PROXY_HOST, "localhost")
.put(HttpClient.SETTINGS_PROXY_PORT, proxyServer.getPort() + 1)
.put(HttpSettings.PROXY_HOST.getKey(), "localhost")
.put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort() + 1)
.build();
HttpClient httpClient = new HttpClient(settings, authRegistry, new SSLService(settings, environment));
@ -19,7 +19,7 @@ import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRes
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
@ -84,7 +84,7 @@ public class EmailSecretsIntegrationTests extends AbstractWatcherIntegrationTest
.get();

// verifying the email password is stored encrypted in the index
GetResponse response = client().prepareGet(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id").get();
GetResponse response = client().prepareGet(Watch.INDEX, Watch.DOC_TYPE, "_id").get();
assertThat(response, notNullValue());
assertThat(response.getId(), is("_id"));
Map<String, Object> source = response.getSource();
@ -65,7 +65,7 @@ public class SecurityTests extends ESTestCase {
ThreadPool threadPool = mock(ThreadPool.class);
ClusterService clusterService = mock(ClusterService.class);
settings = Security.additionalSettings(settings, false);
Set<Setting<?>> allowedSettings = new HashSet<>(Security.getSettings(false));
Set<Setting<?>> allowedSettings = new HashSet<>(Security.getSettings(false, null));
allowedSettings.addAll(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
ClusterSettings clusterSettings = new ClusterSettings(settings, allowedSettings);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
@ -21,25 +21,34 @@ public class DestructiveOperationsTests extends SecurityIntegTestCase {
}

public void testDeleteIndexDestructiveOperationsRequireName() {
createIndex("index1");
Settings settings = Settings.builder().put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), true).build();
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings));
{
IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class,
() -> client().admin().indices().prepareDelete("*").get());
assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage());
String[] indices = client().admin().indices().prepareGetIndex().setIndices("index1").get().getIndices();
assertEquals(1, indices.length);
assertEquals("index1", indices[0]);
}
{
IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class,
() -> client().admin().indices().prepareDelete("*", "-index1").get());
assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage());
String[] indices = client().admin().indices().prepareGetIndex().setIndices("index1").get().getIndices();
assertEquals(1, indices.length);
assertEquals("index1", indices[0]);
}
{
IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class,
() -> client().admin().indices().prepareDelete("_all").get());
assertEquals("Wildcard expressions or all indices are not allowed", illegalArgumentException.getMessage());
String[] indices = client().admin().indices().prepareGetIndex().setIndices("index1").get().getIndices();
assertEquals(1, indices.length);
assertEquals("index1", indices[0]);
}

createIndex("index1");
assertAcked(client().admin().indices().prepareDelete("index1"));
}
@ -133,6 +133,7 @@ public class SecurityActionFilterTests extends ESTestCase {
filter.apply(task, action, request, listener, chain);
if (failDestructiveOperations) {
verify(listener).onFailure(isA(IllegalArgumentException.class));
verifyNoMoreInteractions(authzService, chain);
} else {
verify(authzService).authorize(authentication, action, request, Collections.emptyList(), Collections.emptyList());
verify(chain).proceed(eq(task), eq(action), eq(request), isA(ContextPreservingActionListener.class));
@ -0,0 +1,288 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.security.authc;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
import org.elasticsearch.xpack.extensions.XPackExtension;
|
||||
import org.elasticsearch.xpack.security.authc.support.Hasher;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class RealmSettingsTests extends ESTestCase {
|
||||
|
||||
private static final List<String> HASH_ALGOS = Arrays.stream(Hasher.values()).map(Hasher::name).collect(Collectors.toList());
|
||||
|
||||
public void testRealmWithoutTypeDoesNotValidate() throws Exception {
|
||||
final Settings.Builder builder = baseSettings("x", false);
|
||||
builder.remove("type");
|
||||
assertErrorWithMessage("empty1", "missing realm type", realm("empty1", builder).build());
|
||||
}
|
||||
|
||||
public void testRealmWithBlankTypeDoesNotValidate() throws Exception {
|
||||
final Settings.Builder builder = baseSettings("", false);
|
||||
assertErrorWithMessage("empty2", "missing realm type", realm("empty2", builder).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* This test exists because (in 5.x), we want to be backwards compatible and accept custom realms that
|
||||
* have not been updated to explicitly declare their settings.
|
||||
*
|
||||
* @see XPackExtension#getRealmSettings()
|
||||
*/
|
||||
public void testRealmWithUnknownTypeAcceptsAllSettings() throws Exception {
|
||||
final Settings.Builder settings = baseSettings("tam", true)
|
||||
.put("ip", "8.6.75.309")
|
||||
.put(randomAsciiOfLengthBetween(4, 8), randomTimeValue());
|
||||
assertSuccess(realm("tam", settings));
|
||||
}
|
||||
|
||||
public void testFileRealmWithAllSettingsValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(fileRealm("file1"));
|
||||
}
|
||||
|
||||
public void testFileRealmWithUnknownConfigurationDoesNotValidate() throws Exception {
|
||||
final Settings.Builder builder = realm("file2", fileSettings().put("not-valid", randomInt()));
|
||||
assertErrorWithCause("file2", "unknown setting [not-valid]", builder.build());
|
||||
}
|
||||
|
||||
public void testNativeRealmWithAllSettingsValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(nativeRealm("native1"));
|
||||
}
|
||||
|
||||
public void testNativeRealmWithUnknownConfigurationDoesNotValidate() throws Exception {
|
||||
final Settings.Builder builder = realm("native2", nativeSettings().put("not-valid", randomAsciiOfLength(10)));
|
||||
assertErrorWithCause("native2", "unknown setting [not-valid]", builder.build());
|
||||
}
|
||||
|
||||
public void testLdapRealmWithUserTemplatesAndGroupAttributesValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(ldapRealm("ldap1", false, false));
|
||||
}
|
||||
|
||||
public void testLdapRealmWithUserSearchAndGroupSearchValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(ldapRealm("ldap2", true, true));
|
||||
}
|
||||
|
||||
public void testActiveDirectoryRealmWithAllSettingsValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(activeDirectoryRealm("ad1"));
|
||||
}
|
||||
|
||||
public void testPkiRealmWithCertificateAuthoritiesValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(pkiRealm("pki1", false));
|
||||
}
|
||||
|
||||
public void testPkiRealmWithTrustStoreValidatesSuccessfully() throws Exception {
|
||||
assertSuccess(pkiRealm("pki2", true));
|
||||
}
|
||||
|
||||
public void testPkiRealmWithFullSslSettingsDoesNotValidate() throws Exception {
|
||||
final Settings.Builder realm = realm("pki3", configureSsl("", pkiSettings(true), true, true));
|
||||
assertError("pki3", realm.build());
|
||||
}
|
||||
|
||||
public void testSettingsWithMultipleRealmsValidatesSuccessfully() throws Exception {
|
||||
final Settings settings = Settings.builder()
|
||||
.put(fileRealm("file1").build())
|
||||
.put(nativeRealm("native2").build())
|
||||
.put(ldapRealm("ldap3", true, false).build())
|
||||
.put(activeDirectoryRealm("ad4").build())
|
||||
.put(pkiRealm("pki5", false).build())
|
||||
.build();
|
||||
assertSuccess(settings);
|
||||
}
|
||||
|
||||
private Settings.Builder nativeRealm(String name) {
|
||||
return realm(name, nativeSettings());
|
||||
}
|
||||
|
||||
private Settings.Builder nativeSettings() {
|
||||
return baseSettings("native", true);
|
||||
}
|
||||
|
||||
private Settings.Builder fileRealm(String name) {
|
||||
return realm(name, fileSettings());
|
||||
}
|
||||
|
||||
private Settings.Builder fileSettings() {
|
||||
return baseSettings("file", true);
|
||||
}
|
||||
|
||||
private Settings.Builder ldapRealm(String name, boolean userSearch, boolean groupSearch) {
|
||||
return realm(name, ldapSettings(userSearch, groupSearch));
|
||||
}
|
||||
|
||||
private Settings.Builder ldapSettings(boolean userSearch, boolean groupSearch) {
|
||||
final Settings.Builder builder = commonLdapSettings("ldap")
|
||||
.put("bind_dn", "elasticsearch")
|
||||
.put("bind_password", "t0p_s3cr3t")
|
||||
.put("follow_referrals", randomBoolean());
|
||||
|
||||
if (userSearch) {
|
||||
builder.put("user_search.base_dn", "o=people, dc=example, dc=com");
|
||||
builder.put("user_search.scope", "sub_tree");
|
||||
builder.put("user_search.attribute", randomAsciiOfLengthBetween(2, 5));
|
||||
builder.put("user_search.pool.enabled", randomBoolean());
|
||||
builder.put("user_search.pool.size", randomIntBetween(10, 100));
|
||||
builder.put("user_search.pool.initial_size", randomIntBetween(1, 10));
|
||||
builder.put("user_search.pool.health_check.enabled", randomBoolean());
|
||||
builder.put("user_search.pool.health_check.dn", randomAsciiOfLength(32));
|
||||
builder.put("user_search.pool.health_check.interval", randomPositiveTimeValue());
|
||||
} else {
|
||||
builder.putArray("user_dn_templates",
|
||||
"cn={0}, ou=staff, o=people, dc=example, dc=com",
|
||||
"cn={0}, ou=visitors, o=people, dc=example, dc=com");
|
||||
}
|
||||
|
||||
if (groupSearch) {
|
||||
builder.put("group_search.base_dn", "o=groups, dc=example, dc=com");
|
||||
builder.put("group_search.scope", "one_level");
|
||||
builder.put("group_search.filter", "userGroup");
|
||||
builder.put("group_search.user_attribute", "uid");
|
||||
} else {
|
||||
builder.put("user_group_attribute", randomAsciiOfLength(8));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder activeDirectoryRealm(String name) {
|
||||
return realm(name, activeDirectorySettings());
|
||||
}
|
||||
|
||||
private Settings.Builder activeDirectorySettings() {
|
||||
final Settings.Builder builder = commonLdapSettings("active_directory")
|
||||
.put("domain_name", "MEGACORP");
|
||||
builder.put("user_search.base_dn", "o=people, dc.example, dc.com");
|
||||
builder.put("user_search.scope", "sub_tree");
|
||||
builder.put("user_search.filter", randomAsciiOfLength(5) + "={0}");
|
||||
builder.put("group_search.base_dn", "o=groups, dc=example, dc=com");
|
||||
builder.put("group_search.scope", "one_level");
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder commonLdapSettings(String type) {
|
||||
final Settings.Builder builder = baseSettings(type, true)
|
||||
.putArray("url", "ldap://dir1.internal:9876", "ldap://dir2.internal:9876", "ldap://dir3.internal:9876")
|
||||
.put("load_balance.type", "round_robin")
|
||||
.put("load_balance.cache_ttl", randomTimeValue())
|
||||
.put("unmapped_groups_as_roles", randomBoolean())
|
||||
.put("files.role_mapping", "x-pack/" + randomAsciiOfLength(8) + ".yml")
|
||||
.put("timeout.tcp_connect", randomPositiveTimeValue())
|
||||
.put("timeout.tcp_read", randomPositiveTimeValue())
|
||||
.put("timeout.ldap_search", randomPositiveTimeValue());
|
||||
configureSsl("ssl.", builder, randomBoolean(), randomBoolean());
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder pkiRealm(String name, boolean useTrustStore) {
|
||||
return realm(name, pkiSettings(useTrustStore));
|
||||
}
|
||||
|
||||
private Settings.Builder pkiSettings(boolean useTrustStore) {
|
||||
final Settings.Builder builder = baseSettings("pki", false)
|
||||
.put("username_pattern", "CN=\\D(\\d+)(?:,\\|$)")
|
||||
.put("files.role_mapping", "x-pack/" + randomAsciiOfLength(8) + ".yml");
|
||||
|
||||
if (useTrustStore) {
|
||||
builder.put("truststore.path", randomAsciiOfLengthBetween(8, 32));
|
||||
builder.put("truststore.password", randomAsciiOfLengthBetween(4, 12));
|
||||
builder.put("truststore.algorithm", randomAsciiOfLengthBetween(6, 10));
|
||||
} else {
|
||||
builder.putArray("certificate_authorities", generateRandomStringArray(5, 32, false, false));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder configureSsl(String prefix, Settings.Builder builder, boolean useKeyStore, boolean useTrustStore) {
|
||||
if (useKeyStore) {
|
||||
builder.put(prefix + "keystore.path", "x-pack/ssl/" + randomAsciiOfLength(5) + ".jks");
|
||||
builder.put(prefix + "keystore.password", randomAsciiOfLength(8));
|
||||
builder.put(prefix + "keystore.key_password", randomAsciiOfLength(8));
|
||||
} else {
|
||||
builder.put(prefix + "key", "x-pack/ssl/" + randomAsciiOfLength(5) + ".key");
|
||||
builder.put(prefix + "key_passphrase", randomAsciiOfLength(32));
|
||||
builder.put(prefix + "certificate", "x-pack/ssl/" + randomAsciiOfLength(5) + ".cert");
|
||||
}
|
||||
|
||||
if (useTrustStore) {
|
||||
builder.put(prefix + "truststore.path", "x-pack/ssl/" + randomAsciiOfLength(5) + ".jts");
|
||||
builder.put(prefix + "truststore.password", randomAsciiOfLength(8));
|
||||
} else {
|
||||
builder.put(prefix + "certificate_authorities", "x-pack/ssl/" + randomAsciiOfLength(8) + ".ca");
|
||||
}
|
||||
|
||||
builder.put(prefix + "verification_mode", "full");
|
||||
builder.putArray(prefix + "supported_protocols", randomSubsetOf(XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS));
|
||||
builder.putArray(prefix + "cipher_suites", randomSubsetOf(XPackSettings.DEFAULT_CIPHERS));
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder baseSettings(String type, boolean withCacheSettings) {
|
||||
final Settings.Builder builder = Settings.builder()
|
||||
.put("type", type)
|
||||
.put("order", randomInt())
|
||||
.put("enabled", true);
|
||||
if (withCacheSettings) {
|
||||
builder.put("cache.ttl", randomPositiveTimeValue())
|
||||
.put("cache.max_users", randomIntBetween(1_000, 1_000_000))
|
||||
.put("cache.hash_algo", randomFrom(HASH_ALGOS));
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
private Settings.Builder realm(String name, Settings.Builder settings) {
|
||||
return settings.normalizePrefix(realmPrefix(name));
|
||||
}
|
||||
|
||||
private String realmPrefix(String name) {
|
||||
return RealmSettings.PREFIX + name + ".";
|
||||
}
|
||||
|
||||
private void assertSuccess(Settings.Builder builder) {
|
||||
assertSuccess(builder.build());
|
||||
}
|
||||
|
||||
private void assertSuccess(Settings settings) {
|
||||
assertThat(group().get(settings), notNullValue());
|
||||
}
|
||||
|
||||
private void assertErrorWithCause(String realmName, String message, Settings settings) {
|
||||
final IllegalArgumentException exception = assertError(realmName, settings);
|
||||
assertThat(exception.getCause(), notNullValue());
|
||||
assertThat(exception.getCause().getMessage(), containsString(message));
|
||||
}
|
||||
|
||||
private void assertErrorWithMessage(String realmName, String message, Settings settings) {
|
||||
final IllegalArgumentException exception = assertError(realmName, settings);
|
||||
assertThat(exception.getMessage(), containsString(message));
|
||||
}
|
||||
|
||||
private IllegalArgumentException assertError(String realmName, Settings settings) {
|
||||
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
|
||||
() -> group().get(settings)
|
||||
);
|
||||
assertThat(exception.getMessage(), containsString(realmPrefix(realmName)));
|
||||
return exception;
|
||||
}
|
||||
|
||||
private Setting<?> group() {
|
||||
final List<Setting<?>> list = new ArrayList<>();
|
||||
final List<XPackExtension> noExtensions = Collections.emptyList();
|
||||
RealmSettings.addSettings(list, noExtensions);
|
||||
assertThat(list, hasSize(1));
|
||||
return list.get(0);
|
||||
}
|
||||
}
|
@ -64,6 +64,7 @@ import static org.mockito.Mockito.verify;
|
||||
public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
|
||||
private static final String PASSWORD = "password";
|
||||
private static final String ROLE_MAPPING_FILE_SETTING = DnRoleMapper.ROLE_MAPPING_FILE_SETTING.getKey();
|
||||
|
||||
static int numberOfLdapServers;
|
||||
InMemoryDirectoryServer[] directoryServers;
|
||||
@ -168,7 +169,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
}
|
||||
|
||||
public void testAuthenticateCachingCanBeDisabled() throws Exception {
|
||||
Settings settings = settings(Settings.builder().put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING, -1).build());
|
||||
Settings settings = settings(Settings.builder().put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING.getKey(), -1).build());
|
||||
RealmConfig config = new RealmConfig("testAuthenticateCachingCanBeDisabled", settings, globalSettings);
|
||||
ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, null));
|
||||
DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {});
|
||||
@ -216,7 +217,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
|
||||
public void testRealmMapsGroupsToRoles() throws Exception {
|
||||
Settings settings = settings(Settings.builder()
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.put(ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.build());
|
||||
RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings);
|
||||
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null);
|
||||
@ -232,7 +233,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
|
||||
public void testRealmMapsUsersToRoles() throws Exception {
|
||||
Settings settings = settings(Settings.builder()
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.put(ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.build());
|
||||
RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings);
|
||||
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null);
|
||||
@ -249,7 +250,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
public void testRealmUsageStats() throws Exception {
|
||||
String loadBalanceType = randomFrom("failover", "round_robin");
|
||||
Settings settings = settings(Settings.builder()
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.put(ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml"))
|
||||
.put("load_balance.type", loadBalanceType)
|
||||
.build());
|
||||
RealmConfig config = new RealmConfig("testRealmUsageStats", settings, globalSettings);
|
||||
@ -274,7 +275,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
return Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com")
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, true)
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.getKey(), true)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false)
|
||||
.put(extraSettings)
|
||||
.build();
|
||||
|
@ -12,6 +12,7 @@ import org.elasticsearch.xpack.security.authc.RealmConfig;
|
||||
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
|
||||
import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase;
|
||||
import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
|
||||
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
|
||||
import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredStringTests;
|
||||
@ -23,9 +24,9 @@ import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory.USER_DN_TEMPLATES_SETTING;
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory.HOSTNAME_VERIFICATION_SETTING;
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory.URLS_SETTING;
|
||||
import static org.hamcrest.Matchers.arrayContaining;
|
||||
@ -46,6 +47,8 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
public static final String VALID_USERNAME = "Thomas Masterman Hardy";
|
||||
public static final String PASSWORD = "pass";
|
||||
|
||||
private static final String USER_DN_TEMPLATES_SETTING_KEY = LdapSessionFactory.USER_DN_TEMPLATES_SETTING.getKey();
|
||||
|
||||
private ThreadPool threadPool;
|
||||
private ResourceWatcherService resourceWatcherService;
|
||||
private Settings globalSettings;
|
||||
@ -95,7 +98,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, SecuredStringTests.build(PASSWORD)), future);
|
||||
User user = future.actionGet();
|
||||
assertThat(user, notNullValue());
|
||||
assertThat(user.roles(), arrayContaining("HMS Victory"));
|
||||
assertThat("For roles " + Arrays.toString(user.roles()), user.roles(), arrayContaining("HMS Victory"));
|
||||
}
|
||||
|
||||
public void testAuthenticateCaching() throws Exception {
|
||||
@ -158,7 +161,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(LdapRealm.CACHE_TTL_SETTING, -1)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING.getKey(), -1)
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
|
||||
@ -182,7 +185,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate)
|
||||
.put("group_search.base_dn", groupSearchBase)
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false)
|
||||
@ -215,7 +218,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
public void testLdapRealmThrowsExceptionForUserTemplateAndSearchSettings() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, "cn=foo")
|
||||
.putArray(USER_DN_TEMPLATES_SETTING_KEY, "cn=foo")
|
||||
.put("user_search.base_dn", "cn=bar")
|
||||
.put("group_search.base_dn", "")
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
@ -224,7 +227,26 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm-user-search", settings, globalSettings);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> LdapRealm.sessionFactory(config, null, LdapRealm.LDAP_TYPE));
|
||||
assertThat(e.getMessage(), containsString("settings were found for both user search and user template"));
|
||||
assertThat(e.getMessage(),
|
||||
containsString("settings were found for both" +
|
||||
" user search [xpack.security.authc.realms.test-ldap-realm-user-search.user_search.] and" +
|
||||
" user template [xpack.security.authc.realms.test-ldap-realm-user-search.user_dn_templates]"));
|
||||
}
|
||||
|
||||
public void testLdapRealmThrowsExceptionWhenNeitherUserTemplateNorSearchSettingsProvided() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.put("group_search.base_dn", "")
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false)
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm-user-search", settings, globalSettings);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> LdapRealm.sessionFactory(config, null, LdapRealm.LDAP_TYPE));
|
||||
assertThat(e.getMessage(),
|
||||
containsString("settings were not found for either" +
|
||||
" user search [xpack.security.authc.realms.test-ldap-realm-user-search.user_search.] or" +
|
||||
" user template [xpack.security.authc.realms.test-ldap-realm-user-search.user_dn_templates]"));
|
||||
}
|
||||
|
||||
public void testLdapRealmMapsUserDNToRole() throws Exception {
|
||||
@ -232,7 +254,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING,
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING.getKey(),
|
||||
getDataPath("/org/elasticsearch/xpack/security/authc/support/role_mapping.yml"))
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm-userdn", settings, globalSettings);
|
||||
@ -256,6 +278,7 @@ public class LdapRealmTests extends LdapTestCase {
|
||||
.put("bind_password", PASSWORD)
|
||||
.put("group_search.base_dn", groupSearchBase)
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
.put(LdapSessionFactory.USER_DN_TEMPLATES_SETTING.getKey(), "--")
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false);
|
||||
|
||||
int order = randomIntBetween(0, 10);
|
||||
|
@ -27,7 +27,7 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testResolveSubTree() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.build();
|
||||
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
@ -42,8 +42,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testResolveOneLevel() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("scope", LdapSearchScope.ONE_LEVEL)
|
||||
.put("group_search.base_dn", "ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.scope", LdapSearchScope.ONE_LEVEL)
|
||||
.build();
|
||||
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
@ -58,8 +58,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testResolveBase() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "cn=Avengers,ou=People,dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("scope", LdapSearchScope.BASE)
|
||||
.put("group_search.base_dn", "cn=Avengers,ou=People,dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.scope", LdapSearchScope.BASE)
|
||||
.build();
|
||||
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
@ -70,9 +70,9 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testResolveCustomFilter() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("filter", "(&(objectclass=posixGroup)(memberUID={0}))")
|
||||
.put("user_attribute", "uid")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.filter", "(&(objectclass=posixGroup)(memberUID={0}))")
|
||||
.put("group_search.user_attribute", "uid")
|
||||
.build();
|
||||
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
@ -84,8 +84,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testFilterIncludesPosixGroups() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("user_attribute", "uid")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.user_attribute", "uid")
|
||||
.build();
|
||||
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
@ -97,7 +97,7 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testCreateWithoutSpecifyingBaseDN() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("scope", LdapSearchScope.SUB_TREE)
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
.build();
|
||||
|
||||
try {
|
||||
@ -110,8 +110,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testReadUserAttributeUid() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("user_attribute", "uid").build();
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.user_attribute", "uid").build();
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
PlainActionFuture<String> future = new PlainActionFuture<>();
|
||||
resolver.readUserAttribute(ldapConnection, BRUCE_BANNER_DN, TimeValue.timeValueSeconds(5), future);
|
||||
@ -120,8 +120,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testReadUserAttributeCn() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("user_attribute", "cn")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.user_attribute", "cn")
|
||||
.build();
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
|
||||
@ -132,8 +132,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testReadNonExistentUserAttribute() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("user_attribute", "doesntExists")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.user_attribute", "doesntExists")
|
||||
.build();
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
|
||||
@ -144,8 +144,8 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase {
|
||||
|
||||
public void testReadBinaryUserAttribute() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("user_attribute", "userPassword")
|
||||
.put("group_search.base_dn", "dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("group_search.user_attribute", "userPassword")
|
||||
.build();
|
||||
SearchGroupsResolver resolver = new SearchGroupsResolver(settings);
|
||||
|
||||
|
@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.security.authc.RealmConfig;
|
||||
import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
|
||||
import org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory;
|
||||
import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
@ -28,10 +29,11 @@ import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory.HOSTNAME_VERIFICATION_SETTING;
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory.URLS_SETTING;
|
||||
import static org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory.USER_DN_TEMPLATES_SETTING;
|
||||
|
||||
public abstract class LdapTestCase extends ESTestCase {
|
||||
|
||||
private static final String USER_DN_TEMPLATES_SETTING_KEY = LdapSessionFactory.USER_DN_TEMPLATES_SETTING.getKey();
|
||||
|
||||
static int numberOfLdapServers;
|
||||
protected InMemoryDirectoryServer[] ldapServers;
|
||||
|
||||
@ -86,7 +88,7 @@ public abstract class LdapTestCase extends ESTestCase {
|
||||
LdapLoadBalancing serverSetType) {
|
||||
Settings.Builder builder = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate)
|
||||
.put("group_search.base_dn", groupSearchBase)
|
||||
.put("group_search.scope", scope)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false);
|
||||
@ -100,14 +102,14 @@ public abstract class LdapTestCase extends ESTestCase {
|
||||
public static Settings buildLdapSettings(String[] ldapUrl, String userTemplate, boolean hostnameVerification) {
|
||||
return Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, hostnameVerification)
|
||||
.build();
|
||||
}
|
||||
|
||||
protected DnRoleMapper buildGroupAsRoleMapper(ResourceWatcherService resourceWatcherService) {
|
||||
Settings settings = Settings.builder()
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, true)
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.getKey(), true)
|
||||
.build();
|
||||
Settings global = Settings.builder().put("path.home", createTempDir()).build();
|
||||
RealmConfig config = new RealmConfig("ldap1", settings, global);
|
||||
|
@ -42,9 +42,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
|
||||
int maxUsers = randomIntBetween(10, 100);
|
||||
TimeValue ttl = TimeValue.timeValueMinutes(randomIntBetween(10, 20));
|
||||
Settings settings = Settings.builder()
|
||||
.put(CachingUsernamePasswordRealm.CACHE_HASH_ALGO_SETTING, hashAlgo)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_MAX_USERS_SETTING, maxUsers)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING, ttl)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_HASH_ALGO_SETTING.getKey(), hashAlgo)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_MAX_USERS_SETTING.getKey(), maxUsers)
|
||||
.put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING.getKey(), ttl)
|
||||
.build();
|
||||
|
||||
RealmConfig config = new RealmConfig("test_realm", settings, globalSettings);
|
||||
|
@ -47,6 +47,9 @@ import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class DnRoleMapperTests extends ESTestCase {
|
||||
|
||||
private static final String ROLE_MAPPING_FILE_SETTING = DnRoleMapper.ROLE_MAPPING_FILE_SETTING.getKey();
|
||||
private static final String USE_UNMAPPED_GROUPS_AS_ROLES_SETTING_KEY = DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.getKey();
|
||||
|
||||
private static final String[] STARK_GROUP_DNS = new String[] {
|
||||
//groups can be named by different attributes, depending on the directory,
|
||||
//we don't care what it is named by
|
||||
@ -230,7 +233,7 @@ public class DnRoleMapperTests extends ESTestCase {
|
||||
public void testYaml() throws Exception {
|
||||
Path file = getDataPath("role_mapping.yml");
|
||||
Settings ldapSettings = Settings.builder()
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath())
|
||||
.put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath())
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings);
|
||||
|
||||
@ -244,7 +247,7 @@ public class DnRoleMapperTests extends ESTestCase {
|
||||
|
||||
public void testRelativeDN() {
|
||||
Settings ldapSettings = Settings.builder()
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, true)
|
||||
.put(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING_KEY, true)
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings);
|
||||
|
||||
@ -257,8 +260,8 @@ public class DnRoleMapperTests extends ESTestCase {
|
||||
public void testUserDNMapping() throws Exception {
|
||||
Path file = getDataPath("role_mapping.yml");
|
||||
Settings ldapSettings = Settings.builder()
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath())
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, false)
|
||||
.put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath())
|
||||
.put(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING_KEY, false)
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("ldap-userdn-role", ldapSettings, settings);
|
||||
|
||||
|
@ -44,7 +44,6 @@ import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;

import java.util.Collections;
import java.util.Optional;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
@ -148,8 +147,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase {
for (int i = 0; i < numValues; i++) {
ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i])));
when(queryShardContext.newParseContext(anyParser())).thenReturn(queryParseContext);
when(queryParseContext.parseInnerQueryBuilder())
.thenReturn(Optional.of(new TermQueryBuilder("field", values[i])));
when(queryParseContext.parseInnerQueryBuilder()).thenReturn(new TermQueryBuilder("field", values[i]));
when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader);
IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader));
@ -106,6 +106,7 @@ public class ServerTransportFilterTests extends ESTestCase {
|
||||
filter.inbound(action, request, channel, listener);
|
||||
if (failDestructiveOperations) {
|
||||
verify(listener).onFailure(isA(IllegalArgumentException.class));
|
||||
verifyNoMoreInteractions(authzService);
|
||||
} else {
|
||||
verify(authzService).authorize(authentication, action, request, Collections.emptyList(), Collections.emptyList());
|
||||
}
|
||||
|
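The hunks above replace calls that passed Setting constants directly to Settings.Builder#put with calls that pass the setting's key string. A minimal, hypothetical sketch of why that matters (the builder's put methods take a String key, so a typed Setting<?> has to be dereferenced via getKey(); the constant shown here is invented purely for illustration):

    // stand-alone Setting constant for illustration only
    Setting<Integer> maxUsers = Setting.intSetting("cache.max_users", 100_000, Setting.Property.NodeScope);
    Settings settings = Settings.builder()
            .put(maxUsers.getKey(), 10_000)   // put(String, int): the key string, not the Setting object
            .build();
    int resolved = maxUsers.get(settings);    // the Setting itself is still used to read the value back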
@ -0,0 +1,103 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ssl;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.util.Arrays;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.is;

public class SSLConfigurationSettingsTests extends ESTestCase {

public void testParseCipherSettingsWithoutPrefix() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withoutPrefix();
assertThat(ssl.ciphers.match("cipher_suites"), is(true));
assertThat(ssl.ciphers.match("ssl.cipher_suites"), is(false));
assertThat(ssl.ciphers.match("xpack.ssl.cipher_suites"), is(false));

final Settings settings = Settings.builder()
.put("cipher_suites.0", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256")
.put("cipher_suites.1", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256")
.put("cipher_suites.2", "TLS_RSA_WITH_AES_128_CBC_SHA256")
.build();
assertThat(ssl.ciphers.get(settings), is(Arrays.asList(
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", "TLS_RSA_WITH_AES_128_CBC_SHA256"
)));
}

public void testParseClientAuthWithPrefix() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("xpack.security.http.ssl.");
assertThat(ssl.clientAuth.match("xpack.security.http.ssl.client_authentication"), is(true));
assertThat(ssl.clientAuth.match("client_authentication"), is(false));

final Settings settings = Settings.builder()
.put("xpack.security.http.ssl.client_authentication", SSLClientAuth.OPTIONAL.name())
.build();
assertThat(ssl.clientAuth.get(settings).get(), is(SSLClientAuth.OPTIONAL));
}

public void testParseKeystoreAlgorithmWithPrefix() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("xpack.security.authc.realms.ldap1.ssl.");
assertThat(ssl.keystoreAlgorithm.match("xpack.security.authc.realms.ldap1.ssl.keystore.algorithm"), is(true));

final String algo = randomAsciiOfLength(16);
final Settings settings = Settings.builder()
.put("xpack.security.authc.realms.ldap1.ssl.keystore.algorithm", algo)
.build();
assertThat(ssl.keystoreAlgorithm.get(settings), is(algo));
}

public void testParseProtocolsListWithPrefix() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("ssl.");
assertThat(ssl.supportedProtocols.match("ssl.supported_protocols"), is(true));

final Settings settings = Settings.builder()
.putArray("ssl.supported_protocols", "SSLv3", "SSLv2Hello", "SSLv2")
.build();
assertThat(ssl.supportedProtocols.get(settings), is(Arrays.asList("SSLv3", "SSLv2Hello", "SSLv2")));
}

public void testKeyStoreKeyPasswordDefaultsToKeystorePassword() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("xpack.ssl.");

assertThat(ssl.keystorePassword.match("xpack.ssl.keystore.password"), is(true));
assertThat(ssl.keystoreKeyPassword.match("xpack.ssl.keystore.key_password"), is(true));

assertThat(ssl.keystorePassword.match("xpack.ssl.keystore.key_password"), is(false));
assertThat(ssl.keystoreKeyPassword.match("xpack.ssl.keystore.password"), is(false));

final String password = randomAsciiOfLength(16);
final Settings settings = Settings.builder()
.put("xpack.ssl.keystore.password", password)
.build();
assertThat(ssl.keystoreKeyPassword.get(settings).get(), is(password));
}

public void testEmptySettingsParsesToDefaults() {
final SSLConfigurationSettings ssl = SSLConfigurationSettings.withoutPrefix();
final Settings settings = Settings.EMPTY;
assertThat(ssl.caPaths.get(settings).size(), is(0));
assertThat(ssl.cert.get(settings).isPresent(), is(false));
assertThat(ssl.ciphers.get(settings).size(), is(0));
assertThat(ssl.clientAuth.get(settings).isPresent(), is(false));
assertThat(ssl.keyPassword.get(settings).isPresent(), is(false));
assertThat(ssl.keyPath.get(settings).isPresent(), is(false));
assertThat(ssl.keystoreAlgorithm.get(settings), is(KeyManagerFactory.getDefaultAlgorithm()));
assertThat(ssl.keystoreKeyPassword.get(settings).isPresent(), is(false));
assertThat(ssl.keystorePassword.get(settings).isPresent(), is(false));
assertThat(ssl.keystorePath.get(settings).isPresent(), is(false));
assertThat(ssl.supportedProtocols.get(settings).size(), is(0));
assertThat(ssl.truststoreAlgorithm.get(settings), is(TrustManagerFactory.getDefaultAlgorithm()));
assertThat(ssl.truststorePassword.get(settings).isPresent(), is(false));
assertThat(ssl.truststorePath.get(settings).isPresent(), is(false));
assertThat(ssl.verificationMode.get(settings).isPresent(), is(false));
}

}
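The new test above drives SSLConfigurationSettings through its withPrefix/withoutPrefix factories. As a rough sketch of the same idea outside a test, assuming the ciphers setting object is accessible to the caller the way the test relies on:

    // hypothetical usage, mirroring testParseCipherSettingsWithoutPrefix but with a prefix
    SSLConfigurationSettings ssl = SSLConfigurationSettings.withPrefix("xpack.security.http.ssl.");
    Settings settings = Settings.builder()
            .put("xpack.security.http.ssl.cipher_suites.0", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256")
            .build();
    List<String> ciphers = ssl.ciphers.get(settings);  // resolves the list under the prefixed key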
@ -37,6 +37,7 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Future;

import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
@ -236,6 +237,21 @@ public class SSLServiceTests extends ESTestCase {
settings.getByPrefix("xpack.security.transport.ssl.")));
}

public void testThatHttpClientAuthDefaultsToNone() {
final Settings globalSettings = Settings.builder()
.put("xpack.security.http.ssl.enabled", true)
.put("xpack.ssl.client_authentication", SSLClientAuth.OPTIONAL.name())
.build();
final SSLService sslService = new SSLService(globalSettings, env);

final SSLConfiguration globalConfig = sslService.sslConfiguration(Settings.EMPTY);
assertThat(globalConfig.sslClientAuth(), is(SSLClientAuth.OPTIONAL));

final Settings httpSettings = SSLService.getHttpTransportSSLSettings(globalSettings);
final SSLConfiguration httpConfig = sslService.sslConfiguration(httpSettings);
assertThat(httpConfig.sslClientAuth(), is(SSLClientAuth.NONE));
}

public void testThatTruststorePasswordIsRequired() throws Exception {
Settings settings = Settings.builder()
.put("xpack.ssl.keystore.path", testnodeStore)
@ -21,7 +21,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.junit.Before;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@ -177,7 +177,7 @@ public class WatcherLifeCycleServiceTests extends ESTestCase {
// old cluster state that contains watcher index
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
ClusterState oldClusterState = ClusterState.builder(new ClusterName("my-cluster"))
.metaData(new MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX)
.metaData(new MetaData.Builder().put(IndexMetaData.builder(Watch.INDEX)
.settings(indexSettings).numberOfReplicas(0).numberOfShards(1)))
.nodes(discoveryNodes).build();

@ -196,13 +196,13 @@ public class WatcherLifeCycleServiceTests extends ESTestCase {
// old cluster state that contains watcher index
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
ClusterState oldClusterState = ClusterState.builder(new ClusterName("my-cluster"))
.metaData(new MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX)
.metaData(new MetaData.Builder().put(IndexMetaData.builder(Watch.INDEX)
.settings(indexSettings).numberOfReplicas(0).numberOfShards(1)))
.nodes(discoveryNodes).build();

// new cluster state with a closed watcher index
ClusterState newClusterState = ClusterState.builder(new ClusterName("my-cluster"))
.metaData(new MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX).state(IndexMetaData.State.CLOSE)
.metaData(new MetaData.Builder().put(IndexMetaData.builder(Watch.INDEX).state(IndexMetaData.State.CLOSE)
.settings(indexSettings).numberOfReplicas(0).numberOfShards(1)))
.nodes(discoveryNodes).build();
when(watcherService.state()).thenReturn(WatcherState.STARTED);
@ -1,307 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher;

import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.support.clock.ClockMock;
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.watcher.trigger.Trigger;
import org.elasticsearch.xpack.watcher.trigger.TriggerEngine;
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchLockService;
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.joda.time.DateTime;
import org.junit.Before;

import java.time.Clock;
import java.util.concurrent.atomic.AtomicReference;

import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
import static org.joda.time.DateTimeZone.UTC;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;

public class WatcherServiceTests extends ESTestCase {
private TriggerService triggerService;
private WatchStore watchStore;
private Watch.Parser watchParser;
private WatcherService watcherService;
private ClockMock clock;

@Before
public void init() throws Exception {
triggerService = mock(TriggerService.class);
watchStore = mock(WatchStore.class);
watchParser = mock(Watch.Parser.class);
ExecutionService executionService = mock(ExecutionService.class);
WatchLockService watchLockService = mock(WatchLockService.class);
clock = ClockMock.frozen();
WatcherIndexTemplateRegistry watcherIndexTemplateRegistry = mock(WatcherIndexTemplateRegistry.class);
watcherService = new WatcherService(Settings.EMPTY, clock, triggerService, watchStore, watchParser, executionService,
watchLockService, watcherIndexTemplateRegistry);
AtomicReference<WatcherState> state = watcherService.state;
state.set(WatcherState.STARTED);
}

public void testPutWatch() throws Exception {
boolean activeByDefault = randomBoolean();

IndexResponse indexResponse = mock(IndexResponse.class);
Watch newWatch = mock(Watch.class);
WatchStatus status = mock(WatchStatus.class);
when(status.state()).thenReturn(new WatchStatus.State(activeByDefault, new DateTime(clock.millis(), UTC)));
when(newWatch.status()).thenReturn(status);

WatchStore.WatchPut watchPut = mock(WatchStore.WatchPut.class);
when(watchPut.indexResponse()).thenReturn(indexResponse);
when(watchPut.current()).thenReturn(newWatch);

when(watchParser.parseWithSecrets(any(String.class), eq(false), any(BytesReference.class), any(DateTime.class)))
.thenReturn(newWatch);
when(watchStore.put(newWatch)).thenReturn(watchPut);
IndexResponse response = watcherService.putWatch("_id", new BytesArray("{}"), activeByDefault);
assertThat(response, sameInstance(indexResponse));

verify(newWatch, times(1)).setState(activeByDefault, new DateTime(clock.millis(), UTC));
if (activeByDefault) {
verify(triggerService, times(1)).add(any(TriggerEngine.Job.class));
} else {
verifyZeroInteractions(triggerService);
}
}

public void testPutWatchDifferentActiveStates() throws Exception {
Trigger trigger = mock(Trigger.class);

IndexResponse indexResponse = mock(IndexResponse.class);

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
WatchStatus status = mock(WatchStatus.class);
boolean active = randomBoolean();
DateTime now = new DateTime(clock.millis(), UTC);
when(status.state()).thenReturn(new WatchStatus.State(active, now));
when(watch.status()).thenReturn(status);
when(watch.trigger()).thenReturn(trigger);
WatchStore.WatchPut watchPut = mock(WatchStore.WatchPut.class);
when(watchPut.indexResponse()).thenReturn(indexResponse);
when(watchPut.current()).thenReturn(watch);

Watch previousWatch = mock(Watch.class);
WatchStatus previousStatus = mock(WatchStatus.class);
boolean prevActive = randomBoolean();
when(previousStatus.state()).thenReturn(new WatchStatus.State(prevActive, now));
when(previousWatch.status()).thenReturn(previousStatus);
when(previousWatch.trigger()).thenReturn(trigger);
when(watchPut.previous()).thenReturn(previousWatch);

when(watchParser.parseWithSecrets(any(String.class), eq(false), any(BytesReference.class), eq(now))).thenReturn(watch);
when(watchStore.put(watch)).thenReturn(watchPut);

IndexResponse response = watcherService.putWatch("_id", new BytesArray("{}"), active);
assertThat(response, sameInstance(indexResponse));

if (!active) {
// we should always remove the watch from the trigger service, just to be safe
verify(triggerService, times(1)).remove("_id");
} else if (prevActive) {
// if both the new watch and the prev one are active, we should do nothing
verifyZeroInteractions(triggerService);
} else {
// if the prev watch was not active and the new one is active, we should add the watch
verify(triggerService, times(1)).add(watch);
}
}

public void testDeleteWatch() throws Exception {
WatchStore.WatchDelete expectedWatchDelete = mock(WatchStore.WatchDelete.class);
DeleteResponse deleteResponse = mock(DeleteResponse.class);
when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.DELETED);
when(expectedWatchDelete.deleteResponse()).thenReturn(deleteResponse);
when(watchStore.delete("_id")).thenReturn(expectedWatchDelete);
WatchStore.WatchDelete watchDelete = watcherService.deleteWatch("_id");

assertThat(watchDelete, sameInstance(expectedWatchDelete));
verify(triggerService, times(1)).remove("_id");
}

public void testDeleteWatchNotFound() throws Exception {
WatchStore.WatchDelete expectedWatchDelete = mock(WatchStore.WatchDelete.class);
DeleteResponse deleteResponse = mock(DeleteResponse.class);
when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP);
when(expectedWatchDelete.deleteResponse()).thenReturn(deleteResponse);
when(watchStore.delete("_id")).thenReturn(expectedWatchDelete);
WatchStore.WatchDelete watchDelete = watcherService.deleteWatch("_id");

assertThat(watchDelete, sameInstance(expectedWatchDelete));
verifyZeroInteractions(triggerService);
}

public void testAckWatch() throws Exception {
DateTime now = new DateTime(UTC);
clock.setTime(now);
Watch watch = mock(Watch.class);
when(watch.ack(now, "_all")).thenReturn(true);
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.ackWatch("_id", Strings.EMPTY_ARRAY);
assertThat(result, not(sameInstance(status)));

verify(watchStore, times(1)).updateStatus(watch);
}

public void testActivate() throws Exception {
WatcherService service = spy(watcherService);
WatchStatus expectedStatus = mock(WatchStatus.class);
doReturn(expectedStatus).when(service).setWatchState("_id", true);
WatchStatus actualStatus = service.activateWatch("_id");
assertThat(actualStatus, sameInstance(expectedStatus));
verify(service, times(1)).setWatchState("_id", true);
}

public void testDeactivate() throws Exception {
WatcherService service = spy(watcherService);
WatchStatus expectedStatus = mock(WatchStatus.class);
doReturn(expectedStatus).when(service).setWatchState("_id", false);
WatchStatus actualStatus = service.deactivateWatch("_id");
assertThat(actualStatus, sameInstance(expectedStatus));
verify(service, times(1)).setWatchState("_id", false);
}

public void testSetWatchStateSetActiveOnCurrentlyActive() throws Exception {
// trying to activate a watch that is already active:
// - the watch status should not change
// - the watch doesn't need to be updated in the store
// - the watch should not be removed or re-added to the trigger service
DateTime now = new DateTime(UTC);
clock.setTime(now);

Watch watch = mock(Watch.class);
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watch.setState(true, now)).thenReturn(false);

when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.setWatchState("_id", true);
assertThat(result, not(sameInstance(status)));

verifyZeroInteractions(triggerService);
verify(watchStore, never()).updateStatus(watch);
}

public void testSetWatchStateSetActiveOnCurrentlyInactive() throws Exception {
// activating a watch that is currently inactive:
// - the watch status should be updated
// - the watch needs to be updated in the store
// - the watch should be re-added to the trigger service (the assumption is that it's not there)

DateTime now = new DateTime(UTC);
clock.setTime(now);

Watch watch = mock(Watch.class);
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watch.setState(true, now)).thenReturn(true);

when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.setWatchState("_id", true);
assertThat(result, not(sameInstance(status)));

verify(triggerService, times(1)).add(watch);
verify(watchStore, times(1)).updateStatus(watch);
}

public void testSetWatchStateSetInactiveOnCurrentlyActive() throws Exception {
// deactivating a watch that is currently active:
// - the watch status should change
// - the watch needs to be updated in the store
// - the watch should be removed from the trigger service
DateTime now = new DateTime(UTC);
clock.setTime(now);

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watch.setState(false, now)).thenReturn(true);

when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.setWatchState("_id", false);
assertThat(result, not(sameInstance(status)));

verify(triggerService, times(1)).remove("_id");
verify(watchStore, times(1)).updateStatus(watch);
}

public void testSetWatchStateSetInactiveOnCurrentlyInactive() throws Exception {
// trying to deactivate a watch that is currently inactive:
// - the watch status should not be updated
// - the watch should not be updated in the store
// - the watch should be re-added or removed to/from the trigger service
DateTime now = new DateTime(UTC);
clock.setTime(now);

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watch.setState(false, now)).thenReturn(false);

when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.setWatchState("_id", false);
assertThat(result, not(sameInstance(status)));

verifyZeroInteractions(triggerService);
verify(watchStore, never()).updateStatus(watch);
}

public void testAckWatchNotAck() throws Exception {
DateTime now = new DateTime(Clock.systemUTC().millis(), UTC);
Watch watch = mock(Watch.class);
when(watch.ack(now)).thenReturn(false);
WatchStatus status = new WatchStatus(now, emptyMap());
when(watch.status()).thenReturn(status);
when(watchStore.get("_id")).thenReturn(watch);

WatchStatus result = watcherService.ackWatch("_id", Strings.EMPTY_ARRAY);
assertThat(result, not(sameInstance(status)));

verify(watchStore, never()).updateStatus(watch);
}

public void testAckWatchNoWatch() throws Exception {
when(watchStore.get("_id")).thenReturn(null);
expectThrows(IllegalArgumentException.class, () -> watcherService.ackWatch("_id", Strings.EMPTY_ARRAY));
verify(watchStore, never()).updateStatus(any(Watch.class));
}
}
@ -52,14 +52,11 @@ public class ActionErrorIntegrationTests extends AbstractWatcherIntegrationTestC
flush();

// there should be a single history record with a failure status for the action:
assertBusy(new Runnable() {
@Override
public void run() {
long count = watchRecordCount(QueryBuilders.boolQuery()
.must(termsQuery("result.actions.id", "_action"))
.must(termsQuery("result.actions.status", "failure")));
assertThat(count, is(1L));
}
assertBusy(() -> {
long count = watchRecordCount(QueryBuilders.boolQuery()
.must(termsQuery("result.actions.id", "_action"))
.must(termsQuery("result.actions.status", "failure")));
assertThat(count, is(1L));
});

// now we'll trigger the watch again and make sure that it's not throttled and instead
@ -72,14 +69,11 @@ public class ActionErrorIntegrationTests extends AbstractWatcherIntegrationTestC
flush();

// there should be a single history record with a failure status for the action:
assertBusy(new Runnable() {
@Override
public void run() {
long count = watchRecordCount(QueryBuilders.boolQuery()
.must(termsQuery("result.actions.id", "_action"))
.must(termsQuery("result.actions.status", "failure")));
assertThat(count, is(2L));
}
assertBusy(() -> {
long count = watchRecordCount(QueryBuilders.boolQuery()
.must(termsQuery("result.actions.id", "_action"))
.must(termsQuery("result.actions.status", "failure")));
assertThat(count, is(2L));
});

// now lets confirm that the ack status of the action is awaits_successful_execution
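The hunks above convert anonymous Runnable callbacks into lambdas for assertBusy. A small sketch of the same pattern with an explicit timeout, assuming the ESTestCase overload that takes a duration and time unit:

    // retries the block until the assertions pass or the timeout elapses
    assertBusy(() -> {
        long count = watchRecordCount(QueryBuilders.boolQuery()
                .must(termsQuery("result.actions.id", "_action"))
                .must(termsQuery("result.actions.status", "failure")));
        assertThat(count, is(1L));
    }, 30, TimeUnit.SECONDS);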
@ -7,11 +7,11 @@ package org.elasticsearch.xpack.watcher.actions.throttler;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.common.http.HttpMethod;
import org.elasticsearch.xpack.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.common.text.TextTemplate;
import org.elasticsearch.xpack.notification.email.EmailTemplate;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.actions.email.EmailAction;
import org.elasticsearch.xpack.watcher.actions.index.IndexAction;
import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction;
@ -19,24 +19,23 @@ import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode;
import org.elasticsearch.xpack.watcher.execution.ExecutionState;
import org.elasticsearch.xpack.watcher.execution.ManualExecutionContext;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRequestBuilder;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.trigger.manual.ManualTriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.time.Clock;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@ -47,12 +46,15 @@ import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBu
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
public void testSingleActionAckThrottle() throws Exception {
boolean useClientForAcking = randomBoolean();

@Override
protected boolean timeWarped() {
return true;
}

public void testSingleActionAckThrottle() throws Exception {
WatchSourceBuilder watchSourceBuilder = watchBuilder()
.trigger(schedule(interval("60m")));
@ -60,38 +62,37 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
Action.Builder action = availableAction.action();
|
||||
watchSourceBuilder.addAction("test_id", action);
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
refresh(Watch.INDEX);
|
||||
|
||||
assertThat(watcherClient().prepareGetWatch("_id").get().isFound(), equalTo(true));
|
||||
ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
WatchRecord watchRecord = executionService().execute(ctx);
|
||||
ExecuteWatchRequestBuilder executeWatchRequestBuilder = watcherClient().prepareExecuteWatch("_id")
|
||||
.setRecordExecution(true)
|
||||
.setActionMode("test_id", ActionExecutionMode.SIMULATE);
|
||||
|
||||
Map<String, Object> responseMap = executeWatchRequestBuilder.get().getRecordSource().getAsMap();
|
||||
String status = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
assertThat(status, equalTo(Action.Result.Status.SIMULATED.toString().toLowerCase(Locale.ROOT)));
|
||||
|
||||
timeWarp().clock().fastForward(TimeValue.timeValueSeconds(15));
|
||||
|
||||
assertThat(watchRecord.result().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.SIMULATED));
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForward(TimeValue.timeValueSeconds(1));
|
||||
}
|
||||
boolean ack = randomBoolean();
|
||||
if (ack) {
|
||||
if (useClientForAcking) {
|
||||
watcherClient().prepareAckWatch("_id").setActionIds("test_id").get();
|
||||
} else {
|
||||
watchService().ackWatch("_id", new String[] { "test_id" });
|
||||
}
|
||||
watcherClient().prepareAckWatch("_id").setActionIds("test_id").get();
|
||||
}
|
||||
ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
watchRecord = executionService().execute(ctx);
|
||||
|
||||
executeWatchRequestBuilder = watcherClient().prepareExecuteWatch("_id")
|
||||
.setRecordExecution(true)
|
||||
.setActionMode("test_id", ActionExecutionMode.SIMULATE);
|
||||
responseMap = executeWatchRequestBuilder.get().getRecordSource().getAsMap();
|
||||
status = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
if (ack) {
|
||||
assertThat(watchRecord.result().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.THROTTLED));
|
||||
assertThat(status, equalTo(Action.Result.Status.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
} else {
|
||||
assertThat(watchRecord.result().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.SIMULATED));
|
||||
assertThat(status, equalTo(Action.Result.Status.SIMULATED.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRandomMultiActionAckThrottle() throws Exception {
|
||||
boolean useClientForAcking = randomBoolean();
|
||||
|
||||
WatchSourceBuilder watchSourceBuilder = watchBuilder()
|
||||
.trigger(schedule(interval("60m")));
|
||||
|
||||
@ -105,38 +106,35 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
|
||||
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));
|
||||
ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
executionService().execute(ctx);
|
||||
watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
refresh(Watch.INDEX);
|
||||
executeWatch("_id");
|
||||
|
||||
for (String actionId : ackingActions) {
|
||||
if (useClientForAcking) {
|
||||
watcherClient().prepareAckWatch("_id").setActionIds(actionId).get();
|
||||
} else {
|
||||
watchService().ackWatch("_id", new String[]{actionId});
|
||||
}
|
||||
watcherClient().prepareAckWatch("_id").setActionIds(actionId).get();
|
||||
}
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(5);
|
||||
}
|
||||
timeWarp().clock().fastForwardSeconds(15);
|
||||
|
||||
ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
WatchRecord watchRecord = executionService().execute(ctx);
|
||||
for (ActionWrapper.Result result : watchRecord.result().actionsResults().values()) {
|
||||
if (ackingActions.contains(result.id())) {
|
||||
assertThat(result.action().status(), equalTo(Action.Result.Status.THROTTLED));
|
||||
Map<String, Object> responseMap = executeWatch("_id");
|
||||
List<Map<String, String>> actions = ObjectPath.eval("result.actions", responseMap);
|
||||
for (Map<String, String> result : actions) {
|
||||
if (ackingActions.contains(result.get("id"))) {
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
} else {
|
||||
assertThat(result.action().status(), equalTo(Action.Result.Status.SIMULATED));
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.SIMULATED.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, Object> executeWatch(String id) {
|
||||
return watcherClient().prepareExecuteWatch(id)
|
||||
.setRecordExecution(true)
|
||||
.setActionMode("_all", ActionExecutionMode.SIMULATE).get().getRecordSource().getAsMap();
|
||||
}
|
||||
|
||||
public void testDifferentThrottlePeriods() throws Exception {
|
||||
timeWarp().clock().setTime(DateTime.now(DateTimeZone.UTC));
|
||||
WatchSourceBuilder watchSourceBuilder = watchBuilder()
|
||||
.trigger(schedule(interval("60m")));
|
||||
|
||||
@ -145,46 +143,34 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
watchSourceBuilder.addAction("fifteen_sec_throttle", new TimeValue(15, TimeUnit.SECONDS),
|
||||
randomFrom(AvailableAction.values()).action());
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));
|
||||
watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
refresh(Watch.INDEX);
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().setTime(new DateTime(DateTimeZone.UTC));
|
||||
timeWarp().clock().fastForwardSeconds(1);
|
||||
Map<String, Object> responseMap = executeWatch("_id");
|
||||
List<Map<String, String>> actions = ObjectPath.eval("result.actions", responseMap);
|
||||
for (Map<String, String> result : actions) {
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.SIMULATED.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
timeWarp().clock().fastForwardSeconds(1);
|
||||
|
||||
responseMap = executeWatch("_id");
|
||||
actions = ObjectPath.eval("result.actions", responseMap);
|
||||
for (Map<String, String> result : actions) {
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
|
||||
ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
WatchRecord watchRecord = executionService().execute(ctx);
|
||||
long firstExecution = System.currentTimeMillis();
|
||||
for(ActionWrapper.Result actionResult : watchRecord.result().actionsResults().values()) {
|
||||
assertThat(actionResult.action().status(), equalTo(Action.Result.Status.SIMULATED));
|
||||
}
|
||||
ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
watchRecord = executionService().execute(ctx);
|
||||
for(ActionWrapper.Result actionResult : watchRecord.result().actionsResults().values()) {
|
||||
assertThat(actionResult.action().status(), equalTo(Action.Result.Status.THROTTLED));
|
||||
}
|
||||
timeWarp().clock().fastForwardSeconds(10);
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(11);
|
||||
}
|
||||
|
||||
assertBusy(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
|
||||
WatchRecord watchRecord = executionService().execute(ctx);
|
||||
for (ActionWrapper.Result actionResult : watchRecord.result().actionsResults().values()) {
|
||||
if ("ten_sec_throttle".equals(actionResult.id())) {
|
||||
assertThat(actionResult.action().status(), equalTo(Action.Result.Status.SIMULATED));
|
||||
} else {
|
||||
assertThat(actionResult.action().status(), equalTo(Action.Result.Status.THROTTLED));
|
||||
}
|
||||
}
|
||||
responseMap = executeWatch("_id");
|
||||
actions = ObjectPath.eval("result.actions", responseMap);
|
||||
for (Map<String, String> result : actions) {
|
||||
if ("ten_sec_throttle".equals(result.get("id"))) {
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.SIMULATED.toString().toLowerCase(Locale.ROOT)));
|
||||
} else {
|
||||
assertThat(result.get("status"), equalTo(Action.Result.Status.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
}, 11000 - (System.currentTimeMillis() - firstExecution), TimeUnit.MILLISECONDS);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public void testDefaultThrottlePeriod() throws Exception {
|
||||
@ -194,9 +180,8 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
AvailableAction availableAction = randomFrom(AvailableAction.values());
|
||||
watchSourceBuilder.addAction("default_global_throttle", availableAction.action());
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
refresh(Watch.INDEX);
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().setTime(new DateTime(DateTimeZone.UTC));
|
||||
@ -208,9 +193,9 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
Object resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("simulated"));
|
||||
|
||||
String status = ObjectPath.eval("result.actions.0.status", executeWatchResponse.getRecordSource().getAsMap());
|
||||
assertThat(status, equalTo("simulated"));
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(1);
|
||||
@ -222,30 +207,25 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("throttled"));
|
||||
status = ObjectPath.eval("result.actions.0.status", executeWatchResponse.getRecordSource().getAsMap());
|
||||
assertThat(status, equalTo("throttled"));
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(5);
|
||||
}
|
||||
|
||||
assertBusy(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
|
||||
.setTriggerEvent(new ManualTriggerEvent("execute_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))))
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
Object resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("simulated"));
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("failed to execute", ioe);
|
||||
}
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
ExecuteWatchResponse executeWatchResponse1 = watcherClient().prepareExecuteWatch("_id")
|
||||
.setTriggerEvent(new ManualTriggerEvent("execute_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))))
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
String currentStatus = ObjectPath.eval("result.actions.0.status", executeWatchResponse1.getRecordSource().getAsMap());
|
||||
assertThat(currentStatus, equalTo("simulated"));
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("failed to execute", ioe);
|
||||
}
|
||||
}, 6, TimeUnit.SECONDS);
|
||||
}
|
||||
@ -258,9 +238,8 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
AvailableAction availableAction = randomFrom(AvailableAction.values());
|
||||
watchSourceBuilder.addAction("default_global_throttle", availableAction.action());
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
|
||||
refresh(Watch.INDEX);
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().setTime(new DateTime(DateTimeZone.UTC));
|
||||
@ -272,9 +251,8 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
Object resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("simulated"));
|
||||
String status = ObjectPath.eval("result.actions.0.status", executeWatchResponse.getRecordSource().getAsMap());
|
||||
assertThat(status, equalTo("simulated"));
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(1);
|
||||
@ -286,30 +264,25 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("throttled"));
|
||||
status = ObjectPath.eval("result.actions.0.status", executeWatchResponse.getRecordSource().getAsMap());
|
||||
assertThat(status, equalTo("throttled"));
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().clock().fastForwardSeconds(20);
|
||||
}
|
||||
assertBusy(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
//Since the default throttle period is 5 seconds but we have overridden the period in the watch this should trigger
|
||||
ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
|
||||
.setTriggerEvent(new ManualTriggerEvent("execute_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))))
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
|
||||
Object resultStatus = getExecutionStatus(watchRecordMap);
|
||||
assertThat(resultStatus.toString(), equalTo("simulated"));
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("failed to execute", ioe);
|
||||
}
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
//Since the default throttle period is 5 seconds but we have overridden the period in the watch this should trigger
|
||||
ExecuteWatchResponse executeWatchResponse1 = watcherClient().prepareExecuteWatch("_id")
|
||||
.setTriggerEvent(new ManualTriggerEvent("execute_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))))
|
||||
.setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
|
||||
.setRecordExecution(true)
|
||||
.get();
|
||||
String status1 = ObjectPath.eval("result.actions.0.status", executeWatchResponse1.getRecordSource().getAsMap());
|
||||
assertThat(status1, equalTo("simulated"));
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("failed to execute", ioe);
|
||||
}
|
||||
}, 20, TimeUnit.SECONDS);
|
||||
}
|
||||
@ -317,7 +290,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
public void testFailingActionDoesGetThrottled() throws Exception {
|
||||
TimeValue throttlePeriod = new TimeValue(60, TimeUnit.MINUTES);
|
||||
|
||||
PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id").setSource(watchBuilder()
|
||||
watcherClient().preparePutWatch("_id").setSource(watchBuilder()
|
||||
.trigger(new ScheduleTrigger(new IntervalSchedule(
|
||||
new IntervalSchedule.Interval(60, IntervalSchedule.Interval.Unit.MINUTES))))
|
||||
.defaultThrottlePeriod(throttlePeriod)
|
||||
@ -325,46 +298,50 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
// no DNS resolution here please
|
||||
.addAction("failing_hook", webhookAction(HttpRequestTemplate.builder("http://127.0.0.1/foobar", 80))))
|
||||
.get();
|
||||
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
|
||||
refresh();
|
||||
refresh(Watch.INDEX);
|
||||
|
||||
ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
|
||||
ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent,
|
||||
throttlePeriod);
|
||||
ctxBuilder.recordExecution(true);
|
||||
{
|
||||
Map<String, Object> responseMap = watcherClient().prepareExecuteWatch("_id")
|
||||
.setRecordExecution(true)
|
||||
.get().getRecordSource().getAsMap();
|
||||
|
||||
ManualExecutionContext ctx = ctxBuilder.build();
|
||||
WatchRecord watchRecord = executionService().execute(ctx);
|
||||
String state = ObjectPath.eval("state", responseMap);
|
||||
|
||||
assertThat(watchRecord.state(), equalTo(ExecutionState.EXECUTED));
|
||||
assertThat(watchRecord.result().actionsResults().get("logging").action().status(), equalTo(Action.Result.Status.SUCCESS));
|
||||
assertThat(watchRecord.result().actionsResults().get("failing_hook").action().status(), equalTo(Action.Result.Status.FAILURE));
|
||||
String firstId = ObjectPath.eval("result.actions.0.id", responseMap);
|
||||
String statusLogging, statusFailingHook;
|
||||
if ("logging".equals(firstId)) {
|
||||
statusLogging = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
statusFailingHook = ObjectPath.eval("result.actions.1.status", responseMap);
|
||||
} else {
|
||||
statusFailingHook = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
statusLogging = ObjectPath.eval("result.actions.1.status", responseMap);
|
||||
}
|
||||
|
||||
triggerEvent = new ManualTriggerEvent("_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
|
||||
ctxBuilder = ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent, throttlePeriod);
|
||||
ctxBuilder.recordExecution(true);
|
||||
assertThat(state, equalTo(ExecutionState.EXECUTED.toString().toLowerCase(Locale.ROOT)));
|
||||
assertThat(statusLogging, equalTo(Action.Result.Status.SUCCESS.toString().toLowerCase(Locale.ROOT)));
|
||||
assertThat(statusFailingHook, equalTo(Action.Result.Status.FAILURE.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
|
||||
ctx = ctxBuilder.build();
|
||||
watchRecord = executionService().execute(ctx);
|
||||
assertThat(watchRecord.result().actionsResults().get("logging").action().status(), equalTo(Action.Result.Status.THROTTLED));
|
||||
assertThat(watchRecord.result().actionsResults().get("failing_hook").action().status(), equalTo(Action.Result.Status.FAILURE));
|
||||
assertThat(watchRecord.state(), equalTo(ExecutionState.THROTTLED));
|
||||
}
|
||||
{
|
||||
Map<String, Object> responseMap = watcherClient().prepareExecuteWatch("_id")
|
||||
.setRecordExecution(true)
|
||||
.get().getRecordSource().getAsMap();
|
||||
String state = ObjectPath.eval("state", responseMap);
|
||||
|
||||
private String getExecutionStatus(Map<String, Object> watchRecordMap) {
|
||||
return ObjectPath.eval("result.actions.0.status", watchRecordMap);
|
||||
}
|
||||
String firstId = ObjectPath.eval("result.actions.0.id", responseMap);
|
||||
String statusLogging, statusFailingHook;
|
||||
if ("logging".equals(firstId)) {
|
||||
statusLogging = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
statusFailingHook = ObjectPath.eval("result.actions.1.status", responseMap);
|
||||
} else {
|
||||
statusFailingHook = ObjectPath.eval("result.actions.0.status", responseMap);
|
||||
statusLogging = ObjectPath.eval("result.actions.1.status", responseMap);
|
||||
}
|
||||
|
||||
private ManualExecutionContext getManualExecutionContext(TimeValue throttlePeriod) {
|
||||
ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
|
||||
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
|
||||
return ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent, throttlePeriod)
|
||||
.executionTime(timeWarped() ? new DateTime(timeWarp().clock().millis()) : new DateTime(Clock.systemUTC().millis()))
|
||||
.allActionsMode(ActionExecutionMode.SIMULATE)
|
||||
.recordExecution(true)
|
||||
.build();
|
||||
assertThat(state, equalTo(ExecutionState.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
assertThat(statusLogging, equalTo(Action.Result.Status.THROTTLED.toString().toLowerCase(Locale.ROOT)));
|
||||
assertThat(statusFailingHook, equalTo(Action.Result.Status.FAILURE.toString().toLowerCase(Locale.ROOT)));
|
||||
}
|
||||
}
|
||||
|
||||
enum AvailableAction {
|
||||
@ -386,7 +363,9 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
|
||||
WEBHOOK {
|
||||
@Override
|
||||
public Action.Builder action() throws Exception {
|
||||
HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("foo.bar.com", 1234);
|
||||
HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("localhost", 1234)
|
||||
.path("/")
|
||||
.method(HttpMethod.GET);
|
||||
return WebhookAction.builder(requestBuilder.build());
|
||||
}
|
||||
|
||||
|
@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.execution;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lease.Releasable;
@ -28,6 +29,7 @@ import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.input.ExecutableInput;
import org.elasticsearch.xpack.watcher.input.Input;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;
@ -36,7 +38,6 @@ import org.elasticsearch.xpack.watcher.watch.Payload;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchLockService;
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
@ -72,7 +73,6 @@ public class ExecutionServiceTests extends ESTestCase {
private ExecutableInput input;
private Input.Result inputResult;

private WatchStore watchStore;
private TriggeredWatchStore triggeredWatchStore;
private WatchExecutor executor;
private HistoryStore historyStore;
@ -80,6 +80,8 @@ public class ExecutionServiceTests extends ESTestCase {
private ExecutionService executionService;
private Clock clock;
private ThreadPool threadPool;
private WatcherClientProxy client;
private Watch.Parser parser;

@Before
public void init() throws Exception {
@ -90,7 +92,6 @@ public class ExecutionServiceTests extends ESTestCase {
when(inputResult.payload()).thenReturn(payload);
when(input.execute(any(WatchExecutionContext.class), any(Payload.class))).thenReturn(inputResult);

watchStore = mock(WatchStore.class);
triggeredWatchStore = mock(TriggeredWatchStore.class);
historyStore = mock(HistoryStore.class);

@ -100,8 +101,11 @@ public class ExecutionServiceTests extends ESTestCase {
watchLockService = mock(WatchLockService.class);
clock = ClockMock.frozen();
threadPool = mock(ThreadPool.class);
executionService = new ExecutionService(Settings.EMPTY, historyStore, triggeredWatchStore, executor, watchStore,
watchLockService, clock, threadPool);

client = mock(WatcherClientProxy.class);
parser = mock(Watch.Parser.class);
executionService = new ExecutionService(Settings.EMPTY, historyStore, triggeredWatchStore, executor, watchLockService, clock,
threadPool, parser, client);

ClusterState clusterState = mock(ClusterState.class);
when(triggeredWatchStore.loadTriggeredWatches(clusterState)).thenReturn(new ArrayList<>());
@ -114,7 +118,9 @@ public class ExecutionServiceTests extends ESTestCase {

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
when(watchStore.get("_id")).thenReturn(watch);
GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(client.getWatch("_id")).thenReturn(getResponse);

DateTime now = new DateTime(clock.millis());
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -205,9 +211,12 @@ public class ExecutionServiceTests extends ESTestCase {
Releasable releasable = mock(Releasable.class);
when(watchLockService.acquire("_id")).thenReturn(releasable);

GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(client.getWatch("_id")).thenReturn(getResponse);

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
when(watchStore.get("_id")).thenReturn(watch);

DateTime now = new DateTime(clock.millis());
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -277,7 +286,9 @@ public class ExecutionServiceTests extends ESTestCase {

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
when(watchStore.get("_id")).thenReturn(watch);
GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(client.getWatch("_id")).thenReturn(getResponse);

DateTime now = new DateTime(clock.millis());
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -343,7 +354,9 @@ public class ExecutionServiceTests extends ESTestCase {

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
when(watchStore.get("_id")).thenReturn(watch);
GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(client.getWatch("_id")).thenReturn(getResponse);

DateTime now = new DateTime(clock.millis());
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -408,7 +421,9 @@ public class ExecutionServiceTests extends ESTestCase {

Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("_id");
when(watchStore.get("_id")).thenReturn(watch);
GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(client.getWatch("_id")).thenReturn(getResponse);

DateTime now = new DateTime(clock.millis());
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -773,7 +788,11 @@ public class ExecutionServiceTests extends ESTestCase {
Watch watch = mock(Watch.class);
when(watch.id()).thenReturn("foo");
when(watch.nonce()).thenReturn(1L);
when(watchStore.get(any())).thenReturn(watch);
GetResponse getResponse = mock(GetResponse.class);
when(getResponse.isExists()).thenReturn(true);
when(getResponse.getId()).thenReturn("foo");
when(client.getWatch(any())).thenReturn(getResponse);
when(parser.parseWithSecrets(eq("foo"), eq(true), any(), any())).thenReturn(watch);

// execute needs to fail as well as storing the history
doThrow(new EsRejectedExecutionException()).when(executor).execute(any());
@ -5,54 +5,40 @@
*/
package org.elasticsearch.xpack.watcher.execution;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.xpack.watcher.WatcherService;
import org.elasticsearch.xpack.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.condition.NeverCondition;
import org.elasticsearch.xpack.watcher.condition.ScriptCondition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.delete.DeleteWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRequestBuilder;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.manual.ManualTriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Payload;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.time.Clock;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
@ -61,13 +47,9 @@ import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.cron;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;

public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
@ -125,79 +107,44 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.condition(conditionAlwaysTrue ? AlwaysCondition.INSTANCE : NeverCondition.INSTANCE)
.addAction("log", loggingAction("foobar"));

ManualExecutionContext.Builder ctxBuilder;
Watch parsedWatch = null;
ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
if (recordExecution) {
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
refresh();
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));
//If we are persisting the state we need to use the exact watch that is in memory
ctxBuilder = ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent, timeValueSeconds(5));
} else {
parsedWatch = watchParser().parse("_id", false, watchBuilder.buildAsBytes(XContentType.JSON));
ctxBuilder = ManualExecutionContext.builder(parsedWatch, false, triggerEvent, timeValueSeconds(5));
}
watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();

if (ignoreCondition) {
ctxBuilder.withCondition(AlwaysCondition.RESULT_INSTANCE);
}

ctxBuilder.recordExecution(recordExecution);


if ("_all".equals(action)) {
ctxBuilder.allActionsMode(ActionExecutionMode.SIMULATE);
} else {
ctxBuilder.actionMode(action, ActionExecutionMode.SIMULATE);
}

ManualExecutionContext ctx = ctxBuilder.build();
ExecuteWatchRequestBuilder executeWatchRequestBuilder = watcherClient().prepareExecuteWatch("_id");
executeWatchRequestBuilder.setIgnoreCondition(ignoreCondition);
executeWatchRequestBuilder.setRecordExecution(recordExecution);
executeWatchRequestBuilder.setActionMode(action, ActionExecutionMode.SIMULATE);

refresh();
long oldRecordCount = docCount(HistoryStore.INDEX_PREFIX_WITH_TEMPLATE + "*", HistoryStore.DOC_TYPE, matchAllQuery());

WatchRecord watchRecord = executionService().execute(ctx);
ExecuteWatchResponse executeWatchResponse = executeWatchRequestBuilder.get();
Map<String, Object> responseMap = executeWatchResponse.getRecordSource().getAsMap();

refresh();

long newRecordCount = docCount(HistoryStore.INDEX_PREFIX_WITH_TEMPLATE + "*", HistoryStore.DOC_TYPE, matchAllQuery());
long expectedCount = oldRecordCount + (recordExecution ? 1 : 0);

assertThat("the expected count of history records should be [" + expectedCount + "]", newRecordCount, equalTo(expectedCount));

List<Map<String, Object>> actions = ObjectPath.eval("result.actions", responseMap);
if (ignoreCondition) {
assertThat("The action should have run", watchRecord.result().actionsResults().size(), equalTo(1));
assertThat("The action should have run", actions.size(), equalTo(1));
} else if (!conditionAlwaysTrue) {
assertThat("The action should not have run", watchRecord.result().actionsResults().size(), equalTo(0));
assertThat("The action should not have run", actions.size(), equalTo(0));
}

if ((ignoreCondition || conditionAlwaysTrue) && action == null) {
assertThat("The action should have run non simulated", watchRecord.result().actionsResults().get("log").action(),
not(instanceOf(LoggingAction.Result.Simulated.class)) );
if (ignoreCondition || conditionAlwaysTrue) {
assertThat("The action should have run simulated", actions.get(0).get("status"), is("simulated"));
}

if ((ignoreCondition || conditionAlwaysTrue) && action != null ) {
assertThat("The action should have run simulated",
watchRecord.result().actionsResults().get("log").action(), instanceOf(LoggingAction.Result.Simulated.class));
}

Watch testWatch = watchService().getWatch("_id");
if (recordExecution) {
refresh();
GetWatchResponse response = watcherClient().getWatch(new GetWatchRequest("_id")).actionGet();
if (ignoreCondition || conditionAlwaysTrue) {
assertThat(testWatch.status().actionStatus("log").ackStatus().state(), equalTo(ActionStatus.AckStatus.State.ACKABLE));
GetWatchResponse response = watcherClient().getWatch(new GetWatchRequest("_id")).actionGet();
assertThat(response.getStatus().actionStatus("log").ackStatus().state(), equalTo(ActionStatus.AckStatus.State.ACKABLE));
assertThat(response.getStatus().actionStatus("log").ackStatus().state(), is(ActionStatus.AckStatus.State.ACKABLE));
} else {
assertThat(testWatch.status().actionStatus("log").ackStatus().state(),
equalTo(ActionStatus.AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION));
assertThat(response.getStatus().actionStatus("log").ackStatus().state(),
is(ActionStatus.AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION));
}
} else {
assertThat(parsedWatch.status().actionStatus("log").ackStatus().state(),
equalTo(ActionStatus.AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION));
}
}

@ -214,7 +161,8 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
builder.setRecordExecution(false);
}
if (randomBoolean()) {
builder.setTriggerEvent(new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
DateTime now = new DateTime(DateTimeZone.UTC);
builder.setTriggerEvent(new ScheduleTriggerEvent(now, now));
}

ExecuteWatchResponse executeWatchResponse = builder.get();
@ -231,16 +179,13 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.condition(AlwaysCondition.INSTANCE)
.addAction("log", loggingAction("foobar"));

try {
watcherClient().prepareExecuteWatch()
.setWatchSource(watchBuilder)
.setRecordExecution(true)
.setTriggerEvent(new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)))
.get();
fail();
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("the execution of an inline watch cannot be recorded"));
}
ActionRequestValidationException e = expectThrows(ActionRequestValidationException.class, () ->
watcherClient().prepareExecuteWatch()
.setWatchSource(watchBuilder)
.setRecordExecution(true)
.setTriggerEvent(new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)))
.get());
assertThat(e.getMessage(), containsString("the execution of an inline watch cannot be recorded"));
}

public void testExecutionWithInlineWatch_withWatchId() throws Exception {
@ -269,10 +214,8 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.addAction("log", loggingAction("foobar"));

PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
refresh();
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));
watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();
refresh(Watch.INDEX);

Map<String, Object> map1 = new HashMap<>();
map1.put("foo", "bar");
@ -280,28 +223,24 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
Map<String, Object> map2 = new HashMap<>();
map2.put("foo", map1);

ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
ManualExecutionContext.Builder ctxBuilder1 = ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent,
timeValueSeconds(5));
ctxBuilder1.actionMode("_all", ActionExecutionMode.SIMULATE);
ExecuteWatchResponse firstResponse = watcherClient().prepareExecuteWatch("_id")
.setActionMode("_all", ActionExecutionMode.SIMULATE)
.setAlternativeInput(map1)
.setRecordExecution(true)
.setTriggerEvent(new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)))
.get();

ctxBuilder1.withInput(new SimpleInput.Result(new Payload.Simple(map1)));
ctxBuilder1.recordExecution(true);
ExecuteWatchResponse secondResponse = watcherClient().prepareExecuteWatch("_id")
.setActionMode("_all", ActionExecutionMode.SIMULATE)
.setAlternativeInput(map2)
.setRecordExecution(true)
.setTriggerEvent(new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)))
.get();

WatchRecord watchRecord1 = executionService().execute(ctxBuilder1.build());

ManualExecutionContext.Builder ctxBuilder2 = ManualExecutionContext.builder(watchService().getWatch("_id"), true, triggerEvent,
timeValueSeconds(5));
ctxBuilder2.actionMode("_all", ActionExecutionMode.SIMULATE);

ctxBuilder2.withInput(new SimpleInput.Result(new Payload.Simple(map2)));
ctxBuilder2.recordExecution(true);

WatchRecord watchRecord2 = executionService().execute(ctxBuilder2.build());

assertThat(watchRecord1.result().inputResult().payload().data().get("foo").toString(), equalTo("bar"));
assertThat(watchRecord2.result().inputResult().payload().data().get("foo"), instanceOf(Map.class));
String firstPayload = ObjectPath.eval("result.input.payload.foo", firstResponse.getRecordSource().getAsMap());
assertThat(firstPayload, is("bar"));
Map<String, String> secondPayload = ObjectPath.eval("result.input.payload", secondResponse.getRecordSource().getAsMap());
assertThat(secondPayload, instanceOf(Map.class));
}

public void testExecutionRequestDefaults() throws Exception {
@ -309,7 +248,7 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.input(simpleInput("foo", "bar"))
.condition(NeverCondition.INSTANCE)
.defaultThrottlePeriod(new TimeValue(1, TimeUnit.HOURS))
.defaultThrottlePeriod(TimeValue.timeValueHours(1))
.addAction("log", loggingAction("foobar"));
watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();

@ -328,7 +267,7 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.input(simpleInput("foo", "bar"))
.condition(AlwaysCondition.INSTANCE)
.defaultThrottlePeriod(new TimeValue(1, TimeUnit.HOURS))
.defaultThrottlePeriod(TimeValue.timeValueHours(1))
.addAction("log", loggingAction("foobar"));
watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();

@ -355,90 +294,13 @@ public class ManualExecutionTests extends AbstractWatcherIntegrationTestCase {
.condition(new ScriptCondition(script))
.addAction("log", loggingAction("foobar"));

Watch watch = watchParser().parse("_id", false, watchBuilder.buildAsBytes(XContentType.JSON));
ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watch, false, new ManualTriggerEvent("_id",
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))),
new TimeValue(1, TimeUnit.HOURS));
WatchRecord record = executionService().execute(ctxBuilder.build());
assertThat(record.result().executionDurationMs(), greaterThanOrEqualTo(100L));
watcherClient().preparePutWatch("_id").setSource(watchBuilder).get();
refresh(Watch.INDEX);

ScheduleTriggerEvent triggerEvent = new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC));
ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id").setTriggerEvent(triggerEvent).get();
Integer duration = ObjectPath.eval("result.execution_duration", executeWatchResponse.getRecordSource().getAsMap());

assertThat(duration, greaterThanOrEqualTo(100));
}

public void testForceDeletionOfLongRunningWatch() throws Exception {
Script script = new Script(ScriptType.INLINE, WATCHER_LANG, "sleep", singletonMap("millis", 10000L));
WatchSourceBuilder watchBuilder = watchBuilder()
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.input(simpleInput("foo", "bar"))
.condition(new ScriptCondition(script))
.defaultThrottlePeriod(new TimeValue(1, TimeUnit.HOURS))
.addAction("log", loggingAction("foobar"));

PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
refresh();
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));

int numberOfThreads = scaledRandomIntBetween(1, 5);
CountDownLatch startLatch = new CountDownLatch(1);

List<Thread> threads = new ArrayList<>();
for (int i = 0; i < numberOfThreads; ++i) {
threads.add(new Thread(new ExecutionRunner(watchService(), executionService(), "_id", startLatch)));
}

for (Thread thread : threads) {
thread.start();
}
DeleteWatchResponse deleteWatchResponse = watcherClient().prepareDeleteWatch("_id").get();
assertThat(deleteWatchResponse.isFound(), is(true));

deleteWatchResponse = watcherClient().prepareDeleteWatch("_id").get();
assertThat(deleteWatchResponse.isFound(), is(false));

startLatch.countDown();

long startJoin = System.currentTimeMillis();
for (Thread thread : threads) {
thread.join(30_000L);
assertFalse(thread.isAlive());
}
long endJoin = System.currentTimeMillis();
TimeValue tv = new TimeValue(10 * (numberOfThreads+1), TimeUnit.SECONDS);
assertThat("Shouldn't take longer than [" + tv.getSeconds() + "] seconds for all the threads to stop", (endJoin - startJoin),
lessThan(tv.getMillis()));
}

public static class ExecutionRunner implements Runnable {

final WatcherService watcherService;
final ExecutionService executionService;
final String watchId;
final CountDownLatch startLatch;
final ManualExecutionContext.Builder ctxBuilder;

public ExecutionRunner(WatcherService watcherService, ExecutionService executionService, String watchId,
CountDownLatch startLatch) {
this.watcherService = watcherService;
this.executionService = executionService;
this.watchId = watchId;
this.startLatch = startLatch;
ManualTriggerEvent triggerEvent = new ManualTriggerEvent(watchId,
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC)));
ctxBuilder = ManualExecutionContext.builder(watcherService.getWatch(watchId), true, triggerEvent, timeValueSeconds(5));
ctxBuilder.recordExecution(true);
ctxBuilder.actionMode("_all", ActionExecutionMode.FORCE_EXECUTE);
}

@Override
public void run() {
try {
startLatch.await();
WatchRecord record = executionService.execute(ctxBuilder.build());
assertThat(record, notNullValue());
assertThat(record.state(), is(ExecutionState.NOT_EXECUTED_WATCH_MISSING));
} catch (Exception e) {
throw new ElasticsearchException("Failure mode execution of [{}] failed in an unexpected way", e, watchId);
}
}
}

}
@ -11,14 +11,13 @@ import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.CompareCondition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth;
import org.elasticsearch.xpack.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.CompareCondition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
@ -57,7 +56,6 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
return plugins;
}

@TestLogging("org.elasticsearch.watcher.support.http:TRACE")
public void testHttpInput() throws Exception {
createIndex("index");
client().prepareIndex("index", "type", "id").setSource("{}").setRefreshPolicy(IMMEDIATE).get();
@ -71,7 +69,7 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
.body(jsonBuilder().startObject().field("size", 1).endObject().string())
.auth(securityEnabled() ? new BasicAuth("test", "changeme".toCharArray()) : null)))
.condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L))
.addAction("_id", loggingAction("watch [{{ctx.watch_id}}] matched")))
.addAction("_id", loggingAction("anything")))
.get();

if (timeWarped()) {
@ -90,7 +88,7 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
.path("/_cluster/stats")
.auth(securityEnabled() ? new BasicAuth("test", "changeme".toCharArray()) : null)))
.condition(new CompareCondition("ctx.payload.nodes.count.total", CompareCondition.Op.GTE, 1L))
.addAction("_id", loggingAction("watch [{{ctx.watch_id}}] matched")))
.addAction("_id", loggingAction("anything")))
.get();

assertTrue(putWatchResponse.isCreated());
@ -101,7 +99,6 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
assertWatchWithMinimumPerformedActionsCount("_name", 1, false);
}

@TestLogging("org.elasticsearch.watcher.support.http:TRACE")
public void testInputFiltering() throws Exception {
WatcherClient watcherClient = watcherClient();
createIndex("idx");
@ -18,10 +18,12 @@ import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsRespo
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;

import java.util.Collections;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
@ -56,6 +58,8 @@ public class BasicSecurityTests extends AbstractWatcherIntegrationTestCase {
}

public void testWatcherMonitorRole() throws Exception {
assertAcked(client().admin().indices().prepareCreate(Watch.INDEX));

// stats and get watch apis require at least monitor role:
String token = basicAuthHeaderValue("test", new SecuredString("changeme".toCharArray()));
try {
@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
@ -69,11 +70,10 @@ import org.elasticsearch.xpack.watcher.execution.ExecutionState;
import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.trigger.ScheduleTriggerEngineMock;
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.AfterClass;
@ -91,6 +91,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
@ -128,8 +129,8 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase
protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException {
if (securityEnabled == null) {
securityEnabled = enableSecurity();
scheduleEngineName = randomFrom("ticker", "scheduler");
}
scheduleEngineName = randomFrom("ticker", "scheduler");
return super.buildTestCluster(scope, seed);
}

@ -327,7 +328,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase

CreateIndexResponse response = client().admin().indices().prepareCreate(newIndex)
.setCause("Index to test aliases with .watches index")
.addAlias(new Alias(WatchStore.INDEX))
.addAlias(new Alias(Watch.INDEX))
.setSettings((Map<String, Object>) parserMap.get("settings"))
.addMapping("watch", (Map<String, Object>) allMappings.get("watch"))
.get();
@ -395,18 +396,6 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase
return getInstanceFromMaster(Watch.Parser.class);
}

protected ExecutionService executionService() {
return getInstanceFromMaster(ExecutionService.class);
}

protected WatcherService watchService() {
return getInstanceFromMaster(WatcherService.class);
}

protected TriggerService triggerService() {
return getInstanceFromMaster(TriggerService.class);
}

public AbstractWatcherIntegrationTestCase() {
super();
}
@ -9,14 +9,12 @@ import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.watcher.trigger.Trigger;
import org.elasticsearch.xpack.watcher.trigger.TriggerEngine;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEngine;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.engine.BaseTriggerEngineTestCase;
import org.elasticsearch.xpack.watcher.trigger.schedule.engine.SchedulerScheduleTriggerEngine;
import org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleTriggerEngine;

@ -59,7 +57,7 @@ public class ScheduleEngineTriggerBenchmark {
.build();
List<TriggerEngine.Job> jobs = new ArrayList<>(numWatches);
for (int i = 0; i < numWatches; i++) {
jobs.add(new SimpleJob("job_" + i, interval(interval + "s")));
jobs.add(new BaseTriggerEngineTestCase.SimpleJob("job_" + i, interval(interval + "s")));
}
ScheduleRegistry scheduleRegistry = new ScheduleRegistry(emptySet());
List<String> impls = new ArrayList<>(Arrays.asList(new String[]{"schedule", "ticker"}));
@ -143,28 +141,6 @@ public class ScheduleEngineTriggerBenchmark {
}
}


static class SimpleJob implements TriggerEngine.Job {

private final String name;
private final ScheduleTrigger trigger;

public SimpleJob(String name, Schedule schedule) {
this.name = name;
this.trigger = new ScheduleTrigger(schedule);
}

@Override
public String id() {
return name;
}

@Override
public Trigger trigger() {
return trigger;
}
}

static class Stats {

final String implementation;
@ -33,7 +33,7 @@ import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.ScriptCondition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.XPackPlugin;

import java.io.IOException;
@ -106,7 +106,7 @@ public class WatcherScheduleEngineBenchmark {
System.out.println("===============> indexing [" + numWatches + "] watches");
for (int i = 0; i < numWatches; i++) {
final String id = "_id_" + i;
client.prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, id)
client.prepareIndex(Watch.INDEX, Watch.DOC_TYPE, id)
.setSource(new WatchSourceBuilder()
.trigger(schedule(interval(interval + "s")))
.input(searchInput(templateRequest(new SearchSourceBuilder(), "test")))
@ -115,7 +115,7 @@ public class WatcherScheduleEngineBenchmark {
.buildAsBytes(XContentType.JSON)
).get();
}
client.admin().indices().prepareFlush(WatchStore.INDEX, "test").get();
client.admin().indices().prepareFlush(Watch.INDEX, "test").get();
System.out.println("===============> indexed [" + numWatches + "] watches");
}
}
@ -137,7 +137,7 @@ public class WatcherScheduleEngineBenchmark {
try (final Client client = node.client()) {
client.admin().cluster().prepareHealth().setWaitForNodes("2").get();
client.admin().indices().prepareDelete(HistoryStore.INDEX_PREFIX_WITH_TEMPLATE + "*").get();
client.admin().cluster().prepareHealth(WatchStore.INDEX, "test").setWaitForYellowStatus().get();
client.admin().cluster().prepareHealth(Watch.INDEX, "test").setWaitForYellowStatus().get();

Clock clock = node.injector().getInstance(Clock.class);
WatcherClient watcherClient = node.injector().getInstance(WatcherClient.class);
@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
@ -27,11 +28,10 @@ import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedules;
import org.elasticsearch.xpack.watcher.trigger.schedule.support.MonthTimes;
import org.elasticsearch.xpack.watcher.trigger.schedule.support.WeekTimes;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;

import java.time.Clock;

import java.util.Collections;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -61,6 +61,11 @@ import static org.hamcrest.Matchers.notNullValue;

public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {

@Override
protected boolean enableSecurity() {
return false;
}

@Override
protected boolean timeWarped() {
return true;
@ -130,7 +135,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
assertThat(deleteWatchResponse.isFound(), is(true));

refresh();
assertHitCount(client().prepareSearch(WatchStore.INDEX).setSize(0).get(), 0L);
assertHitCount(client().prepareSearch(Watch.INDEX).setSize(0).get(), 0L);

// Deleting the same watch for the second time
deleteWatchResponse = watcherClient.prepareDeleteWatch("_name").get();
@ -163,7 +168,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
// In watch store we fail parsing if a watch contains undefined fields.
}
try {
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_name")
client().prepareIndex(Watch.INDEX, Watch.DOC_TYPE, "_name")
.setSource(watchSource)
.get();
fail();
@ -247,6 +252,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
equalTo(before));
}

@TestLogging("org.elasticsearch.xpack.watcher.trigger:DEBUG")
public void testConditionSearchWithSource() throws Exception {
SearchSourceBuilder searchSourceBuilder = searchSource().query(matchQuery("level", "a"));
testConditionSearch(templateRequest(searchSourceBuilder, "events"));
@ -394,6 +400,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
.get();

refresh();
timeWarp().clock().fastForwardSeconds(1);
timeWarp().scheduler().trigger(watchName);
assertWatchWithNoActionNeeded(watchName, 1);

@ -401,6 +408,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
.setSource("level", "b")
.get();
refresh();
timeWarp().clock().fastForwardSeconds(1);
timeWarp().scheduler().trigger(watchName);
assertWatchWithNoActionNeeded(watchName, 2);

@ -408,6 +416,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
.setSource("level", "a")
.get();
refresh();
timeWarp().clock().fastForwardSeconds(1);
timeWarp().scheduler().trigger(watchName);
assertWatchWithMinimumPerformedActionsCount(watchName, 1);
}
@ -7,13 +7,13 @@ package org.elasticsearch.xpack.watcher.test.integration;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.watcher.WatcherState;
import org.elasticsearch.xpack.watcher.condition.Condition;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.condition.CompareCondition;
import org.elasticsearch.xpack.watcher.condition.Condition;
import org.elasticsearch.xpack.watcher.execution.ExecutionState;
import org.elasticsearch.xpack.watcher.execution.TriggeredWatch;
import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore;
@ -25,7 +25,6 @@ import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsResponse;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -36,6 +35,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDI
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.indexAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
@ -51,62 +51,11 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {

@Override
protected boolean timeWarped() {
// timewarping isn't necessary here, because we aren't testing triggering or throttling
return false;
}

public void testLoadMalformedWatch() throws Exception {
// valid watch
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id0")
.setSource(jsonBuilder().startObject()
.startObject(Watch.Field.TRIGGER.getPreferredName())
.startObject("schedule")
.field("interval", "1s")
.endObject()
.endObject()
.startObject(Watch.Field.ACTIONS.getPreferredName())
.endObject()
.endObject())
.get();

// invalid interval
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id2")
.setSource(jsonBuilder().startObject()
.startObject(Watch.Field.TRIGGER.getPreferredName())
.startObject("schedule")
.field("interval", true)
.endObject()
.endObject()
.startObject(Watch.Field.ACTIONS.getPreferredName())
.endObject()
.endObject())
.get();

// illegal top level field
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id3")
.setSource(jsonBuilder().startObject()
.startObject(Watch.Field.TRIGGER.getPreferredName())
.startObject("schedule")
.field("interval", "1s")
.endObject()
.startObject("illegal_field").endObject()
.endObject()
.startObject(Watch.Field.ACTIONS.getPreferredName()).endObject()
.endObject())
.get();

stopWatcher();
startWatcher();

WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
// Only the valid watch should have been loaded
assertThat(response.getWatchesCount(), equalTo(1L));
assertThat(watcherClient().prepareGetWatch("_id0").get().getId(), Matchers.equalTo("_id0"));
}

public void testLoadMalformedWatchRecord() throws Exception {
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id")
client().prepareIndex(Watch.INDEX, Watch.DOC_TYPE, "_id")
.setSource(jsonBuilder().startObject()
.startObject(Watch.Field.TRIGGER.getPreferredName())
.startObject("schedule")
@ -185,6 +134,8 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
}

public void testDeletedWhileQueued() throws Exception {
assertAcked(client().admin().indices().prepareCreate(".watches"));

DateTime now = DateTime.now(UTC);
Wid wid = new Wid("_id", 1, now);
ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
@ -215,7 +166,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
WatcherSearchTemplateRequest request =
templateRequest(searchSource().query(termQuery("field", "value")), "my-index");
for (int i = 0; i < numWatches; i++) {
client().prepareIndex(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id" + i)
client().prepareIndex(Watch.INDEX, Watch.DOC_TYPE, "_id" + i)
.setSource(watchBuilder()
.trigger(schedule(cron("0 0/5 * * * ? 2050")))
.input(searchInput(request))
@ -235,15 +186,19 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
assertThat(response.getWatchesCount(), equalTo((long) numWatches));
}

@TestLogging("org.elasticsearch.watcher.actions:DEBUG")
public void testTriggeredWatchLoading() throws Exception {
createIndex("output");
client().prepareIndex("my-index", "foo", "bar")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.setSource("field", "value").get();

WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getWatchesCount(), equalTo(0L));

WatcherSearchTemplateRequest request =
templateRequest(searchSource().query(termQuery("field", "value")), "my-index");

int numWatches = 8;
for (int i = 0; i < numWatches; i++) {
String watchId = "_id" + i;
@ -273,27 +228,27 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
stopWatcher();
startWatcher();

assertBusy(new Runnable() {
assertBusy(() -> {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response1 = watcherClient().prepareWatcherStats().get();
assertThat(response1.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response1.getThreadPoolQueueSize(), equalTo(0L));

@Override
public void run() {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getThreadPoolQueueSize(), equalTo(0L));

// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch("output").get();
assertHitCount(searchResponse, numRecords);
}
// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch("output").get();
assertHitCount(searchResponse, numRecords);
}, 30, TimeUnit.SECONDS);
}

public void testMixedTriggeredWatchLoading() throws Exception {
createIndex("output");
client().prepareIndex("my-index", "foo", "bar")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.setSource("field", "value").get();

WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getWatchesCount(), equalTo(0L));
@ -324,21 +279,18 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
stopWatcher();
startWatcher();

assertBusy(new Runnable() {
@Override
public void run() {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getThreadPoolQueueSize(), equalTo(0L));
assertBusy(() -> {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response1 = watcherClient().prepareWatcherStats().get();
assertThat(response1.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response1.getThreadPoolQueueSize(), equalTo(0L));

// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch("output").get();
assertHitCount(searchResponse, numRecords);
}
// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch("output").get();
assertHitCount(searchResponse, numRecords);
});
}

@ -353,10 +305,16 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
assertThat(response.getWatcherMetaData().manuallyStopped(), is(false));
}

@Override
protected boolean enableSecurity() {
return false;
}

public void testWatchRecordSavedTwice() throws Exception {
// Watcher could be prevented from starting if a watch record was executed twice or more and the watch didn't exist
// for that watch record or the execution threadpool rejected the watch record.
// A watch record without a watch is the easiest to simulate, so that is what this test does.
assertAcked(client().admin().indices().prepareCreate(Watch.INDEX));

DateTime triggeredTime = new DateTime(2015, 11, 5, 0, 0, 0, 0, DateTimeZone.UTC);
final String watchRecordIndex = HistoryStore.getHistoryIndexNameForTime(triggeredTime);
@ -380,24 +338,21 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {

stopWatcher();
startWatcher();
assertBusy(new Runnable() {

@Override
public void run() {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getThreadPoolQueueSize(), equalTo(0L));
assertBusy(() -> {
// We need to wait until all the records are processed from the internal execution queue, only then we can assert
// that numRecords watch records have been processed as part of starting up.
WatcherStatsResponse response = watcherClient().prepareWatcherStats().get();
assertThat(response.getWatcherState(), equalTo(WatcherState.STARTED));
assertThat(response.getThreadPoolQueueSize(), equalTo(0L));

// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch(watchRecordIndex).setSize(numRecords).get();
assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo((long) numRecords));
for (int i = 0; i < numRecords; i++) {
assertThat(searchResponse.getHits().getAt(i).getSource().get("state"), Matchers.equalTo("executed_multiple_times"));
}
// but even then since the execution of the watch record is async it may take a little bit before
// the actual documents are in the output index
refresh();
SearchResponse searchResponse = client().prepareSearch(watchRecordIndex).setSize(numRecords).get();
assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo((long) numRecords));
for (int i = 0; i < numRecords; i++) {
assertThat(searchResponse.getHits().getAt(i).getSource().get("state"), is(ExecutionState.EXECUTED_MULTIPLE_TIMES.id()));
}
});
}
@ -23,7 +23,7 @@ import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRes
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
@ -92,7 +92,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC

// verifying the basic auth password is stored encrypted in the index when security
// is enabled, and when it's not enabled, it's stored in plain text
GetResponse response = client().prepareGet(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id").get();
GetResponse response = client().prepareGet(Watch.INDEX, Watch.DOC_TYPE, "_id").get();
assertThat(response, notNullValue());
assertThat(response.getId(), is("_id"));
Map<String, Object> source = response.getSource();
@ -156,7 +156,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC

// verifying the basic auth password is stored encrypted in the index when security
// is enabled, and when it's not enabled, the password should be stored in plain text
GetResponse response = client().prepareGet(WatchStore.INDEX, WatchStore.DOC_TYPE, "_id").get();
GetResponse response = client().prepareGet(Watch.INDEX, Watch.DOC_TYPE, "_id").get();
assertThat(response, notNullValue());
assertThat(response.getId(), is("_id"));
Map<String, Object> source = response.getSource();
@ -26,7 +26,6 @@ import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.elasticsearch.xpack.watcher.watch.WatchStore;
import org.hamcrest.Matchers;
import org.junit.Before;

@ -96,8 +95,8 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
refresh();
long a1CountAfterAck = docCount("actions", "action1", matchAllQuery());
long a2CountAfterAck = docCount("actions", "action2", matchAllQuery());
assertThat(a1CountAfterAck, greaterThanOrEqualTo((long) 1));
assertThat(a2CountAfterAck, greaterThanOrEqualTo((long) 1));
assertThat(a1CountAfterAck, greaterThan(0L));
assertThat(a2CountAfterAck, greaterThan(0L));

timeWarp().scheduler().trigger("_id", 4, TimeValue.timeValueSeconds(5));
flush();
@ -227,7 +226,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
assertThat(watchResponse.getStatus().actionStatus("_id").ackStatus().state(), Matchers.equalTo(ActionStatus.AckStatus.State.ACKED));

refresh();
GetResponse getResponse = client().get(new GetRequest(WatchStore.INDEX, WatchStore.DOC_TYPE, "_name")).actionGet();
GetResponse getResponse = client().get(new GetRequest(Watch.INDEX, Watch.DOC_TYPE, "_name")).actionGet();
Watch indexedWatch = watchParser().parse("_name", true, getResponse.getSourceAsBytesRef());
assertThat(watchResponse.getStatus().actionStatus("_id").ackStatus().state(),
equalTo(indexedWatch.status().actionStatus("_id").ackStatus().state()));
@ -15,7 +15,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.execution.ExecutionState;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
@ -38,12 +37,13 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase {

    @Override
    protected boolean timeWarped() {
        return false;
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/4002")
    // FIXME not to be sleep based
    public void testDeactivateAndActivate() throws Exception {
        WatcherClient watcherClient = watcherClient();

@ -52,7 +52,6 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(interval("1s")))
                        .input(simpleInput("foo", "bar"))
                        .condition(AlwaysCondition.INSTANCE)
                        .addAction("_a1", indexAction("actions", "action1"))
                        .defaultThrottlePeriod(new TimeValue(0, TimeUnit.SECONDS)))
                .get();
@ -109,7 +108,6 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(cron("0 0 0 1 1 ? 2050"))) // some time in 2050
                        .input(simpleInput("foo", "bar"))
                        .condition(AlwaysCondition.INSTANCE)
                        .addAction("_a1", indexAction("actions", "action1"))
                        .defaultThrottlePeriod(new TimeValue(0, TimeUnit.SECONDS)))
                .get();
@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;

public class DeleteWatchTests extends AbstractWatcherIntegrationTestCase {

    public void testDelete() throws Exception {
        ensureWatcherStarted();
        PutWatchResponse putResponse = watcherClient().preparePutWatch("_name").setSource(watchBuilder()
@ -5,21 +5,27 @@
 */
package org.elasticsearch.xpack.watcher.transport.action.get;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.watch.Watch;

import java.util.Map;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
@ -41,7 +47,6 @@ public class GetWatchTests extends AbstractWatcherIntegrationTestCase {
        assertThat(getResponse, notNullValue());
        assertThat(getResponse.isFound(), is(true));
        assertThat(getResponse.getId(), is("_name"));
        assertThat(getResponse.getStatus().version(), is(putResponse.getVersion()));
        Map<String, Object> source = getResponse.getSource().getAsMap();
        assertThat(source, notNullValue());
        assertThat(source, hasKey("trigger"));
@ -51,7 +56,13 @@ public class GetWatchTests extends AbstractWatcherIntegrationTestCase {
        assertThat(source, not(hasKey("status")));
    }

    public void testGetNotFoundOnNonExistingIndex() throws Exception {
        Exception e = expectThrows(Exception.class, () -> watcherClient().getWatch(new GetWatchRequest("_name")).get());
        assertThat(e.getMessage(), containsString("no such index"));
    }

    public void testGetNotFound() throws Exception {
        assertAcked(client().admin().indices().prepareCreate(Watch.INDEX));
        GetWatchResponse getResponse = watcherClient().getWatch(new GetWatchRequest("_name")).get();
        assertThat(getResponse, notNullValue());
        assertThat(getResponse.getId(), is("_name"));
@ -10,6 +10,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.LatchScriptEngine;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.watcher.WatcherState;
import org.elasticsearch.xpack.watcher.actions.ActionBuilders;
import org.elasticsearch.xpack.watcher.condition.ScriptCondition;
@ -47,6 +48,11 @@ public class WatchStatsTests extends AbstractWatcherIntegrationTestCase {
        return false;
    }

    @Override
    protected boolean enableSecurity() {
        return false;
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
@ -73,6 +79,7 @@ public class WatchStatsTests extends AbstractWatcherIntegrationTestCase {
        getLatchScriptEngine().finishScriptExecution();
    }

    @TestLogging("org.elasticsearch.xpack.watcher.trigger.schedule.engine:TRACE,org.elasticsearch.xpack.scheduler:TRACE")
    public void testCurrentWatches() throws Exception {
        watcherClient().preparePutWatch("_id").setSource(watchBuilder()
                .trigger(schedule(interval("1s")))
@ -24,6 +24,7 @@ import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.daily;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
@ -184,17 +185,42 @@ public abstract class BaseTriggerEngineTestCase extends ESTestCase {
        }
    }

    public void testAddSameJobSeveralTimes() {
    public void testAddSameJobSeveralTimesAndExecutedOnce() throws InterruptedException {
        engine.start(Collections.emptySet());
        engine.register(events -> logger.info("triggered job"));

        final CountDownLatch firstLatch = new CountDownLatch(1);
        final CountDownLatch secondLatch = new CountDownLatch(1);
        AtomicInteger counter = new AtomicInteger(0);
        engine.register(events -> {
            events.forEach(event -> {
                if (counter.getAndIncrement() == 0) {
                    firstLatch.countDown();
                } else {
                    secondLatch.countDown();
                }
            });
        });

        int times = scaledRandomIntBetween(3, 30);
        for (int i = 0; i < times; i++) {
            engine.add(new SimpleJob("_id", interval("10s")));
            engine.add(new SimpleJob("_id", interval("1s")));
        }

        advanceClockIfNeeded(new DateTime(clock.millis(), UTC).plusMillis(1100));
        if (!firstLatch.await(3, TimeUnit.SECONDS)) {
            fail("waiting too long for all watches to be triggered");
        }

        advanceClockIfNeeded(new DateTime(clock.millis(), UTC).plusMillis(1100));
        if (!secondLatch.await(3, TimeUnit.SECONDS)) {
            fail("waiting too long for all watches to be triggered");
        }

        // ensure job was only called twice independent from its name
        assertThat(counter.get(), is(2));
    }

    static class SimpleJob implements TriggerEngine.Job {
    public static class SimpleJob implements TriggerEngine.Job {

        private final String name;
        private final ScheduleTrigger trigger;
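The new testAddSameJobSeveralTimesAndExecutedOnce above only passes if the engine keys jobs by id, so re-adding "_id" replaces the earlier schedule instead of registering a second copy of the job. A minimal sketch of that de-duplication idea (hypothetical SimpleTriggerEngine, not the x-pack scheduler):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical sketch: jobs are keyed by id, so repeated add() calls for the same id
// overwrite the earlier registration instead of scheduling a second job.
class SimpleTriggerEngine {
    private final Map<String, Runnable> jobs = new ConcurrentHashMap<>();

    void add(String id, Runnable job) {
        jobs.put(id, job);                    // last add() wins for a given id
    }

    void trigger() {
        jobs.values().forEach(Runnable::run); // each id fires exactly once per tick
    }

    int jobCount() {
        return jobs.size();
    }
}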
@ -1,50 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.watcher.watch;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.watcher.actions.ActionStatus.AckStatus.State;

import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.core.Is.is;
import static org.joda.time.DateTime.now;

public class WatchStatusTests extends ESTestCase {

    public void testThatWatchStatusDirtyOnConditionCheck() throws Exception {
        // no actions, met condition
        WatchStatus status = new WatchStatus(now(), new HashMap<>());
        status.onCheck(true, now());
        assertThat(status.dirty(), is(true));

        // no actions, unmet condition
        status = new WatchStatus(now(), new HashMap<>());
        status.onCheck(false, now());
        assertThat(status.dirty(), is(true));

        // actions, no action with reset ack status, unmet condition
        Map<String, ActionStatus> actions = new HashMap<>();
        actions.put(randomAsciiOfLength(10), new ActionStatus(now()));
        status = new WatchStatus(now(), actions);
        status.onCheck(false, now());
        assertThat(status.dirty(), is(true));

        // actions, one action with state other than AWAITS_SUCCESSFUL_EXECUTION, unmet condition
        actions.clear();
        ActionStatus.AckStatus ackStatus = new ActionStatus.AckStatus(now(), randomFrom(State.ACKED, State.ACKABLE));
        actions.put(randomAsciiOfLength(10), new ActionStatus(ackStatus, null, null, null));
        actions.put(randomAsciiOfLength(11), new ActionStatus(now()));
        status = new WatchStatus(now(), actions);
        status.onCheck(false, now());
        assertThat(status.dirty(), is(true));

        status.resetDirty();
        assertThat(status.dirty(), is(false));
    }
}
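The deleted WatchStatusTests above exercised one contract: every condition check marks the status dirty, and resetDirty() clears the flag once the status has been written out. A minimal sketch of that contract (hypothetical class, not the x-pack WatchStatus):

// Hypothetical sketch of the dirty-flag contract exercised above:
// onCheck() always marks the status dirty, resetDirty() clears it again.
class StatusDirtyFlag {
    private boolean dirty;

    void onCheck(boolean conditionMet) {
        dirty = true;      // every check marks the status as needing a write
    }

    boolean dirty() {
        return dirty;
    }

    void resetDirty() {
        dirty = false;     // cleared once the status has been persisted
    }
}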
@ -1,528 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.watcher.watch;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.condition.AlwaysCondition;
import org.elasticsearch.xpack.watcher.condition.NeverCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
import org.junit.Before;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;

public class WatchStoreTests extends ESTestCase {
    private WatchStore watchStore;
    private WatcherClientProxy clientProxy;
    private Watch.Parser parser;

    @Before
    public void init() {
        clientProxy = mock(WatcherClientProxy.class);
        parser = mock(Watch.Parser.class);
        watchStore = new WatchStore(Settings.EMPTY, clientProxy, parser);
    }

    public void testStartNoPreviousWatchesIndex() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        MetaData.Builder metaDataBuilder = MetaData.builder();
        csBuilder.metaData(metaDataBuilder);
        ClusterState cs = csBuilder.build();

        assertThat(watchStore.validate(cs), is(true));
        watchStore.start(cs);
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches().size(), equalTo(0));
        verifyZeroInteractions(clientProxy);

        watchStore.start(cs);
        verifyZeroInteractions(clientProxy);
    }

    public void testStartPrimaryShardNotReady() {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        MetaData.Builder metaDataBuilder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        Settings settings = settings(Version.CURRENT)
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                .build();
        metaDataBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1));
        final Index index = metaDataBuilder.get(WatchStore.INDEX).getIndex();
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
        indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
                .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true,
                        ShardRoutingState.UNASSIGNED, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")))
                .build());
        indexRoutingTableBuilder.addReplica();
        routingTableBuilder.add(indexRoutingTableBuilder.build());
        csBuilder.metaData(metaDataBuilder);
        csBuilder.routingTable(routingTableBuilder.build());

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(false));
        verifyZeroInteractions(clientProxy);
    }

    public void testStartRefreshFailed() {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 0);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        ClusterState cs = csBuilder.build();

        assertThat(watchStore.validate(cs), is(true));
        try {
            watchStore.start(cs);
        } catch (Exception e) {
            assertThat(e.getMessage(), equalTo("not all required shards have been refreshed"));
        }
        verify(clientProxy, times(1)).refresh(any(RefreshRequest.class));
        verify(clientProxy, never()).search(any(SearchRequest.class), any(TimeValue.class));
        verify(clientProxy, never()).clearScroll(anyString());
    }

    public void testStartSearchFailed() {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        SearchResponse searchResponse = mockSearchResponse(1, 0, 0);
        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse);

        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(true));
        try {
            watchStore.start(cs);
        } catch (Exception e) {
            assertThat(e.getMessage(), equalTo("Partial response while loading watches"));
        }
        verify(clientProxy, times(1)).refresh(any(RefreshRequest.class));
        verify(clientProxy, times(1)).search(any(SearchRequest.class), any(TimeValue.class));
        verify(clientProxy, times(1)).clearScroll(anyString());
    }

    public void testStartNoWatchStored() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        SearchResponse searchResponse = mockSearchResponse(1, 1, 0);
        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse);

        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(true));
        watchStore.start(cs);
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches().size(), equalTo(0));
        verify(clientProxy, times(1)).refresh(any(RefreshRequest.class));
        verify(clientProxy, times(1)).search(any(SearchRequest.class), any(TimeValue.class));
        verify(clientProxy, times(1)).clearScroll(anyString());
    }

    public void testStartWatchStored() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        BytesReference source = new BytesArray("{}");
        InternalSearchHit hit1 = new InternalSearchHit(0, "_id1", new Text("type"), Collections.<String, SearchHitField>emptyMap());
        hit1.sourceRef(source);
        InternalSearchHit hit2 = new InternalSearchHit(1, "_id2", new Text("type"), Collections.<String, SearchHitField>emptyMap());
        hit2.sourceRef(source);
        SearchResponse searchResponse1 = mockSearchResponse(1, 1, 2, hit1, hit2);

        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse1);

        InternalSearchHit hit3 = new InternalSearchHit(2, "_id3", new Text("type"), Collections.<String, SearchHitField>emptyMap());
        hit3.sourceRef(source);
        InternalSearchHit hit4 = new InternalSearchHit(3, "_id4", new Text("type"), Collections.<String, SearchHitField>emptyMap());
        hit4.sourceRef(source);
        SearchResponse searchResponse2 = mockSearchResponse(1, 1, 2, hit3, hit4);
        SearchResponse searchResponse3 = mockSearchResponse(1, 1, 2);
        when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(searchResponse2, searchResponse3);

        Watch watch1 = mock(Watch.class);
        WatchStatus status = mock(WatchStatus.class);
        when(watch1.status()).thenReturn(status);
        Watch watch2 = mock(Watch.class);
        when(watch2.status()).thenReturn(status);
        Watch watch3 = mock(Watch.class);
        when(watch3.status()).thenReturn(status);
        Watch watch4 = mock(Watch.class);
        when(watch4.status()).thenReturn(status);
        when(parser.parse("_id1", true, source)).thenReturn(watch1);
        when(parser.parse("_id2", true, source)).thenReturn(watch2);
        when(parser.parse("_id3", true, source)).thenReturn(watch3);
        when(parser.parse("_id4", true, source)).thenReturn(watch4);

        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(true));
        watchStore.start(cs);
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches().size(), equalTo(4));
        verify(clientProxy, times(1)).refresh(any(RefreshRequest.class));
        verify(clientProxy, times(1)).search(any(SearchRequest.class), any(TimeValue.class));
        verify(clientProxy, times(2)).searchScroll(anyString(), any(TimeValue.class));
        verify(clientProxy, times(1)).clearScroll(anyString());
    }

    public void testUsageStats() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        BytesReference source = new BytesArray("{}");
        int hitCount = randomIntBetween(50, 100);
        int activeHitCount = 0;

        List<InternalSearchHit> hits = new ArrayList<>();
        for (int i = 0; i < hitCount; i++) {
            InternalSearchHit hit = new InternalSearchHit(0, "_id" + i, new Text("type"), Collections.<String, SearchHitField>emptyMap());
            hits.add(hit.sourceRef(source));

            Watch watch = mock(Watch.class);
            WatchStatus status = mock(WatchStatus.class);
            when(watch.status()).thenReturn(status);

            boolean isActive = usually();
            WatchStatus.State state = mock(WatchStatus.State.class);
            when(state.isActive()).thenReturn(isActive);
            when(status.state()).thenReturn(state);
            if (isActive) {
                activeHitCount++;
            }

            // random schedule
            ScheduleTrigger mockTricker = mock(ScheduleTrigger.class);
            when(watch.trigger()).thenReturn(mockTricker);
            when(mockTricker.type()).thenReturn("schedule");
            String scheduleType = randomFrom("a", "b", "c");
            Schedule mockSchedule = mock(Schedule.class);
            when(mockSchedule.type()).thenReturn(scheduleType);
            when(mockTricker.getSchedule()).thenReturn(mockSchedule);

            // either a none input, or null
            when(watch.input()).thenReturn(randomFrom(new ExecutableNoneInput(logger), null));

            // random conditions
            when(watch.condition()).thenReturn(randomFrom(AlwaysCondition.INSTANCE, null,
                    NeverCondition.INSTANCE));

            // random actions
            ActionWrapper actionWrapper = mock(ActionWrapper.class);
            ExecutableAction action = mock(ExecutableAction.class);
            when(actionWrapper.action()).thenReturn(action);
            when(action.type()).thenReturn(randomFrom("a", "b", "c"));
            when(watch.actions()).thenReturn(Arrays.asList(actionWrapper));

            // random transform, not always set
            Transform mockTransform = mock(Transform.class);
            when(mockTransform.type()).thenReturn("TYPE");

            @SuppressWarnings("unchecked")
            ExecutableTransform testTransform = new ExecutableTransform(mockTransform, logger) {
                @Override
                public Transform.Result execute(WatchExecutionContext ctx, Payload payload) {
                    return null;
                }
            };
            when(watch.transform()).thenReturn(randomFrom(testTransform, null));

            when(parser.parse("_id" + i, true, source)).thenReturn(watch);
        }

        SearchResponse searchResponse = mockSearchResponse(1, 1, hitCount, hits.toArray(new InternalSearchHit[] {}));
        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse);
        SearchResponse noHitsResponse = mockSearchResponse(1, 1, 2);
        when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(noHitsResponse);
        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        watchStore.start(cs);

        XContentSource stats = new XContentSource(jsonBuilder().map(watchStore.usageStats()));

        assertThat(stats.getValue("count.total"), is(hitCount));
        assertThat(stats.getValue("count.active"), is(activeHitCount));

        // schedule count
        int scheduleCountA = stats.getValue("watch.trigger.schedule.a.active");
        int scheduleCountB = stats.getValue("watch.trigger.schedule.b.active");
        int scheduleCountC = stats.getValue("watch.trigger.schedule.c.active");
        assertThat(scheduleCountA + scheduleCountB + scheduleCountC, is(activeHitCount));

        // input count
        assertThat(stats.getValue("watch.input.none.active"), is(greaterThan(0)));
        assertThat(stats.getValue("watch.input.none.total"), is(greaterThan(0)));
        assertThat(stats.getValue("watch.input.none.total"), is(lessThan(activeHitCount)));

        // condition count
        assertThat(stats.getValue("watch.condition.never.active"), is(greaterThan(0)));
        assertThat(stats.getValue("watch.condition.always.active"), is(greaterThan(0)));

        // action count
        int actionCountA = stats.getValue("watch.action.a.active");
        int actionCountB = stats.getValue("watch.action.b.active");
        int actionCountC = stats.getValue("watch.action.c.active");
        assertThat(actionCountA + actionCountB + actionCountC, is(activeHitCount));

        // transform count
        assertThat(stats.getValue("watch.transform.TYPE.active"), is(greaterThan(0)));
    }

    public void testThatCleaningWatchesWorks() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
        createWatchIndexMetaData(csBuilder);

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        BytesReference source = new BytesArray("{}");
        InternalSearchHit hit = new InternalSearchHit(0, "_id1", new Text("type"), Collections.emptyMap());
        hit.sourceRef(source);

        SearchResponse searchResponse = mockSearchResponse(1, 1, 1, hit);
        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse);

        SearchResponse finalSearchResponse = mockSearchResponse(1, 1, 0);
        when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(finalSearchResponse);

        Watch watch = mock(Watch.class);
        WatchStatus status = mock(WatchStatus.class);
        when(watch.status()).thenReturn(status);
        when(parser.parse("_id1", true, source)).thenReturn(watch);

        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(true));
        watchStore.start(cs);
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches(), hasSize(1));

        watchStore.clearWatchesInMemory();
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches(), hasSize(0));
        assertThat(watchStore.activeWatches(), hasSize(0));
    }

    // the elasticsearch migration helper is doing reindex using aliases, so we have to
    // make sure that the watch store supports a single alias pointing to the watch index
    public void testThatStartingWithWatchesIndexAsAliasWorks() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));

        MetaData.Builder metaDataBuilder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        Settings settings = settings(Version.CURRENT)
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                .build();
        metaDataBuilder.put(IndexMetaData.builder("watches-alias").settings(settings).numberOfShards(1).numberOfReplicas(1)
                .putAlias(new AliasMetaData.Builder(WatchStore.INDEX).build()));

        final Index index = metaDataBuilder.get("watches-alias").getIndex();
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
        indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
                .addShard(TestShardRouting.newShardRouting("watches-alias", 0, "_node_id", null, true, ShardRoutingState.STARTED))
                .build());
        indexRoutingTableBuilder.addReplica();
        routingTableBuilder.add(indexRoutingTableBuilder.build());
        csBuilder.metaData(metaDataBuilder);
        csBuilder.routingTable(routingTableBuilder.build());

        RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
        when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);

        BytesReference source = new BytesArray("{}");
        InternalSearchHit hit1 = new InternalSearchHit(0, "_id1", new Text("type"), Collections.emptyMap());
        hit1.sourceRef(source);
        InternalSearchHit hit2 = new InternalSearchHit(1, "_id2", new Text("type"), Collections.emptyMap());
        hit2.sourceRef(source);
        SearchResponse searchResponse1 = mockSearchResponse(1, 1, 2, hit1, hit2);

        when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse1);

        InternalSearchHit hit3 = new InternalSearchHit(2, "_id3", new Text("type"), Collections.emptyMap());
        hit3.sourceRef(source);
        InternalSearchHit hit4 = new InternalSearchHit(3, "_id4", new Text("type"), Collections.emptyMap());
        hit4.sourceRef(source);
        SearchResponse searchResponse2 = mockSearchResponse(1, 1, 2, hit3, hit4);
        SearchResponse searchResponse3 = mockSearchResponse(1, 1, 2);
        when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(searchResponse2, searchResponse3);

        Watch watch1 = mock(Watch.class);
        WatchStatus status = mock(WatchStatus.class);
        when(watch1.status()).thenReturn(status);
        Watch watch2 = mock(Watch.class);
        when(watch2.status()).thenReturn(status);
        Watch watch3 = mock(Watch.class);
        when(watch3.status()).thenReturn(status);
        Watch watch4 = mock(Watch.class);
        when(watch4.status()).thenReturn(status);
        when(parser.parse("_id1", true, source)).thenReturn(watch1);
        when(parser.parse("_id2", true, source)).thenReturn(watch2);
        when(parser.parse("_id3", true, source)).thenReturn(watch3);
        when(parser.parse("_id4", true, source)).thenReturn(watch4);

        when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(true));
        watchStore.start(cs);
        assertThat(watchStore.started(), is(true));
        assertThat(watchStore.watches().size(), equalTo(4));
        verify(clientProxy, times(1)).refresh(any(RefreshRequest.class));
        verify(clientProxy, times(1)).search(any(SearchRequest.class), any(TimeValue.class));
        verify(clientProxy, times(1)).clearScroll(anyString());
    }

    // the elasticsearch migration helper is doing reindex using aliases, so we have to
    // make sure that the watch store supports only a single index in an alias
    public void testThatWatchesIndexWithTwoAliasesFails() throws Exception {
        ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));

        MetaData.Builder metaDataBuilder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        Settings settings = settings(Version.CURRENT)
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                .build();
        metaDataBuilder.put(IndexMetaData.builder("watches-alias").settings(settings).numberOfShards(1).numberOfReplicas(1)
                .putAlias(new AliasMetaData.Builder(WatchStore.INDEX).build()));
        metaDataBuilder.put(IndexMetaData.builder("whatever").settings(settings).numberOfShards(1).numberOfReplicas(1)
                .putAlias(new AliasMetaData.Builder(WatchStore.INDEX).build()));

        final Index index = metaDataBuilder.get("watches-alias").getIndex();
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
        indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
                .addShard(TestShardRouting.newShardRouting("watches-alias", 0, "_node_id", null, true, ShardRoutingState.STARTED))
                .build());
        indexRoutingTableBuilder.addReplica();
        final Index otherIndex = metaDataBuilder.get("whatever").getIndex();
        IndexRoutingTable.Builder otherIndexRoutingTableBuilder = IndexRoutingTable.builder(otherIndex);
        otherIndexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
                .addShard(TestShardRouting.newShardRouting("whatever", 0, "_node_id", null, true, ShardRoutingState.STARTED))
                .build());
        otherIndexRoutingTableBuilder.addReplica();
        routingTableBuilder.add(otherIndexRoutingTableBuilder.build());
        csBuilder.metaData(metaDataBuilder);
        csBuilder.routingTable(routingTableBuilder.build());

        ClusterState cs = csBuilder.build();
        assertThat(watchStore.validate(cs), is(false));
        IllegalStateException exception = expectThrows(IllegalStateException.class, () -> watchStore.start(cs));
        assertThat(exception.getMessage(), is("Alias [.watches] points to more than one index"));
    }

    /*
     * Creates the standard cluster state metadata for the watches index
     * with shards/replicas being marked as started
     */
    private void createWatchIndexMetaData(ClusterState.Builder builder) {
        MetaData.Builder metaDataBuilder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        Settings settings = settings(Version.CURRENT)
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                .build();
        metaDataBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1));
        final Index index = metaDataBuilder.get(WatchStore.INDEX).getIndex();
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
        indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
                .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED))
                .build());
        indexRoutingTableBuilder.addReplica();
        routingTableBuilder.add(indexRoutingTableBuilder.build());
        builder.metaData(metaDataBuilder);
        builder.routingTable(routingTableBuilder.build());
    }

    private RefreshResponse mockRefreshResponse(int total, int successful) {
        RefreshResponse refreshResponse = mock(RefreshResponse.class);
        when(refreshResponse.getTotalShards()).thenReturn(total);
        when(refreshResponse.getSuccessfulShards()).thenReturn(successful);
        return refreshResponse;
    }

    private SearchResponse mockSearchResponse(int total, int successful, int totalHits, InternalSearchHit... hits) {
        InternalSearchHits internalSearchHits = new InternalSearchHits(hits, totalHits, 1f);
        SearchResponse searchResponse = mock(SearchResponse.class);
        when(searchResponse.getTotalShards()).thenReturn(total);
        when(searchResponse.getSuccessfulShards()).thenReturn(successful);
        when(searchResponse.getHits()).thenReturn(internalSearchHits);
        return searchResponse;
    }
}
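testThatWatchesIndexWithTwoAliasesFails above expects validation to reject an alias that resolves to more than one concrete index. A minimal sketch of that check (hypothetical helper, not the WatchStore implementation):

import java.util.List;

// Hypothetical sketch of the alias validation exercised above: an alias such as
// ".watches" may resolve to at most one concrete index, otherwise startup fails.
final class AliasValidation {
    static String resolveSingleIndex(String alias, List<String> concreteIndices) {
        if (concreteIndices.size() > 1) {
            throw new IllegalStateException("Alias [" + alias + "] points to more than one index");
        }
        return concreteIndices.isEmpty() ? null : concreteIndices.get(0);
    }
}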
@ -35,10 +35,6 @@

  - match: { _id: "my_watch" }

  - do:
      cluster.health:
        wait_for_status: yellow

  - do:
      xpack.watcher.get_watch:
        id: "my_watch"
@ -51,7 +47,7 @@
      xpack.watcher.deactivate_watch:
        watch_id: "my_watch"

  - match: { "_status.state.active" : false }
  - match: { _status.state.active : false }

  - do:
      xpack.watcher.get_watch:
@ -64,7 +60,7 @@
      xpack.watcher.activate_watch:
        watch_id: "my_watch"

  - match: { "_status.state.active" : true }
  - match: { _status.state.active : true }

  - do:
      xpack.watcher.get_watch:

@ -52,5 +52,5 @@ teardown:
        id: "my_watch"
  - match: { found : true}
  - match: { _id: "my_watch" }
  - is_true: _status.version
  - is_true: watch
  - is_false: watch.status

@ -4,6 +4,9 @@
      cluster.health:
        wait_for_status: yellow

  - do:
      indices.create:
        index: .watches

  - do:
      catch: missing

@ -93,8 +93,6 @@
  - is_true: graph.available
  - is_true: monitoring.enabled
  - is_true: monitoring.available
  - gte: { watcher.count.total: 0 }
  - gte: { watcher.count.active: 0 }

  - do:
      xpack.info:

@ -4,11 +4,6 @@
      cluster.health:
        wait_for_status: yellow

  - do: {xpack.watcher.stats:{}}
  - match: { "watcher_state": "started" }
  - match: { "watch_count": 0 }

  - do:
      xpack.watcher.put_watch:
        id: "test_watch"
@ -47,9 +42,6 @@
  - match: { _id: "test_watch" }
  - match: { created: true }

  - do: {xpack.watcher.stats:{}}
  - match: { "watch_count": 1 }

  # Simulate a Thread.sleep()
  - do:
      catch: request_timeout
Some files were not shown because too many files have changed in this diff.