diff --git a/elasticsearch/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/elasticsearch/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index a2dc5591856..4b4f7e04ce1 100644 --- a/elasticsearch/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/elasticsearch/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -22,7 +23,6 @@ import org.junit.Before; import org.junit.BeforeClass; import java.net.InetSocketAddress; -import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; @@ -83,12 +83,9 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase { @Before public void enableExporter() throws Exception { - InetSocketAddress httpAddress = randomFrom(httpAddresses()); - URI uri = new URI("https", null, httpAddress.getHostString(), httpAddress.getPort(), "/", null, null); - Settings exporterSettings = Settings.builder() .put("xpack.monitoring.exporters._http.enabled", true) - .put("xpack.monitoring.exporters._http.host", uri.toString()) + .put("xpack.monitoring.exporters._http.host", "https://" + NetworkAddress.format(randomFrom(httpAddresses()))) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings)); } diff --git a/elasticsearch/qa/smoke-test-watcher-with-groovy/src/test/resources/rest-api-spec/test/watcher_groovy/50_script_condition.yaml b/elasticsearch/qa/smoke-test-watcher-with-groovy/src/test/resources/rest-api-spec/test/watcher_groovy/50_script_condition.yaml index 3a876f98531..d8ee955f1ab 100644 --- a/elasticsearch/qa/smoke-test-watcher-with-groovy/src/test/resources/rest-api-spec/test/watcher_groovy/50_script_condition.yaml +++ b/elasticsearch/qa/smoke-test-watcher-with-groovy/src/test/resources/rest-api-spec/test/watcher_groovy/50_script_condition.yaml @@ -70,6 +70,8 @@ - match: { _id: "watch_with_groovy_closure" } - do: + warnings: + - '[groovy] scripts are deprecated, use [painless] scripts instead' xpack.watcher.execute_watch: id: "watch_with_groovy_closure" body: > @@ -118,6 +120,8 @@ { "status": "red", "@timestamp": "2005-01-01T00:01:55" } - do: + warnings: + - '[groovy] scripts are deprecated, use [painless] scripts instead' xpack.watcher.execute_watch: id: "watch_with_groovy_closure" body: > diff --git a/elasticsearch/x-dev-tools/create_bwc_indexes.py b/elasticsearch/x-dev-tools/create_bwc_indexes.py index cbb7def4a2d..9db4c80796c 100644 --- a/elasticsearch/x-dev-tools/create_bwc_indexes.py +++ b/elasticsearch/x-dev-tools/create_bwc_indexes.py @@ -16,7 +16,7 @@ # Creates indices with old versions of elasticsearch. These indices are used by x-pack plugins like security # to test if the import of metadata that is stored in elasticsearch indexes works correctly. # This tool will start a node on port 9200/9300. 
If a node is already running on that port then the script will fail. -# Currently this script can only deal with versions >=2.3X and < 5.0. Needs more work for versions before or after. +# Currently this script can only deal with versions >=2.0.0 and < 5.0. Needs more work for versions before or after. # # Run from x-plugins root directory like so: # python3 ./elasticsearch/x-dev-tools/create_bwc_indexes.py 2.3.4 @@ -50,6 +50,7 @@ try: from elasticsearch import Elasticsearch from elasticsearch.exceptions import ConnectionError from elasticsearch.exceptions import TransportError + from elasticsearch.exceptions import NotFoundError from elasticsearch.client import IndicesClient except ImportError as e: print('Can\'t import elasticsearch please install `sudo pip3 install elasticsearch`') @@ -80,7 +81,10 @@ def start_node(version, release_dir, data_dir): return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) def install_plugin(version, release_dir, plugin_name): - run_plugin(version, release_dir, 'install', [plugin_name]) + args = [plugin_name] + if parse_version(version) >= parse_version('2.2.0'): + args = [plugin_name, '--batch'] + run_plugin(version, release_dir, 'install', args) def remove_plugin(version, release_dir, plugin_name): run_plugin(version, release_dir, 'remove', [plugin_name]) @@ -96,9 +100,8 @@ def create_client(): client = Elasticsearch([{'host': 'localhost', 'port': 9200, 'http_auth':'es_admin:0123456789'}]) health = client.cluster.health(wait_for_nodes=1) return client - except Exception as e: - logging.info('got exception while waiting for cluster' + str(e)) - pass + except ConnectionError: + logging.info('Not started yet...') time.sleep(1) assert False, 'Timed out waiting for node for %s seconds' % timeout @@ -113,11 +116,17 @@ def generate_security_index(client, version): "roles" : [ "bwc_test_role" ] } - response = requests.put('http://localhost:9200/_shield/user/bwc_test_user', auth=('es_admin', '0123456789'), data=json.dumps(body)) - logging.info('put user reponse: ' + response.text) - if (response.status_code != 200) : + while True: + response = requests.put('http://localhost:9200/_shield/user/bwc_test_user', auth=('es_admin', '0123456789'), data=json.dumps(body)) + logging.info('put user reponse: ' + response.text) + if response.status_code == 200: + break + else: + if 'service has not been started' in response.text: + continue raise Exception('PUT http://localhost:9200/_shield/role/bwc_test_role did not succeed!') + # add a role body = { "cluster": ["all"], @@ -154,6 +163,154 @@ def generate_security_index(client, version): health = client.cluster.health(wait_for_status='yellow', wait_for_relocating_shards=0, index='.security') assert health['timed_out'] == False, 'cluster health timed out %s' % health +# this adds a couple of watches and waits for the the watch_history to accumulate some results +def generate_watcher_index(client, version): + logging.info('Adding a watch') + body = { + "trigger" : { + "schedule": { + "interval": "1s" + } + }, + "input" : { + "search" : { + "timeout": "100s", + "request" : { + "indices" : [ ".watches" ], + "body" : { + "query" : { "match_all" : {}}, + "size": 1 + }, + } + } + }, + "condition" : { + "always" : {} + }, + "throttle_period": "1s", + "actions" : { + "index_payload" : { + "transform" : { + "search" : { + "request" : { + "body" : { "size": 1, "query" : { "match_all" : {} }} + }, + "timeout": "100s" + } + }, + "index" : { + "index" : "bwc_watch_index", + "doc_type" : "bwc_watch_type", + "timeout": 
"100s" + } + } + } + } + response = requests.put('http://localhost:9200/_watcher/watch/bwc_watch', auth=('es_admin', '0123456789'), data=json.dumps(body)) + logging.info('PUT watch response: ' + response.text) + if (response.status_code != 201) : + raise Exception('PUT http://localhost:9200/_watcher/watch/bwc_watch did not succeed!') + + logging.info('Adding a watch with "fun" throttle periods') + body = { + "trigger" : { + "schedule": { + "interval": "1s" + } + }, + "condition" : { + "never" : {} + }, + "throttle_period": "100s", + "actions" : { + "index_payload" : { + "throttle_period": "100s", + "transform" : { + "search" : { + "request" : { + "body" : { "size": 1, "query" : { "match_all" : {} }} + } + } + }, + "index" : { + "index" : "bwc_watch_index", + "doc_type" : "bwc_watch_type" + } + } + } + } + response = requests.put('http://localhost:9200/_watcher/watch/bwc_throttle_period', auth=('es_admin', '0123456789'), data=json.dumps(body)) + logging.info('PUT watch response: ' + response.text) + if (response.status_code != 201) : + raise Exception('PUT http://localhost:9200/_watcher/watch/bwc_throttle_period did not succeed!') + + if parse_version(version) < parse_version('2.3.0'): + logging.info('Skipping watch with a funny read timeout because email attachement is not supported by this version') + else: + logging.info('Adding a watch with a funny read timeout') + body = { + "trigger" : { + "schedule": { + "interval": "100s" + } + }, + "condition": { + "never": {} + }, + "actions": { + "work": { + "email": { + "to": "email@domain.com", + "subject": "Test Kibana PDF report", + "attachments": { + "test_report.pdf": { + "http": { + "content_type": "application/pdf", + "request": { + "read_timeout": "100s", + "scheme": "https", + "host": "example.com", + "path":"{{ctx.metadata.report_url}}", + "port": 8443, + "auth": { + "basic": { + "username": "Aladdin", + "password": "open sesame" + } + } + } + } + } + } + } + } + } + } + response = requests.put('http://localhost:9200/_watcher/watch/bwc_funny_timeout', auth=('es_admin', '0123456789'), data=json.dumps(body)) + logging.info('PUT watch response: ' + response.text) + if (response.status_code != 201) : + raise Exception('PUT http://localhost:9200/_watcher/watch/bwc_funny_timeout did not succeed!') + + # wait to accumulate some watches + logging.info('Waiting for watch results index to fill up...') + for attempt in range(1, 31): + try: + response = client.search(index="bwc_watch_index", body={"query": {"match_all": {}}}) + logging.info('(' + str(attempt) + ') Got ' + str(response['hits']['total']) + ' hits and want 10...') + if response['hits']['total'] >= 10: + break + except NotFoundError: + logging.info('(' + str(attempt) + ') Not found, retrying') + time.sleep(1) + + health = client.cluster.health(wait_for_status='yellow', wait_for_relocating_shards=0, index='.watches') + assert health['timed_out'] == False, 'cluster health timed out %s' % health + health = client.cluster.health(wait_for_status='yellow', wait_for_relocating_shards=0, index='.watch_history*') + assert health['timed_out'] == False, 'cluster health timed out %s' % health + health = client.cluster.health(wait_for_status='yellow', wait_for_relocating_shards=0, index='bwc_watch_index') + assert health['timed_out'] == False, 'cluster health timed out %s' % health + + def compress_index(version, tmp_dir, output_dir): compress(tmp_dir, output_dir, 'x-pack-%s.zip' % version, 'data') @@ -232,50 +389,52 @@ def main(): logging.getLogger('urllib3').setLevel(logging.WARN) cfg = 
parse_config() for version in cfg.versions: - if parse_version(version) < parse_version('2.3.0'): - logging.info('version is ' + version + ' but shield supports native realm oly from 2.3.0 on. nothing to do.') - continue - else: - logging.info('--> Creating x-pack index for %s' % version) + logging.info('--> Creating x-pack index for %s' % version) - # setup for starting nodes - release_dir = os.path.join(cfg.releases_dir, 'elasticsearch-%s' % version) - if not os.path.exists(release_dir): - raise RuntimeError('ES version %s does not exist in %s' % (version, cfg.releases_dir)) - tmp_dir = tempfile.mkdtemp() - data_dir = os.path.join(tmp_dir, 'data') - logging.info('Temp data dir: %s' % data_dir) - node = None + # setup for starting nodes + release_dir = os.path.join(cfg.releases_dir, 'elasticsearch-%s' % version) + if not os.path.exists(release_dir): + raise RuntimeError('ES version %s does not exist in %s' % (version, cfg.releases_dir)) + tmp_dir = tempfile.mkdtemp() + data_dir = os.path.join(tmp_dir, 'data') + logging.info('Temp data dir: %s' % data_dir) + node = None - try: + try: - # install plugins - remove_plugin(version, release_dir, 'license') - remove_plugin(version, release_dir, 'shield') - # remove the shield config too before fresh install - run('rm -rf %s' %(os.path.join(release_dir, 'config/shield'))) - install_plugin(version, release_dir, 'license') - install_plugin(version, release_dir, 'shield') - # here we could also install watcher etc + # install plugins + remove_plugin(version, release_dir, 'license') + remove_plugin(version, release_dir, 'shield') + remove_plugin(version, release_dir, 'watcher') + # remove the shield config too before fresh install + run('rm -rf %s' %(os.path.join(release_dir, 'config/shield'))) + install_plugin(version, release_dir, 'license') + install_plugin(version, release_dir, 'shield') + install_plugin(version, release_dir, 'watcher') + # here we could also install watcher etc - # create admin - run('%s useradd es_admin -r admin -p 0123456789' %(os.path.join(release_dir, 'bin/shield/esusers'))) - node = start_node(version, release_dir, data_dir) + # create admin + run('%s useradd es_admin -r admin -p 0123456789' %(os.path.join(release_dir, 'bin/shield/esusers'))) + node = start_node(version, release_dir, data_dir) - # create a client that authenticates as es_admin - client = create_client() + # create a client that authenticates as es_admin + client = create_client() + if parse_version(version) < parse_version('2.3.0'): + logging.info('Version is ' + version + ' but shield supports native realm oly from 2.3.0 on. 
Nothing to do for Shield.') + else: generate_security_index(client, version) - # here we could also add watches, monitoring etc + generate_watcher_index(client, version) + # here we could also add watches, monitoring etc + shutdown_node(node) + node = None + compress_index(version, tmp_dir, cfg.output_dir) + finally: + + if node is not None: + # This only happens if we've hit an exception: shutdown_node(node) - node = None - compress_index(version, tmp_dir, cfg.output_dir) - finally: - - if node is not None: - # This only happens if we've hit an exception: - shutdown_node(node) - shutil.rmtree(tmp_dir) + shutil.rmtree(tmp_dir) if __name__ == '__main__': try: diff --git a/elasticsearch/x-pack/build.gradle b/elasticsearch/x-pack/build.gradle index e314451cdef..72a5870f092 100644 --- a/elasticsearch/x-pack/build.gradle +++ b/elasticsearch/x-pack/build.gradle @@ -51,6 +51,10 @@ dependencies { // needed for subethasmtp, has @GuardedBy annotation testCompile 'com.google.code.findbugs:jsr305:3.0.1' + // monitoring deps + compile "org.elasticsearch.client:rest:${version}" + compile "org.elasticsearch.client:sniffer:${version}" + // common test deps testCompile 'org.elasticsearch:securemock:1.2' testCompile 'org.slf4j:slf4j-log4j12:1.6.2' diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index b5d9f755606..eae0673538a 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; @@ -63,15 +62,13 @@ public class Monitoring implements ActionPlugin { public static final String NAME = "monitoring"; private final Settings settings; - private final Environment env; private final XPackLicenseState licenseState; private final boolean enabled; private final boolean transportClientMode; private final boolean tribeNode; - public Monitoring(Settings settings, Environment env, XPackLicenseState licenseState) { + public Monitoring(Settings settings, XPackLicenseState licenseState) { this.settings = settings; - this.env = env; this.licenseState = licenseState; this.enabled = XPackSettings.MONITORING_ENABLED.get(settings); this.transportClientMode = XPackPlugin.transportClientMode(settings); @@ -107,10 +104,10 @@ public class Monitoring implements ActionPlugin { final MonitoringSettings monitoringSettings = new MonitoringSettings(settings, clusterSettings); final CleanerService cleanerService = new CleanerService(settings, clusterSettings, threadPool, licenseState); - // TODO do exporters and their ssl config really need to be dynamic? 
https://github.com/elastic/x-plugins/issues/3117 + // TODO: https://github.com/elastic/x-plugins/issues/3117 (remove dynamic need with static exporters) final SSLService dynamicSSLService = sslService.createDynamicSSLService(); Map exporterFactories = new HashMap<>(); - exporterFactories.put(HttpExporter.TYPE, config -> new HttpExporter(config, env, dynamicSSLService)); + exporterFactories.put(HttpExporter.TYPE, config -> new HttpExporter(config, dynamicSSLService)); exporterFactories.put(LocalExporter.TYPE, config -> new LocalExporter(config, client, clusterService, cleanerService)); final Exporters exporters = new Exporters(settings, exporterFactories, clusterService); diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterInfoMonitoringDoc.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterInfoMonitoringDoc.java index ed331f59950..e6262d171a8 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterInfoMonitoringDoc.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterInfoMonitoringDoc.java @@ -7,13 +7,17 @@ package org.elasticsearch.xpack.monitoring.collector.cluster; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.license.License; +import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; +import java.util.List; + public class ClusterInfoMonitoringDoc extends MonitoringDoc { private String clusterName; private String version; private License license; + private List usage; private ClusterStatsResponse clusterStats; public ClusterInfoMonitoringDoc(String monitoringId, String monitoringVersion) { @@ -44,6 +48,14 @@ public class ClusterInfoMonitoringDoc extends MonitoringDoc { this.license = license; } + public List getUsage() { + return usage; + } + + public void setUsage(List usage) { + this.usage = usage; + } + public ClusterStatsResponse getClusterStats() { return clusterStats; } diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java index 7101113d60e..bc83c422e5c 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java @@ -13,10 +13,13 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.XPackFeatureSet; +import org.elasticsearch.xpack.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.monitoring.MonitoringSettings; import org.elasticsearch.xpack.monitoring.collector.AbstractCollector; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; @@ -60,25 
+63,17 @@ public class ClusterStatsCollector extends AbstractCollector { @Override protected Collection doCollect() throws Exception { - List results = new ArrayList<>(1); + final Supplier clusterStatsSupplier = + () -> client.admin().cluster().prepareClusterStats().get(monitoringSettings.clusterStatsTimeout()); + final Supplier> usageSupplier = () -> new XPackUsageRequestBuilder(client).get().getUsages(); - // Retrieves cluster stats - ClusterStatsResponse clusterStats = null; - try { - clusterStats = client.admin().cluster().prepareClusterStats().get(monitoringSettings.clusterStatsTimeout()); - } catch (ElasticsearchSecurityException e) { - if (LicenseUtils.isLicenseExpiredException(e)) { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "collector [{}] - unable to collect data because of expired license", name()), e); - } else { - throw e; - } - } + final ClusterStatsResponse clusterStats = clusterStatsSupplier.get(); - long timestamp = System.currentTimeMillis(); - String clusterUUID = clusterUUID(); - DiscoveryNode sourceNode = localNode(); + final long timestamp = System.currentTimeMillis(); + final String clusterUUID = clusterUUID(); + final DiscoveryNode sourceNode = localNode(); + + final List results = new ArrayList<>(1); // Adds a cluster info document ClusterInfoMonitoringDoc clusterInfoDoc = new ClusterInfoMonitoringDoc(monitoringId(), monitoringVersion()); @@ -89,6 +84,7 @@ public class ClusterStatsCollector extends AbstractCollector { clusterInfoDoc.setVersion(Version.CURRENT.toString()); clusterInfoDoc.setLicense(licenseService.getLicense()); clusterInfoDoc.setClusterStats(clusterStats); + clusterInfoDoc.setUsage(collect(usageSupplier)); results.add(clusterInfoDoc); // Adds a cluster stats document @@ -103,4 +99,21 @@ public class ClusterStatsCollector extends AbstractCollector { return Collections.unmodifiableCollection(results); } + + @Nullable + private T collect(final Supplier supplier) { + try { + return supplier.get(); + } catch (ElasticsearchSecurityException e) { + if (LicenseUtils.isLicenseExpiredException(e)) { + logger.trace((Supplier) () -> new ParameterizedMessage( + "collector [{}] - unable to collect data because of expired license", name()), e); + } else { + throw e; + } + } + + return null; + } + } diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java index 7ee8ed43388..4b0515e428d 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportBulk.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.monitoring.exporter; import java.util.Collection; +import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; /** @@ -18,11 +19,15 @@ public abstract class ExportBulk { private final AtomicReference state = new AtomicReference<>(State.INITIALIZING); public ExportBulk(String name) { - this.name = name; + this.name = Objects.requireNonNull(name); } - @Override - public String toString() { + /** + * Get the name used for any logging messages. + * + * @return Never {@code null}. 
+ */ + public String getName() { return name; } diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java index 6a58e1ce344..cbb8abb3514 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java @@ -5,11 +5,7 @@ */ package org.elasticsearch.xpack.monitoring.exporter; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.monitoring.MonitoringSettings; @@ -25,7 +21,6 @@ public abstract class Exporter implements AutoCloseable { public static final String EXPORT_PIPELINE_NAME = "xpack_monitoring_" + MonitoringTemplateUtils.TEMPLATE_VERSION; public static final String INDEX_NAME_TIME_FORMAT_SETTING = "index.name.time_format"; - public static final String BULK_TIMEOUT_SETTING = "bulk.timeout"; /** * Every {@code Exporter} adds the ingest pipeline to bulk requests, but they should, at the exporter level, allow that to be disabled. *

@@ -34,16 +29,11 @@ public abstract class Exporter implements AutoCloseable { public static final String USE_INGEST_PIPELINE_SETTING = "use_ingest"; protected final Config config; - protected final Logger logger; - - @Nullable protected final TimeValue bulkTimeout; private AtomicBoolean closed = new AtomicBoolean(false); public Exporter(Config config) { this.config = config; - this.logger = config.logger(getClass()); - this.bulkTimeout = config.settings().getAsTime(BULK_TIMEOUT_SETTING, null); } public String name() { @@ -82,7 +72,11 @@ public abstract class Exporter implements AutoCloseable { protected abstract void doClose(); - protected String settingFQN(String setting) { + protected static String settingFQN(final Config config) { + return MonitoringSettings.EXPORTERS_SETTINGS.getKey() + config.name; + } + + protected static String settingFQN(final Config config, final String setting) { return MonitoringSettings.EXPORTERS_SETTINGS.getKey() + config.name + "." + setting; } @@ -119,13 +113,11 @@ public abstract class Exporter implements AutoCloseable { private final String name; private final String type; private final boolean enabled; - private final Settings globalSettings; private final Settings settings; - public Config(String name, String type, Settings globalSettings, Settings settings) { + public Config(String name, String type, Settings settings) { this.name = name; this.type = type; - this.globalSettings = globalSettings; this.settings = settings; this.enabled = settings.getAsBoolean("enabled", true); } @@ -146,9 +138,6 @@ public abstract class Exporter implements AutoCloseable { return settings; } - public Logger logger(Class clazz) { - return Loggers.getLogger(clazz, globalSettings, name); - } } /** A factory for constructing {@link Exporter} instances.*/ diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java index 1fea90d6a04..f504b72686e 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; -import org.elasticsearch.node.Node; import org.elasticsearch.xpack.monitoring.MonitoringSettings; import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; @@ -117,11 +116,6 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable initExporters(Settings settings) { - Settings globalSettings = Settings.builder() - .put(settings) - .put(Node.NODE_NAME_SETTING.getKey(), nodeName()) - .build(); - Set singletons = new HashSet<>(); Map exporters = new HashMap<>(); boolean hasDisabled = false; @@ -135,7 +129,7 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable params; + + /** + * Resolvers are used to render monitoring documents into JSON. + */ + private final ResolversRegistry registry; + + /** + * The bytes payload that represents the bulk body is created via {@link #doAdd(Collection)}. 
+ */ + private byte[] payload = null; + + public HttpExportBulk(final String name, final RestClient client, final Map parameters, + final ResolversRegistry registry) { + super(name); + + this.client = client; + this.params = parameters; + this.registry = registry; + } + + @Override + public void doAdd(Collection docs) throws ExportException { + try { + if (docs != null && docs.isEmpty() == false) { + try (final BytesStreamOutput payload = new BytesStreamOutput()) { + for (MonitoringDoc monitoringDoc : docs) { + // any failure caused by an individual doc will be written as an empty byte[], thus not impacting the rest + payload.write(toBulkBytes(monitoringDoc)); + } + + // store the payload until we flush + this.payload = BytesReference.toBytes(payload.bytes()); + } + } + } catch (Exception e) { + throw new ExportException("failed to add documents to export bulk [{}]", e, name); + } + } + + @Override + public void doFlush() throws ExportException { + if (payload == null) { + throw new ExportException("unable to send documents because none were loaded for export bulk [{}]", name); + } else if (payload.length != 0) { + final HttpEntity body = new ByteArrayEntity(payload, ContentType.APPLICATION_JSON); + + client.performRequestAsync("POST", "/_bulk", params, body, HttpExportBulkResponseListener.INSTANCE); + + // free the memory + payload = null; + } + } + + @Override + protected void doClose() { + // nothing serious to do at this stage + assert payload == null; + } + + private byte[] toBulkBytes(final MonitoringDoc doc) throws IOException { + final XContentType xContentType = XContentType.JSON; + final XContent xContent = xContentType.xContent(); + + try (final BytesStreamOutput out = new BytesStreamOutput()) { + MonitoringIndexNameResolver resolver = registry.getResolver(doc); + + if (resolver != null) { + String index = resolver.index(doc); + String type = resolver.type(doc); + String id = resolver.id(doc); + + try (XContentBuilder builder = new XContentBuilder(xContent, out)) { + // Builds the bulk action metadata line + builder.startObject(); + builder.startObject("index"); + builder.field("_index", index); + builder.field("_type", type); + if (id != null) { + builder.field("_id", id); + } + builder.endObject(); + builder.endObject(); + } + + // Adds action metadata line bulk separator + out.write(xContent.streamSeparator()); + + // Render the monitoring document + BytesRef bytesRef = resolver.source(doc, xContentType).toBytesRef(); + out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length); + + // Adds final bulk separator + out.write(xContent.streamSeparator()); + + logger.trace("added index request [index={}, type={}, id={}]", index, type, id); + } else { + logger.error("no resolver found for monitoring document [class={}, id={}, version={}]", + doc.getClass().getName(), doc.getMonitoringId(), doc.getMonitoringVersion()); + } + + return BytesReference.toBytes(out.bytes()); + } catch (Exception e) { + logger.warn((Supplier) () -> new ParameterizedMessage("failed to render document [{}], skipping it [{}]", doc, name), e); + + return BytesRef.EMPTY_BYTES; + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListener.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListener.java new file mode 100644 index 00000000000..1b289f73d82 --- /dev/null +++ 
b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListener.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.Objects; + +/** + * {@code HttpExportBulkResponseListener} logs issues based on the response, but otherwise does nothing else. + */ +class HttpExportBulkResponseListener implements ResponseListener { + + private static final Logger logger = Loggers.getLogger(HttpExportBulkResponseListener.class); + + /** + * Singleton instance. + */ + public static final HttpExportBulkResponseListener INSTANCE = new HttpExportBulkResponseListener(XContentType.JSON.xContent()); + + /** + * The response content type. + */ + private final XContent xContent; + + /** + * Create a new {@link HttpExportBulkResponseListener}. + * + * @param xContent The {@code XContent} to use for parsing the response. + */ + HttpExportBulkResponseListener(final XContent xContent) { + this.xContent = Objects.requireNonNull(xContent); + } + + /** + * Success is relative with bulk responses because unless it's rejected outright, it returns with a 200. + *

+ * Individual documents can fail and since we know how we're making them, that means that individual failures need to be checked for and logged. + */ + @Override + public void onSuccess(final Response response) { + try (final XContentParser parser = xContent.createParser(response.getEntity().getContent())) { + // avoid parsing the entire payload if we don't need to + XContentParser.Token token = parser.nextToken(); + + if (token == XContentParser.Token.START_OBJECT) { + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("errors".equals(currentFieldName)) { + // no errors? then we can stop looking + if (parser.booleanValue() == false) { + return; + } + } + } else if (token == XContentParser.Token.START_ARRAY) { + // note: this assumes that "items" is the only array portion of the response (currently true) + parseErrors(parser); + return; + } + } + } + } catch (IOException | RuntimeException e) { + onError("unexpected exception while verifying bulk response", e); + } + } + + /** + * Logs every error field's value until it hits the end of an array. + * + * @param parser The bulk response parser + * @throws IOException if any parsing error occurs + */ + private void parseErrors(final XContentParser parser) throws IOException { + XContentParser.Token token; + String currentFieldName = null; + + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("error".equals(currentFieldName)) { + onItemError(parser.text()); + } + } + } + } + + /** + * Log obvious failures. + *

+ * In the future, we should queue replayable failures. + */ + @Override + public void onFailure(final Exception exception) { + // queueable exceptions: + // - RestStatus.TOO_MANY_REQUESTS.getStatus() + // - possibly other, non-ResponseExceptions + onError("bulk request failed unexpectedly", exception); + } + + void onError(final String msg, final Throwable cause) { + logger.warn(msg, cause); + } + + void onItemError(final String text) { + logger.warn("unexpected error while indexing monitoring document: [{}]", text); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java index 590aba4eeca..fb9fb7f1f5c 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java @@ -5,93 +5,114 @@ */ package org.elasticsearch.xpack.monitoring.exporter.http; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.SpecialPermission; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.apache.logging.log4j.Logger; +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.message.BasicHeader; import org.elasticsearch.Version; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.client.sniff.ElasticsearchHostsSniffer; +import org.elasticsearch.client.sniff.ElasticsearchHostsSniffer.Scheme; +import org.elasticsearch.client.sniff.Sniffer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.monitoring.exporter.ExportBulk; -import org.elasticsearch.xpack.monitoring.exporter.ExportException; import org.elasticsearch.xpack.monitoring.exporter.Exporter; -import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolver; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; -import org.elasticsearch.xpack.monitoring.support.VersionUtils; import org.elasticsearch.xpack.ssl.SSLService; -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.HttpsURLConnection; -import 
javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocketFactory; -import java.io.ByteArrayOutputStream; +import javax.net.ssl.SSLContext; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.ArrayList; -import java.util.Base64; -import java.util.Collection; import java.util.Collections; -import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; +import java.util.function.Supplier; /** - * With the forthcoming addition of the HTTP-based Java Client for ES, we should be able to combine this class with the - * {@code LocalExporter} implementation, with only a few minor differences: - * + * {@code HttpExporter} uses the low-level {@link RestClient} to connect to a user-specified set of nodes for exporting Monitoring + * documents via HTTP or HTTPS. + *

+ * In addition to the set of nodes, it can be configured to use: *

    - *
- * <ul>
- * <li>The {@code HttpExporter} needs to support configuring the certificates and authentication parameters.</li>
- * <li>Depending on how the REST client is implemented, it may or may not allow us to make some calls in the same way
- * (only time will tell; unknown unknowns).</li>
- * </ul>
+ * <ul>
+ * <li>Certain timeouts (e.g., connection timeouts).</li>
+ * <li>User authentication.</li>
+ * <li>Sniffing (automatic detection of other nodes in the cluster to improve round robin behavior).</li>
+ * <li>Custom headers (e.g., for proxies).</li>
+ * <li>SSL / TLS.</li>
+ * </ul>
*/ public class HttpExporter extends Exporter { + private static final Logger logger = Loggers.getLogger(HttpExporter.class); + public static final String TYPE = "http"; + /** + * A string array representing the Elasticsearch node(s) to communicate with over HTTP(S). + */ public static final String HOST_SETTING = "host"; + /** + * Master timeout associated with bulk requests. + */ + public static final String BULK_TIMEOUT_SETTING = "bulk.timeout"; + /** + * Timeout used for initiating a connection. + */ public static final String CONNECTION_TIMEOUT_SETTING = "connection.timeout"; + /** + * Timeout used for reading from the connection. + */ public static final String CONNECTION_READ_TIMEOUT_SETTING = "connection.read_timeout"; - public static final String CONNECTION_KEEP_ALIVE_SETTING = "connection.keep_alive"; + /** + * Username for basic auth. + */ public static final String AUTH_USERNAME_SETTING = "auth.username"; + /** + * Password for basic auth. + */ public static final String AUTH_PASSWORD_SETTING = "auth.password"; - + /** + * The SSL settings. + * + * @see SSLService + */ + public static final String SSL_SETTING = "ssl"; + /** + * Proxy setting to allow users to send requests to a remote cluster that requires a proxy base path. + */ + public static final String PROXY_BASE_PATH_SETTING = "proxy.base_path"; + /** + * A boolean setting to enable or disable sniffing for extra connections. + */ + public static final String SNIFF_ENABLED_SETTING = "sniff.enabled"; /** * A parent setting to header key/value pairs, whose names are user defined. */ - public static final String HEADERS = "headers"; + public static final String HEADERS_SETTING = "headers"; /** * Blacklist of headers that the user is not allowed to set. *

* Headers are blacklisted if they have the opportunity to break things and we won't be guaranteed to overwrite them. */ public static final Set BLACKLISTED_HEADERS = Collections.unmodifiableSet(Sets.newHashSet("Content-Length", "Content-Type")); - /** * ES level timeout used when checking and writing templates (used to speed up tests) */ @@ -106,800 +127,448 @@ public class HttpExporter extends Exporter { *

* We must have support for ingest pipelines, which requires a minimum of 5.0. */ - public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_5_0_0_alpha5; - - private static final XContentType CONTENT_TYPE = XContentType.JSON; - - volatile String[] hosts; - final TimeValue connectionTimeout; - final TimeValue connectionReadTimeout; - final BasicAuth auth; + public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_5_0_0_alpha6; /** - * https support * + * The {@link RestClient} automatically pools connections and keeps them alive as necessary. + */ + private final RestClient client; + /** + * The optional {@link Sniffer} to add hosts to the {@link #client}. */ - final SSLSocketFactory sslSocketFactory; - final boolean hostnameVerification; - - final Environment env; - final ResolversRegistry resolvers; - @Nullable - final TimeValue templateCheckTimeout; - - @Nullable - final TimeValue pipelineCheckTimeout; + private final Sniffer sniffer; + /** + * The parameters (query string variables) to supply with every bulk request. + */ + private final Map defaultParams; /** - * Headers supplied by the user to send (likely to a proxy for routing). + * {@link HttpResource} allow us to wait to send bulk payloads until we have confirmed the remote cluster is ready. */ - @Nullable - private final Map headers; + private final HttpResource resource; - volatile boolean checkedAndUploadedIndexTemplate = false; - volatile boolean checkedAndUploadedIndexPipeline = false; - volatile boolean supportedClusterVersion = false; + private final ResolversRegistry resolvers; - boolean keepAlive; - final ConnectionKeepAliveWorker keepAliveWorker; - Thread keepAliveThread; + /** + * Create an {@link HttpExporter}. + * + * @param config The HTTP Exporter's configuration + * @param sslService The SSL Service used to create the SSL Context necessary for TLS / SSL communication + * @throws SettingsException if any setting is malformed + */ + public HttpExporter(final Config config, final SSLService sslService) { + this(config, sslService, new NodeFailureListener()); + } - public HttpExporter(Config config, Environment env, SSLService sslService) { + /** + * Create an {@link HttpExporter}. + * + * @param config The HTTP Exporter's configuration + * @param sslService The SSL Service used to create the SSL Context necessary for TLS / SSL communication + * @param listener The node failure listener used to notify an optional sniffer and resources + * @throws SettingsException if any setting is malformed + */ + HttpExporter(final Config config, final SSLService sslService, final NodeFailureListener listener) { + this(config, createRestClient(config, sslService, listener), listener); + } + + /** + * Create an {@link HttpExporter}. + * + * @param config The HTTP Exporter's configuration + * @param client The REST Client used to make all requests to the remote Elasticsearch cluster + * @param listener The node failure listener used to notify an optional sniffer and resources + * @throws SettingsException if any setting is malformed + */ + HttpExporter(final Config config, final RestClient client, final NodeFailureListener listener) { + this(config, client, createSniffer(config, client, listener), listener, new ResolversRegistry(config.settings())); + } + + /** + * Create an {@link HttpExporter}. 
+ * + * @param config The HTTP Exporter's configuration + * @param client The REST Client used to make all requests to the remote Elasticsearch cluster + * @param listener The node failure listener used to notify an optional sniffer and resources + * @param resolvers The resolver registry used to load templates and resolvers + * @throws SettingsException if any setting is malformed + */ + HttpExporter(final Config config, final RestClient client, @Nullable final Sniffer sniffer, final NodeFailureListener listener, + final ResolversRegistry resolvers) { + this(config, client, sniffer, listener, resolvers, createResources(config, resolvers)); + } + + /** + * Create an {@link HttpExporter}. + * + * @param config The HTTP Exporter's configuration + * @param client The REST Client used to make all requests to the remote Elasticsearch cluster + * @param sniffer The optional sniffer, which has already been associated with the {@code listener} + * @param listener The node failure listener used to notify resources + * @param resolvers The resolver registry used to load templates and resolvers + * @param resource Blocking HTTP resource to prevent bulks until all requirements are met + * @throws SettingsException if any setting is malformed + */ + HttpExporter(final Config config, final RestClient client, @Nullable final Sniffer sniffer, final NodeFailureListener listener, + final ResolversRegistry resolvers, final HttpResource resource) { super(config); - this.env = env; - this.hosts = resolveHosts(config.settings()); - this.auth = resolveAuth(config.settings()); - // allow the user to configure headers - this.headers = configureHeaders(config.settings()); - this.connectionTimeout = config.settings().getAsTime(CONNECTION_TIMEOUT_SETTING, TimeValue.timeValueMillis(6000)); - this.connectionReadTimeout = config.settings().getAsTime(CONNECTION_READ_TIMEOUT_SETTING, - TimeValue.timeValueMillis(connectionTimeout.millis() * 10)); + this.client = Objects.requireNonNull(client); + this.sniffer = sniffer; + this.resolvers = resolvers; + this.resource = resource; + this.defaultParams = createDefaultParams(config); - templateCheckTimeout = parseTimeValue(TEMPLATE_CHECK_TIMEOUT_SETTING); - pipelineCheckTimeout = parseTimeValue(PIPELINE_CHECK_TIMEOUT_SETTING); + // mark resources as dirty after any node failure + listener.setResource(resource); + } - keepAlive = config.settings().getAsBoolean(CONNECTION_KEEP_ALIVE_SETTING, true); - keepAliveWorker = new ConnectionKeepAliveWorker(); + /** + * Create a {@link RestClientBuilder} from the HTTP Exporter's {@code config}. + * + * @param config The HTTP Exporter's configuration + * @param sslService The SSL Service used to create the SSL Context necessary for TLS / SSL communication + * @param listener The node failure listener used to log node failures + * @return Never {@code null}. 
+ * @throws SettingsException if any required setting is missing or any setting is malformed + */ + static RestClient createRestClient(final Config config, final SSLService sslService, final NodeFailureListener listener) { + final RestClientBuilder builder = RestClient.builder(createHosts(config)).setFailureListener(listener); + final String proxyBasePath = config.settings().get(PROXY_BASE_PATH_SETTING); - final Settings sslSettings = config.settings().getByPrefix("ssl."); - sslSocketFactory = sslService.sslSocketFactory(sslSettings); - hostnameVerification = sslService.getVerificationMode(sslSettings, Settings.EMPTY).isHostnameVerificationEnabled(); - - resolvers = new ResolversRegistry(config.settings()); - // Checks that required templates are loaded - for (MonitoringIndexNameResolver resolver : resolvers) { - if (resolver.template() == null) { - throw new IllegalStateException("unable to find built-in template " + resolver.templateName()); + // allow the user to configure proxies + if (proxyBasePath != null) { + try { + builder.setPathPrefix(proxyBasePath); + } catch (final IllegalArgumentException e) { + throw new SettingsException("[" + settingFQN(config, "proxy.base_path") + "] is malformed [" + proxyBasePath + "]", e); } } - logger.debug("initialized with hosts [{}], index prefix [{}]", - Strings.arrayToCommaDelimitedString(hosts), MonitoringIndexNameResolver.PREFIX); + // allow the user to configure headers that go along with _every_ request + configureHeaders(builder, config); + // commercial X-Pack users can have Security enabled (auth and SSL/TLS), and also clusters behind proxies + configureSecurity(builder, config, sslService); + // timeouts for requests + configureTimeouts(builder, config); + + return builder.build(); } - private String[] resolveHosts(final Settings settings) { - final String[] hosts = settings.getAsArray(HOST_SETTING); + /** + * Create a {@link Sniffer} from the HTTP Exporter's {@code config} for the {@code client}. + * + * @param config The HTTP Exporter's configuration + * @param client The REST client to sniff + * @param listener The node failure listener used to help improve sniffing + * @return Can be {@code null} if the sniffer is disabled. + * @throws IndexOutOfBoundsException if no {@linkplain #HOST_SETTING hosts} are set + */ + static Sniffer createSniffer(final Config config, final RestClient client, final NodeFailureListener listener) { + final Settings settings = config.settings(); + Sniffer sniffer = null; + + // the sniffer is allowed to be ENABLED; it's disabled by default until we think it's ready for use + if (settings.getAsBoolean(SNIFF_ENABLED_SETTING, false)) { + final String[] hosts = config.settings().getAsArray(HOST_SETTING); + // createHosts(config) ensures that all schemes are the same for all hosts! + final Scheme scheme = hosts[0].startsWith("https") ? Scheme.HTTPS : Scheme.HTTP; + final ElasticsearchHostsSniffer hostsSniffer = + new ElasticsearchHostsSniffer(client, ElasticsearchHostsSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme); + + sniffer = Sniffer.builder(client).setHostsSniffer(hostsSniffer).build(); + + // inform the sniffer whenever there's a node failure + listener.setSniffer(sniffer); + + logger.debug("[" + settingFQN(config) + "] using host sniffing"); + } + + return sniffer; + } + + /** + * Create a {@link MultiHttpResource} that can be used to block bulk exporting until all expected resources are available. 
+ * + * @param config The HTTP Exporter's configuration + * @param resolvers The resolvers that contain all known templates. + * @return Never {@code null}. + */ + static MultiHttpResource createResources(final Config config, final ResolversRegistry resolvers) { + final String resourceOwnerName = settingFQN(config); + // order controls the order that each is checked; more direct checks should always happen first (e.g., version checks) + final List resources = new ArrayList<>(); + + // block the exporter from working against a monitoring cluster with the wrong version + resources.add(new VersionHttpResource(resourceOwnerName, MIN_SUPPORTED_CLUSTER_VERSION)); + // load all templates (template bodies are lazily loaded on demand) + configureTemplateResources(config, resolvers, resourceOwnerName, resources); + // load the pipeline (this will get added to as the monitoring API version increases) + configurePipelineResources(config, resourceOwnerName, resources); + + return new MultiHttpResource(resourceOwnerName, resources); + } + + /** + * Create the {@link HttpHost}s that will be connected too. + * + * @param config The exporter's configuration + * @return Never {@code null} or empty. + * @throws SettingsException if any setting is malformed or if no host is set + */ + private static HttpHost[] createHosts(final Config config) { + final String[] hosts = config.settings().getAsArray(HOST_SETTING); if (hosts.length == 0) { - throw new SettingsException("missing required setting [" + settingFQN(HOST_SETTING) + "]"); + throw new SettingsException("missing required setting [" + settingFQN(config, HOST_SETTING) + "]"); } - for (String host : hosts) { + final List httpHosts = new ArrayList<>(hosts.length); + boolean httpHostFound = false; + boolean httpsHostFound = false; + + // every host must be configured + for (final String host : hosts) { + final HttpHost httpHost; + try { - HttpExporterUtils.parseHostWithPath(host, ""); - } catch (URISyntaxException | MalformedURLException e) { - throw new SettingsException("[" + settingFQN(HOST_SETTING) + "] invalid host: [" + host + "]", e); + httpHost = HttpHostBuilder.builder(host).build(); + } catch (IllegalArgumentException e) { + throw new SettingsException("[" + settingFQN(config, HOST_SETTING) + "] invalid host: [" + host + "]", e); } + + if ("http".equals(httpHost.getSchemeName())) { + httpHostFound = true; + } else { + httpsHostFound = true; + } + + // fail if we find them configuring the scheme/protocol in different ways + if (httpHostFound && httpsHostFound) { + throw new SettingsException( + "[" + settingFQN(config, HOST_SETTING) + "] must use a consistent scheme: http or https"); + } + + httpHosts.add(httpHost); } - return hosts; + if (logger.isDebugEnabled()) { + logger.debug("[{}] using hosts [{}]", settingFQN(config), Strings.arrayToCommaDelimitedString(hosts)); + } + + return httpHosts.toArray(new HttpHost[httpHosts.size()]); } - private Map configureHeaders(final Settings settings) { - final Settings headerSettings = settings.getAsSettings(HEADERS); + /** + * Configures the {@linkplain RestClientBuilder#setDefaultHeaders(Header[]) default headers} to use with all requests. 
+ * + * @param builder The REST client builder to configure + * @param config The exporter's configuration + * @throws SettingsException if any header is {@linkplain #BLACKLISTED_HEADERS blacklisted} + */ + private static void configureHeaders(final RestClientBuilder builder, final Config config) { + final Settings headerSettings = config.settings().getAsSettings(HEADERS_SETTING); final Set names = headerSettings.names(); // Most users won't define headers if (names.isEmpty()) { - return null; + return; } - final Map headers = new HashMap<>(); + final List
<Header>
headers = new ArrayList<>(); // record and validate each header as best we can for (final String name : names) { if (BLACKLISTED_HEADERS.contains(name)) { - throw new SettingsException("[" + name + "] cannot be overwritten via [" + settingFQN("headers") + "]"); + throw new SettingsException("[" + name + "] cannot be overwritten via [" + settingFQN(config, "headers") + "]"); } final String[] values = headerSettings.getAsArray(name); if (values.length == 0) { - throw new SettingsException("headers must have values, missing for setting [" + settingFQN("headers." + name) + "]"); + final String settingName = settingFQN(config, "headers." + name); + throw new SettingsException("headers must have values, missing for setting [" + settingName + "]"); } - headers.put(name, values); + // add each value as a separate header; they literally appear like: + // + // Warning: abc + // Warning: xyz + for (final String value : values) { + headers.add(new BasicHeader(name, value)); + } } - return Collections.unmodifiableMap(headers); + builder.setDefaultHeaders(headers.toArray(new Header[headers.size()])); } - private TimeValue parseTimeValue(final String setting) { - // HORRIBLE!!! We can't use settings.getAsTime(..) !!! - // WE MUST FIX THIS IN CORE... - // TimeValue SHOULD NOT SELECTIVELY CHOOSE WHAT FIELDS TO PARSE BASED ON THEIR NAMES!!!! - final String checkTimeoutValue = config.settings().get(setting, null); + /** + * Configure the {@link RestClientBuilder} to use {@linkplain CredentialsProvider user authentication} and/or + * {@linkplain SSLContext SSL / TLS}. + * + * @param builder The REST client builder to configure + * @param config The exporter's configuration + * @param sslService The SSL Service used to create the SSL Context necessary for TLS / SSL communication + * @throws SettingsException if any setting causes issues + */ + private static void configureSecurity(final RestClientBuilder builder, final Config config, final SSLService sslService) { + final Settings sslSettings = config.settings().getAsSettings(SSL_SETTING); + final SSLIOSessionStrategy sslStrategy = sslService.sslIOSessionStrategy(sslSettings); + final CredentialsProvider credentialsProvider = createCredentialsProvider(config); - return TimeValue.parseTimeValue(checkTimeoutValue, null, settingFQN(setting)); + // sending credentials in plaintext! + if (credentialsProvider != null && config.settings().getAsArray(HOST_SETTING)[0].startsWith("https") == false) { + logger.warn("[" + settingFQN(config) + "] is not using https, but using user authentication with plaintext username/password!"); + } + + builder.setHttpClientConfigCallback(new SecurityHttpClientConfigCallback(sslStrategy, credentialsProvider)); } - ResolversRegistry getResolvers() { - return resolvers; + /** + * Configure the {@link RestClientBuilder} to use initial connection and socket timeouts. 
+ * + * @param builder The REST client builder to configure + * @param config The exporter's configuration + */ + private static void configureTimeouts(final RestClientBuilder builder, final Config config) { + final Settings settings = config.settings(); + final TimeValue connectTimeout = settings.getAsTime(CONNECTION_TIMEOUT_SETTING, TimeValue.timeValueMillis(6000)); + final TimeValue socketTimeout = settings.getAsTime(CONNECTION_READ_TIMEOUT_SETTING, + TimeValue.timeValueMillis(connectTimeout.millis() * 10)); + + // if the values could ever be null, then we should only set it if they're not null + builder.setRequestConfigCallback(new TimeoutRequestConfigCallback(connectTimeout, socketTimeout)); + } + + /** + * Creates the optional {@link CredentialsProvider} with the username/password to use with all requests for user + * authentication. + * + * @param config The exporter's configuration + * @return {@code null} if username and password not are provided. Otherwise the {@link CredentialsProvider} to use. + * @throws SettingsException if the username is missing, but a password is supplied + */ + @Nullable + private static CredentialsProvider createCredentialsProvider(final Config config) { + final Settings settings = config.settings(); + final String username = settings.get(AUTH_USERNAME_SETTING); + final String password = settings.get(AUTH_PASSWORD_SETTING); + + // username is required for any auth + if (username == null) { + if (password != null) { + throw new SettingsException( + "[" + settingFQN(config, AUTH_PASSWORD_SETTING) + "] without [" + settingFQN(config, AUTH_USERNAME_SETTING) + "]"); + } + // nothing to configure; default situation for most users + return null; + } + + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password)); + + return credentialsProvider; + } + + /** + * Create the default parameters to use with bulk indexing operations. + * + * @param config The exporter's configuration + * @return Never {@code null}. Can be empty. + */ + static Map createDefaultParams(final Config config) { + final Settings settings = config.settings(); + final TimeValue bulkTimeout = settings.getAsTime(BULK_TIMEOUT_SETTING, null); + + final MapBuilder params = new MapBuilder<>(); + + if (bulkTimeout != null) { + params.put("master_timeout", bulkTimeout.toString()); + } + + // allow the use of ingest pipelines to be completely optional + if (settings.getAsBoolean(USE_INGEST_PIPELINE_SETTING, true)) { + params.put("pipeline", EXPORT_PIPELINE_NAME); + } + + // widdle down the response to just what we care to check + params.put("filter_path", "errors,items.*.error"); + + return params.immutableMap(); + } + + /** + * Adds the {@code resources} necessary for checking and publishing monitoring templates. + * + * @param config The HTTP Exporter's configuration + * @param resolvers The resolvers that contain all known templates. + * @param resourceOwnerName The resource owner name to display for any logging messages. + * @param resources The resources to add too. 
+ */ + private static void configureTemplateResources(final Config config, final ResolversRegistry resolvers, final String resourceOwnerName, + final List resources) { + final TimeValue templateTimeout = config.settings().getAsTime(TEMPLATE_CHECK_TIMEOUT_SETTING, null); + final Set templateNames = new HashSet<>(); + + for (final MonitoringIndexNameResolver resolver : resolvers) { + final String templateName = resolver.templateName(); + + // ignore duplicates + if (templateNames.contains(templateName) == false) { + templateNames.add(templateName); + + resources.add(new TemplateHttpResource(resourceOwnerName, templateTimeout, templateName, resolver::template)); + } + } + } + + /** + * Adds the {@code resources} necessary for checking and publishing monitoring pipelines. + * + * @param config The HTTP Exporter's configuration + * @param resourceOwnerName The resource owner name to display for any logging messages. + * @param resources The resources to add too. + */ + private static void configurePipelineResources(final Config config, final String resourceOwnerName, + final List resources) { + final Settings settings = config.settings(); + + // don't require pipelines if we're not using them + if (settings.getAsBoolean(USE_INGEST_PIPELINE_SETTING, true)) { + final TimeValue pipelineTimeout = settings.getAsTime(PIPELINE_CHECK_TIMEOUT_SETTING, null); + // lazily load the pipeline + final Supplier pipeline = () -> BytesReference.toBytes(emptyPipeline(XContentType.JSON).bytes()); + + resources.add(new PipelineHttpResource(resourceOwnerName, pipelineTimeout, EXPORT_PIPELINE_NAME, pipeline)); + } } @Override - public ExportBulk openBulk() { - HttpURLConnection connection = openExportingConnection(); - return connection != null ? new Bulk(connection) : null; + public HttpExportBulk openBulk() { + // block until all resources are verified to exist + if (resource.checkAndPublishIfDirty(client)) { + return new HttpExportBulk(settingFQN(config), client, defaultParams, resolvers); + } + + return null; } @Override public void doClose() { - if (keepAliveThread != null && keepAliveThread.isAlive()) { - keepAliveWorker.closed = true; - keepAliveThread.interrupt(); + try { + if (sniffer != null) { + sniffer.close(); + } + } catch (IOException | RuntimeException e) { + logger.error("an error occurred while closing the internal client sniffer", e); + } finally { try { - keepAliveThread.join(6000); - } catch (InterruptedException e) { - // don't care. + client.close(); + } catch (IOException | RuntimeException e) { + logger.error("an error occurred while closing the internal client", e); } } } - private String buildQueryString() { - StringBuilder queryString = new StringBuilder(); - - if (bulkTimeout != null) { - queryString.append("master_timeout=").append(bulkTimeout); - } - - // allow the use of ingest pipelines to be completely optional - if (config.settings().getAsBoolean(USE_INGEST_PIPELINE_SETTING, true)) { - if (queryString.length() != 0) { - queryString.append('&'); - } - - queryString.append("pipeline=").append(EXPORT_PIPELINE_NAME); - } - - return queryString.length() != 0 ? '?' 
+ queryString.toString() : ""; - } - - private HttpURLConnection openExportingConnection() { - logger.trace("setting up an export connection"); - - final String queryString = buildQueryString(); - HttpURLConnection conn = openAndValidateConnection("POST", "/_bulk" + queryString, CONTENT_TYPE.mediaType()); - if (conn != null && (keepAliveThread == null || !keepAliveThread.isAlive())) { - // start keep alive upon successful connection if not there. - initKeepAliveThread(); - } - return conn; - } - - private void render(MonitoringDoc doc, OutputStream out) throws IOException { - try { - MonitoringIndexNameResolver resolver = resolvers.getResolver(doc); - if (resolver != null) { - String index = resolver.index(doc); - String type = resolver.type(doc); - String id = resolver.id(doc); - - try (XContentBuilder builder = new XContentBuilder(CONTENT_TYPE.xContent(), out)) { - // Builds the bulk action metadata line - builder.startObject(); - builder.startObject("index"); - builder.field("_index", index); - builder.field("_type", type); - if (id != null) { - builder.field("_id", id); - } - builder.endObject(); - builder.endObject(); - } - - // Adds action metadata line bulk separator - out.write(CONTENT_TYPE.xContent().streamSeparator()); - - // Render the monitoring document - BytesRef bytesRef = resolver.source(doc, CONTENT_TYPE).toBytesRef(); - out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length); - - // Adds final bulk separator - out.write(CONTENT_TYPE.xContent().streamSeparator()); - - if (logger.isTraceEnabled()) { - logger.trace("added index request [index={}, type={}, id={}]", index, type, id); - } - } else if (logger.isTraceEnabled()) { - logger.trace("no resolver found for monitoring document [class={}, id={}, version={}]", - doc.getClass().getName(), doc.getMonitoringId(), doc.getMonitoringVersion()); - } - } catch (Exception e) { - logger.warn((Supplier) () -> new ParameterizedMessage("failed to render document [{}], skipping it", doc), e); - } - } - - @SuppressWarnings("unchecked") - private void sendCloseExportingConnection(HttpURLConnection conn) throws IOException { - logger.trace("sending content"); - closeExportingConnection(conn); - if (conn.getResponseCode() != 200) { - logConnectionError("remote target didn't respond with 200 OK", conn); - return; - } - - InputStream inputStream = conn.getInputStream(); - try (XContentParser parser = CONTENT_TYPE.xContent().createParser(inputStream)) { - Map response = parser.map(); - if (response.get("items") != null) { - ArrayList list = (ArrayList) response.get("items"); - for (Object itemObject : list) { - Map actions = (Map) itemObject; - for (String actionKey : actions.keySet()) { - Map action = (Map) actions.get(actionKey); - if (action.get("error") != null) { - logger.error("{} failure (index:[{}] type: [{}]): {}", actionKey, action.get("_index"), action.get("_type"), - action.get("error")); - } - } - } - } - } - } - - private void closeExportingConnection(HttpURLConnection connection) throws IOException { - try (OutputStream os = connection.getOutputStream()) { - logger.debug("closing exporting connection [{}]", connection); - } - } - - /** - * open a connection to any host, validating it has the template installed if needed - * - * @return a url connection to the selected host or null if no current host is available. 
- */ - private HttpURLConnection openAndValidateConnection(String method, String path, String contentType) { - // allows us to move faulty hosts to the end; the HTTP Client will make this code obsolete - int hostIndex = 0; - try { - for (; hostIndex < hosts.length; hostIndex++) { - String host = hosts[hostIndex]; - if (!supportedClusterVersion) { - try { - Version remoteVersion = loadRemoteClusterVersion(host); - if (remoteVersion == null) { - logger.warn("unable to check remote cluster version: no version found on host [{}]", host); - continue; - } - supportedClusterVersion = remoteVersion.onOrAfter(MIN_SUPPORTED_CLUSTER_VERSION); - if (!supportedClusterVersion) { - logger.error("remote cluster version [{}] is not supported, please use a cluster with minimum version [{}]", - remoteVersion, MIN_SUPPORTED_CLUSTER_VERSION); - continue; - } - } catch (ElasticsearchException e) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "exception when checking remote cluster version on host [{}]", host), e); - continue; - } - } - - // NOTE: This assumes that the user is configured properly and only sending to a single cluster - if (checkedAndUploadedIndexTemplate == false || checkedAndUploadedIndexPipeline == false) { - checkedAndUploadedIndexTemplate = checkAndUploadIndexTemplate(host); - checkedAndUploadedIndexPipeline = checkedAndUploadedIndexTemplate && checkAndUploadIndexPipeline(host); - - // did we fail? - if (checkedAndUploadedIndexTemplate == false || checkedAndUploadedIndexPipeline == false) { - continue; - } - } - - HttpURLConnection connection = openConnection(host, method, path, contentType); - if (connection != null) { - return connection; - } - // failed hosts - reset template & cluster versions check, someone may have restarted the target cluster and deleted - // it's data folder. be safe. 
- checkedAndUploadedIndexTemplate = false; - checkedAndUploadedIndexPipeline = false; - supportedClusterVersion = false; - } - } finally { - if (hostIndex > 0 && hostIndex < hosts.length) { - logger.debug("moving [{}] failed hosts to the end of the list", hostIndex); - String[] newHosts = new String[hosts.length]; - System.arraycopy(hosts, hostIndex, newHosts, 0, hosts.length - hostIndex); - System.arraycopy(hosts, 0, newHosts, hosts.length - hostIndex, hostIndex); - hosts = newHosts; - logger.debug("preferred target host is now [{}]", hosts[0]); - } - } - - logger.error("could not connect to any configured elasticsearch instances [{}]", Strings.arrayToCommaDelimitedString(hosts)); - - return null; - } - - /** - * open a connection to the given hosts, returning null when not successful * - */ - private HttpURLConnection openConnection(String host, String method, String path, @Nullable String contentType) { - // the HTTP Client will make this code obsolete - try { - final URL url = HttpExporterUtils.parseHostWithPath(host, path); - final HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - - // Custom Headers must be set before we manually apply headers, so that our headers beat custom ones - if (headers != null) { - // Headers can technically be duplicated, although it's not expected to be used frequently - for (final Map.Entry header : headers.entrySet()) { - for (final String value : header.getValue()) { - conn.addRequestProperty(header.getKey(), value); - } - } - } - - if (conn instanceof HttpsURLConnection && sslSocketFactory != null) { - final HttpsURLConnection httpsConn = (HttpsURLConnection) conn; - final SSLSocketFactory factory = sslSocketFactory; - - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - AccessController.doPrivileged((PrivilegedAction) () -> { - // Requires permission java.lang.RuntimePermission "setFactory"; - httpsConn.setSSLSocketFactory(factory); - - // Requires permission javax.net.ssl.SSLPermission "setHostnameVerifier"; - if (hostnameVerification == false) { - httpsConn.setHostnameVerifier(TrustAllHostnameVerifier.INSTANCE); - } - return null; - }); - } - - conn.setRequestMethod(method); - conn.setConnectTimeout((int) connectionTimeout.getMillis()); - conn.setReadTimeout((int) connectionReadTimeout.getMillis()); - if (contentType != null) { - conn.setRequestProperty("Content-Type", contentType); - } - if (auth != null) { - auth.apply(conn); - } - conn.setUseCaches(false); - if (method.equalsIgnoreCase("POST") || method.equalsIgnoreCase("PUT")) { - conn.setDoOutput(true); - } - conn.connect(); - - return conn; - } catch (URISyntaxException e) { - logger.error((Supplier) () -> new ParameterizedMessage("error parsing host [{}]", host), e); - } catch (IOException e) { - logger.error((Supplier) () -> new ParameterizedMessage("error connecting to [{}]", host), e); - } - return null; - } - - /** - * Get the version of the remote monitoring cluster - */ - Version loadRemoteClusterVersion(final String host) { - HttpURLConnection connection = null; - try { - connection = openConnection(host, "GET", "/", null); - if (connection == null) { - throw new ElasticsearchException("unable to check remote cluster version: no available connection for host [" + host + "]"); - } - - try (InputStream is = connection.getInputStream()) { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Streams.copy(is, out); - return VersionUtils.parseVersion(out.toByteArray()); - } - } catch 
(IOException e) { - throw new ElasticsearchException("failed to verify the remote cluster version on host [" + host + "]", e); - } finally { - if (connection != null) { - try { - connection.getInputStream().close(); - } catch (IOException e) { - // Ignore - } - } - } - } - - /** - * Checks if the index pipeline already exists and, if not, uploads it. - * - * @return {@code true} if the pipeline exists after executing. - * @throws RuntimeException if any error occurs that should prevent indexing - */ - private boolean checkAndUploadIndexPipeline(final String host) { - if (hasPipeline(host) == false) { - logger.debug("monitoring pipeline [{}] not found", EXPORT_PIPELINE_NAME); - - return putPipeline(host); - } else { - logger.trace("monitoring pipeline [{}] found", EXPORT_PIPELINE_NAME); - } - - return true; - } - - private boolean hasPipeline(final String host) { - final String url = urlWithMasterTimeout("_ingest/pipeline/" + EXPORT_PIPELINE_NAME, pipelineCheckTimeout); - - HttpURLConnection connection = null; - try { - logger.trace("checking if monitoring pipeline [{}] exists on the monitoring cluster", EXPORT_PIPELINE_NAME); - connection = openConnection(host, "GET", url, null); - if (connection == null) { - throw new IOException("no available connection to check for monitoring pipeline [" + EXPORT_PIPELINE_NAME + "] existence"); - } - - // 200 means that the template has been found, 404 otherwise - if (connection.getResponseCode() == 200) { - logger.debug("monitoring pipeline [{}] found", EXPORT_PIPELINE_NAME); - return true; - } - } catch (Exception e) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to verify the monitoring pipeline [{}] on [{}]", EXPORT_PIPELINE_NAME, host), e); - return false; - } finally { - if (connection != null) { - try { - connection.getInputStream().close(); - } catch (IOException e) { - // Ignore - } - } - } - return false; - } - - private boolean putPipeline(final String host) { - logger.trace("installing pipeline [{}]", EXPORT_PIPELINE_NAME); - - HttpURLConnection connection = null; - - try { - connection = openConnection(host, "PUT", "_ingest/pipeline/" + EXPORT_PIPELINE_NAME, XContentType.JSON.mediaType()); - if (connection == null) { - logger.debug("no available connection to upload monitoring pipeline [{}]", EXPORT_PIPELINE_NAME); - return false; - } - - // Uploads the template and closes the outputstream - Streams.copy(BytesReference.toBytes(emptyPipeline(XContentType.JSON).bytes()), connection.getOutputStream()); - if (connection.getResponseCode() != 200 && connection.getResponseCode() != 201) { - logConnectionError("error adding the monitoring pipeline [" + EXPORT_PIPELINE_NAME + "] to [" + host + "]", connection); - return false; - } - - logger.info("monitoring pipeline [{}] set", EXPORT_PIPELINE_NAME); - return true; - } catch (IOException e) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to update monitoring pipeline [{}] on host [{}]", EXPORT_PIPELINE_NAME, host), e); - return false; - } finally { - if (connection != null) { - try { - connection.getInputStream().close(); - } catch (IOException e) { - // Ignore - } - } - } - } - - /** - * Checks if the index templates already exist and if not uploads it - * - * @return true if template exists after executing. 
- * @throws RuntimeException if any error occurs that should prevent indexing - */ - private boolean checkAndUploadIndexTemplate(final String host) { - // List of distinct templates - Map templates = StreamSupport.stream(new ResolversRegistry(Settings.EMPTY).spliterator(), false) - .collect(Collectors.toMap(MonitoringIndexNameResolver::templateName, MonitoringIndexNameResolver::template, (a, b) -> a)); - - for (Map.Entry template : templates.entrySet()) { - if (hasTemplate(template.getKey(), host) == false) { - logger.debug("template [{}] not found", template.getKey()); - if (putTemplate(host, template.getKey(), template.getValue()) == false) { - return false; - } - } else { - logger.debug("template [{}] found", template.getKey()); - } - } - return true; - } - - private boolean hasTemplate(String templateName, String host) { - final String url = urlWithMasterTimeout("_template/" + templateName, templateCheckTimeout); - - HttpURLConnection connection = null; - try { - logger.debug("checking if monitoring template [{}] exists on the monitoring cluster", templateName); - connection = openConnection(host, "GET", url, null); - if (connection == null) { - throw new IOException("no available connection to check for monitoring template [" + templateName + "] existence"); - } - - // 200 means that the template has been found, 404 otherwise - if (connection.getResponseCode() == 200) { - logger.debug("monitoring template [{}] found", templateName); - return true; - } - } catch (Exception e) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to verify the monitoring template [{}] on [{}]", templateName, host), e); - return false; - } finally { - if (connection != null) { - try { - connection.getInputStream().close(); - } catch (IOException e) { - // Ignore - } - } - } - return false; - } - - boolean putTemplate(String host, String template, String source) { - logger.debug("installing template [{}]", template); - HttpURLConnection connection = null; - try { - connection = openConnection(host, "PUT", "_template/" + template, XContentType.JSON.mediaType()); - if (connection == null) { - logger.debug("no available connection to update monitoring template [{}]", template); - return false; - } - - // Uploads the template and closes the outputstream - Streams.copy(source.getBytes(StandardCharsets.UTF_8), connection.getOutputStream()); - if (connection.getResponseCode() != 200 && connection.getResponseCode() != 201) { - logConnectionError("error adding the monitoring template [" + template + "] to [" + host + "]", connection); - return false; - } - - logger.info("monitoring template [{}] updated ", template); - return true; - } catch (IOException e) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to update monitoring template [{}] on host [{}]", template, host), e); - return false; - } finally { - if (connection != null) { - try { - connection.getInputStream().close(); - } catch (IOException e) { - // Ignore - } - } - } - } - - /** - * Get the {@code url} with the optional {@code masterTimeout}. - *

- * This method assumes that there is no query string applied yet! - * - * @param url The URL being used - * @param masterTimeout The optional master_timeout - * @return Never {@code null} - */ - private String urlWithMasterTimeout(final String url, final TimeValue masterTimeout) { - if (masterTimeout != null) { - return url + "?master_timeout=" + masterTimeout; - } - - return url; - } - - private void logConnectionError(String msg, HttpURLConnection conn) { - InputStream inputStream = conn.getErrorStream(); - String err = ""; - if (inputStream != null) { - java.util.Scanner s = new java.util.Scanner(inputStream, "UTF-8").useDelimiter("\\A"); - err = s.hasNext() ? s.next() : ""; - } - - try { - logger.error("{} response code [{} {}]. content: [{}]", - msg, conn.getResponseCode(), - conn.getResponseMessage(), - err); - } catch (IOException e) { - logger.error("{}. connection had an error while reporting the error. tough life.", msg); - } - } - - protected void initKeepAliveThread() { - if (keepAlive) { - keepAliveThread = new Thread(keepAliveWorker, "monitoring-exporter[" + config.name() + "][keep_alive]"); - keepAliveThread.setDaemon(true); - keepAliveThread.start(); - } - } - - BasicAuth resolveAuth(Settings setting) { - String username = setting.get(AUTH_USERNAME_SETTING, null); - String password = setting.get(AUTH_PASSWORD_SETTING, null); - if (username == null && password == null) { - return null; - } - if (username == null) { - throw new SettingsException("invalid auth setting. missing [" + settingFQN(AUTH_USERNAME_SETTING) + "]"); - } - return new BasicAuth(username, password); - } - - /** - * Trust all hostname verifier. This simply returns true to completely disable hostname verification - */ - static class TrustAllHostnameVerifier implements HostnameVerifier { - static final HostnameVerifier INSTANCE = new TrustAllHostnameVerifier(); - - private TrustAllHostnameVerifier() { - } - - @Override - public boolean verify(String s, SSLSession sslSession) { - return true; - } - } - - /** - * Sadly we need to make sure we keep the connection open to the target ES a - * Java's connection pooling closes connections if idle for 5sec. - */ - class ConnectionKeepAliveWorker implements Runnable { - volatile boolean closed = false; - - @Override - public void run() { - logger.trace("starting keep alive thread"); - while (!closed) { - try { - Thread.sleep(1000); - if (closed) { - return; - } - String[] currentHosts = hosts; - if (currentHosts.length == 0) { - logger.trace("keep alive thread shutting down. no hosts defined"); - return; // no hosts configured at the moment. - } - HttpURLConnection conn = openConnection(currentHosts[0], "GET", "", null); - if (conn == null) { - logger.trace("keep alive thread shutting down. failed to open connection to current host [{}]", currentHosts[0]); - return; - } else { - conn.getInputStream().close(); // close and release to connection pool. - } - } catch (InterruptedException e) { - // ignore, if closed, good.... - } catch (Exception e) { - logger.debug("error in keep alive thread, shutting down (will be restarted after a successful connection has been " + - "made) {}", ExceptionsHelper.detailedMessage(e)); - return; - } - } - } - } - - static class BasicAuth { - - String username; - char[] password; - - public BasicAuth(String username, String password) { - this.username = username; - this.password = password != null ? 
password.toCharArray() : null; - } - - void apply(HttpURLConnection connection) throws UnsupportedEncodingException { - String userInfo = username + ":" + (password != null ? new String(password) : ""); - String basicAuth = "Basic " + Base64.getEncoder().encodeToString(userInfo.getBytes("ISO-8859-1")); - connection.setRequestProperty("Authorization", basicAuth); - } - } - - class Bulk extends ExportBulk { - - private HttpURLConnection connection; - private OutputStream out; - - public Bulk(HttpURLConnection connection) { - super(name()); - this.connection = connection; - } - - @Override - public void doAdd(Collection docs) throws ExportException { - try { - if ((docs != null) && (!docs.isEmpty())) { - if (connection == null) { - connection = openExportingConnection(); - if (connection == null) { - throw new IllegalStateException("No connection available to export documents"); - } - } - if (out == null) { - out = connection.getOutputStream(); - } - - // We need to use a buffer to render each monitoring document - // because the renderer might close the outputstream (ex: XContentBuilder) - try (BytesStreamOutput buffer = new BytesStreamOutput()) { - for (MonitoringDoc monitoringDoc : docs) { - try { - render(monitoringDoc, buffer); - BytesRef bytesRef = buffer.bytes().toBytesRef(); - // write the result to the connection - out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length); - } finally { - buffer.reset(); - } - } - } - } - } catch (Exception e) { - throw new ExportException("failed to add documents to export bulk [{}]", name); - } - } - - @Override - public void doFlush() throws ExportException { - if (connection != null) { - try { - sendCloseExportingConnection(connection); - } catch (Exception e) { - throw new ExportException("failed to flush export bulk [{}]", e, name); - } finally { - connection = null; - } - } - } - - @Override - protected void doClose() throws ExportException { - if (connection != null) { - try { - closeExportingConnection(connection); - } catch (Exception e) { - throw new ExportException("failed to close export bulk [{}]", e, name); - } finally { - connection = null; - } - } - } - } } diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtils.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtils.java deleted file mode 100644 index 73054b3bfb7..00000000000 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtils.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.monitoring.exporter.http; - -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; - -public class HttpExporterUtils { - - public static URL parseHostWithPath(String host, String path) throws URISyntaxException, MalformedURLException { - - if (!host.contains("://")) { - // prefix with http - host = "http://" + host; - } - if (!host.endsWith("/")) { - // make sure we can safely resolves sub paths and not replace parent folders - host = host + "/"; - } - - URL hostUrl = new URL(host); - - if (hostUrl.getPort() == -1) { - // url has no port, default to 9200 - sadly we need to rebuild.. 
- StringBuilder newUrl = new StringBuilder(hostUrl.getProtocol() + "://"); - newUrl.append(hostUrl.getHost()).append(":9200").append(hostUrl.toURI().getPath()); - hostUrl = new URL(newUrl.toString()); - - } - return new URL(hostUrl, path); - - } - -} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilder.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilder.java new file mode 100644 index 00000000000..49825458a77 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilder.java @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Objects; + +/** + * {@code HttpHostBuilder} creates an {@link HttpHost} meant to be used with an Elasticsearch cluster. The {@code HttpHostBuilder} uses + * defaults that are most common for Elasticsearch, including an unspecified port defaulting to 9200 and the default scheme + * being http (as opposed to https). + *

+ * The only required detail is the host to connect to, either via hostname or IP address. + *

+ * This enables you to create an {@code HttpHost} directly via a builder mechanism, or indirectly by parsing a URI-like string. For example: + *


+ * HttpHost host1 = HttpHostBuilder.builder("localhost").build();               // http://localhost:9200
+ * HttpHost host2 = HttpHostBuilder.builder("localhost:9200").build();          // http://localhost:9200
+ * HttpHost host4 = HttpHostBuilder.builder("http://localhost:9200").build();   // http://localhost:9200
+ * HttpHost host5 = HttpHostBuilder.builder("https://localhost:9200").build();  // https://localhost:9200
+ * HttpHost host6 = HttpHostBuilder.builder("https://localhost:9200").build();  // https://127.0.0.1:9200 (IPv4 localhost)
+ * HttpHost host7 = HttpHostBuilder.builder("http://10.1.2.3").build();         // http://10.2.3.4:9200
+ * HttpHost host8 = HttpHostBuilder.builder("https://[::1]").build();           // http://[::1]:9200      (IPv6 localhost)
+ * HttpHost host9 = HttpHostBuilder.builder("https://[::1]:9200").build();      // http://[::1]:9200      (IPv6 localhost)
+ * HttpHost host10= HttpHostBuilder.builder("https://sub.domain").build();      // https://sub.domain:9200
+ * 
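As a quick illustration of the defaulting rules described above (an unspecified scheme falls back to http and an unspecified port falls back to 9200), a minimal usage sketch follows; it is illustrative only and not part of this change, and it assumes the standard accessors on Apache HttpCore's HttpHost:

// Illustrative sketch only; the getters come from org.apache.http.HttpHost.
HttpHost plain = HttpHostBuilder.builder("10.1.2.3").build();
assert "http".equals(plain.getSchemeName()); // scheme defaults to http
assert plain.getPort() == 9200;              // port defaults to 9200

HttpHost secure = HttpHostBuilder.builder("https://[::1]").build();
assert "https".equals(secure.getSchemeName()); // an explicit scheme is preserved
assert secure.getPort() == 9200;               // the port still defaults to 9200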
+ * Note: {@code HttpHost}s are the mechanism that the {@link RestClient} uses to build the base request. If you need to specify proxy + * settings, then use the {@link RestClientBuilder.RequestConfigCallback} to configure the {@code Proxy} settings. + * + * @see #builder(String) + * @see #builder() + */ +public class HttpHostBuilder { + + /** + * The scheme used to connect to Elasticsearch. + */ + private Scheme scheme = Scheme.HTTP; + /** + * The host is the only required portion of the supplied URI when building it. The rest can be defaulted. + */ + private String host = null; + /** + * The port used to connect to Elasticsearch. + *

+ * The default port is 9200 when unset. + */ + private int port = -1; + + /** + * Create an empty {@link HttpHostBuilder}. + *

+ * The expectation is that you then explicitly build the {@link HttpHost} piece-by-piece. + *

+ * For example: + *


+     * HttpHost localhost = HttpHostBuilder.builder().host("localhost").build();                            // http://localhost:9200
+     * HttpHost explicitLocalhost = HttpHostBuilder.builder().scheme(Scheme.HTTP).host("localhost").port(9200).build();
+     *                                                                                                      // http://localhost:9200
+     * HttpHost secureLocalhost = HttpHostBuilder.builder().scheme(Scheme.HTTPS).host("localhost").build(); // https://localhost:9200
+     * HttpHost differentPort = HttpHostBuilder.builder().host("my_host").port(19200).build();              // http://my_host:19200
+     * HttpHost ipBased = HttpHostBuilder.builder().host("192.168.0.11").port(80).build();                  // http://192.168.0.11:80
+     * 
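For the piece-by-piece style shown above, the builder also validates its inputs: port(int) rejects values outside -1 or [1, 65535], and build() requires a host to have been set (see the methods later in this file). A hedged sketch of those failure modes, illustrative only:

// Illustrative sketch only: exercising the validation implemented further down in this file.
try {
    HttpHostBuilder.builder().port(70000);        // rejected: port must be -1 or within [1, 65535]
} catch (IllegalArgumentException expected) {
    // expected
}
try {
    HttpHostBuilder.builder().port(9200).build(); // rejected: build() requires host(...) to have been set
} catch (IllegalStateException expected) {
    // expected
}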
+ * + * @return Never {@code null}. + */ + public static HttpHostBuilder builder() { + return new HttpHostBuilder(); + } + + /** + * Create an empty {@link HttpHostBuilder}. + *

+ * The expectation is that you then explicitly build the {@link HttpHost} piece-by-piece. + *

+ * For example: + *


+     * HttpHost localhost = HttpHostBuilder.builder("localhost").build();                     // http://localhost:9200
+     * HttpHost explicitLocalhost = HttpHostBuilder.builder("http://localhost:9200").build(); // http://localhost:9200
+     * HttpHost secureLocalhost = HttpHostBuilder.builder("https://localhost").build();       // https://localhost:9200
+     * HttpHost differentPort = HttpHostBuilder.builder("my_host:19200").build();             // http://my_host:19200
+     * HttpHost ipBased = HttpHostBuilder.builder("192.168.0.11:80").build();                 // http://192.168.0.11:80
+     * 
+ * + * @return Never {@code null}. + * @throws NullPointerException if {@code uri} is {@code null}. + * @throws IllegalArgumentException if any issue occurs while parsing the {@code uri}. + */ + public static HttpHostBuilder builder(final String uri) { + return new HttpHostBuilder(uri); + } + + /** + * Create a new {@link HttpHost} from scratch. + */ + HttpHostBuilder() { + // everything is in the default state + } + + /** + * Create a new {@link HttpHost} based on the supplied host. + * + * @param uri The [partial] URI used to build. + * @throws NullPointerException if {@code uri} is {@code null}. + * @throws IllegalArgumentException if any issue occurs while parsing the {@code uri}. + */ + HttpHostBuilder(final String uri) { + Objects.requireNonNull(uri, "uri must not be null"); + + try { + String cleanedUri = uri; + + if (uri.contains("://") == false) { + cleanedUri = "http://" + uri; + } + + final URI parsedUri = new URI(cleanedUri); + + // "localhost:9200" doesn't have a scheme + if (parsedUri.getScheme() != null) { + scheme(Scheme.fromString(parsedUri.getScheme())); + } + + if (parsedUri.getHost() != null) { + host(parsedUri.getHost()); + } else { + // if the host is null, then it means one of two things: we're in a broken state _or_ it had something like underscores + // we want the raw form so that parts of the URI are not decoded + final String host = parsedUri.getRawAuthority(); + + // they explicitly provided the port, which is unparsed when the host is null + if (host.contains(":")) { + final String[] hostPort = host.split(":", 2); + + host(hostPort[0]); + port(Integer.parseInt(hostPort[1])); + } else { + host(host); + } + } + + if (parsedUri.getPort() != -1) { + port(parsedUri.getPort()); + } + + // fail for proxies + if (parsedUri.getRawPath() != null && parsedUri.getRawPath().isEmpty() == false) { + throw new IllegalArgumentException( + "HttpHosts do not use paths [" + parsedUri.getRawPath() + + "]. see setRequestConfigCallback for proxies. value: [" + uri + "]"); + } + } catch (URISyntaxException | IndexOutOfBoundsException | NullPointerException e) { + throw new IllegalArgumentException("error parsing host: [" + uri + "]", e); + } + } + + /** + * Set the scheme (aka protocol) for the {@link HttpHost}. + * + * @param scheme The scheme to use. + * @return Always {@code this}. + * @throws NullPointerException if {@code scheme} is {@code null}. + */ + public HttpHostBuilder scheme(final Scheme scheme) { + this.scheme = Objects.requireNonNull(scheme); + + return this; + } + + /** + * Set the host for the {@link HttpHost}. + *

+ * This does not attempt to parse the {@code host} in any way. + * + * @param host The host to use. + * @return Always {@code this}. + * @throws NullPointerException if {@code host} is {@code null}. + */ + public HttpHostBuilder host(final String host) { + this.host = Objects.requireNonNull(host); + + return this; + } + + /** + * Set the port for the {@link HttpHost}. + *

+ * Specifying the {@code port} as -1 will cause it to be defaulted to 9200 when the {@code HttpHost} is built. + * + * @param port The port to use. + * @return Always {@code this}. + * @throws IllegalArgumentException if the {@code port} is not -1 or [1, 65535]. + */ + public HttpHostBuilder port(final int port) { + // setting a port to 0 makes no sense when you're the client; -1 allows us to use the default when we build + if (port != -1 && (port < 1 || port > 65535)) { + throw new IllegalArgumentException("port must be -1 for the default or [1, 65535]. was: " + port); + } + + this.port = port; + + return this; + } + + /** + * Create a new {@link HttpHost} from the current {@code scheme}, {@code host}, and {@code port}. + * + * @return Never {@code null}. + * @throws IllegalStateException if {@code host} is unset. + */ + public HttpHost build() { + if (host == null) { + throw new IllegalStateException("host must be set"); + } + + return new HttpHost(host, port == -1 ? 9200 : port, scheme.toString()); + } + +} \ No newline at end of file diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResource.java new file mode 100644 index 00000000000..fc86100dd09 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResource.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.elasticsearch.client.RestClient; + +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +/** + * An {@code HttpResource} is some "thing" that needs to exist on the other side. If it does not exist, then follow-on actions cannot + * occur. + *

+ * {@code HttpResource}s can assume that, as long as the connection stays active, then a verified resource should continue to exist on the + * other side. + * + * @see MultiHttpResource + * @see PublishableHttpResource + */ +public abstract class HttpResource { + + /** + * The current state of the {@link HttpResource}. + */ + enum State { + + /** + * The resource is ready to use. + */ + CLEAN, + /** + * The resource is being checked right now to see if it can be used. + */ + CHECKING, + /** + * The resource needs to be checked before it can be used. + */ + DIRTY + } + + /** + * The user-recognizable name for whatever owns this {@link HttpResource}. + */ + protected final String resourceOwnerName; + /** + * The current state of the resource, which helps to determine if it needs to be checked. + */ + protected final AtomicReference state; + + /** + * Create a new {@link HttpResource} that {@linkplain #isDirty() is dirty}. + * + * @param resourceOwnerName The user-recognizable name + */ + protected HttpResource(final String resourceOwnerName) { + this(resourceOwnerName, true); + } + + /** + * Create a new {@link HttpResource} that is {@code dirty}. + * + * @param resourceOwnerName The user-recognizable name + * @param dirty Whether the resource is dirty or not + */ + protected HttpResource(final String resourceOwnerName, final boolean dirty) { + this.resourceOwnerName = Objects.requireNonNull(resourceOwnerName); + this.state = new AtomicReference<>(dirty ? State.DIRTY : State.CLEAN); + } + + /** + * Get the resource owner for this {@link HttpResource}. + * + * @return Never {@code null}. + */ + public String getResourceOwnerName() { + return resourceOwnerName; + } + + /** + * Determine if the resource needs to be checked. + * + * @return {@code true} to indicate that the resource should block follow-on actions that require it. + * @see #checkAndPublish(RestClient) + */ + public boolean isDirty() { + return state.get() != State.CLEAN; + } + + /** + * Mark the resource as {@linkplain #isDirty() dirty}. + */ + public final void markDirty() { + state.compareAndSet(State.CLEAN, State.DIRTY); + } + + /** + * If the resource is currently {@linkplain #isDirty() dirty}, then check and, if necessary, publish this {@link HttpResource}. + *

+ * Expected usage: + *


+     * if (resource.checkAndPublishIfDirty(client)) {
+     *     // use client with resources having been verified
+     * }
+     * 
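To make the contract above concrete, here is a minimal, hypothetical HttpResource subclass (the name AlwaysReadyResource is invented for illustration and is not part of this change): the first call to checkAndPublishIfDirty runs doCheckAndPublish and flips the state to CLEAN, and subsequent calls short-circuit until markDirty() is invoked.

// Hypothetical example subclass; RestClient is org.elasticsearch.client.RestClient.
class AlwaysReadyResource extends HttpResource {

    AlwaysReadyResource(final String resourceOwnerName) {
        super(resourceOwnerName); // starts DIRTY, so the first caller performs the check
    }

    @Override
    protected boolean doCheckAndPublish(final RestClient client) {
        return true; // success marks the resource CLEAN until markDirty() is called again
    }
}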
+ * + * @param client The REST client to make the request(s). + * @return {@code true} if the resource is available for use. {@code false} to stop. + */ + public final boolean checkAndPublishIfDirty(final RestClient client) { + final State state = this.state.get(); + + // get in line and wait until the check passes or fails if it's checking now, or start checking + return state == State.CLEAN || blockUntilCheckAndPublish(client); + } + + /** + * Invoked by {@link #checkAndPublishIfDirty(RestClient)} to block in case {@link #checkAndPublish(RestClient)} is in the middle of + * {@linkplain State#CHECKING checking}. + *

+ * Unlike {@link #isDirty()} and {@link #checkAndPublishIfDirty(RestClient)}, this is {@code synchronized} in order to prevent + * double-execution and it invokes {@link #checkAndPublish(RestClient)} if it's {@linkplain State#DIRTY dirty}. + * + * @param client The REST client to make the request(s). + * @return {@code true} if the resource is available for use. {@code false} to stop. + */ + private synchronized boolean blockUntilCheckAndPublish(final RestClient client) { + final State state = this.state.get(); + + return state == State.CLEAN || (state == State.DIRTY && checkAndPublish(client)); + } + + /** + * Check and, if necessary, publish this {@link HttpResource}. + *

+ * This will perform the check regardless of the {@linkplain #isDirty() dirtiness} and it will update the dirtiness. + * Using this directly can be useful if there is ever a need to double-check dirtiness without having to {@linkplain #markDirty() mark} + * it as dirty. + * + * @param client The REST client to make the request(s). + * @return {@code true} if the resource is available for use. {@code false} to stop. + * @see #isDirty() + */ + public final synchronized boolean checkAndPublish(final RestClient client) { + // we always check when asked, regardless of clean or dirty + state.set(State.CHECKING); + + boolean success = false; + + try { + success = doCheckAndPublish(client); + } finally { + // nothing else should be unsetting from CHECKING + assert state.get() == State.CHECKING; + + state.set(success ? State.CLEAN : State.DIRTY); + } + + return success; + } + + /** + * Perform whatever is necessary to check and publish this {@link HttpResource}. + * + * @param client The REST client to make the request(s). + * @return {@code true} if the resource is available for use. {@code false} to stop. + */ + protected abstract boolean doCheckAndPublish(final RestClient client); + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResource.java new file mode 100644 index 00000000000..15747a6f5db --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResource.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.logging.Loggers; + +import java.util.Collections; +import java.util.List; + +/** + * {@code MultiHttpResource} serves as a wrapper of a {@link List} of {@link HttpResource}s. + *

+ * Marking the {@code MultiHttpResource} dirty effectively marks all of its sub-resources dirty as well. + *

+ * Sub-resources should be the sole responsibility of the the {@code MultiHttpResource}; there should not be something using them directly + * if they are included in a {@code MultiHttpResource}. + */ +public class MultiHttpResource extends HttpResource { + + private static final Logger logger = Loggers.getLogger(MultiHttpResource.class); + + /** + * Sub-resources that are grouped to simplify notification. + */ + private final List resources; + + /** + * Create a {@link MultiHttpResource}. + * + * @param resourceOwnerName The user-recognizable name. + * @param resources The sub-resources to aggregate. + */ + public MultiHttpResource(final String resourceOwnerName, final List resources) { + super(resourceOwnerName); + + this.resources = Collections.unmodifiableList(resources); + } + + /** + * Get the resources that are checked by this {@link MultiHttpResource}. + * + * @return Never {@code null}. + */ + public List getResources() { + return resources; + } + + /** + * Check and publish all {@linkplain #resources sub-resources}. + */ + @Override + protected boolean doCheckAndPublish(RestClient client) { + logger.trace("checking sub-resources existence and publishing on the [{}]", resourceOwnerName); + + boolean exists = true; + + // short-circuits on the first failure, thus marking the whole thing dirty + for (final HttpResource resource : resources) { + if (resource.checkAndPublish(client) == false) { + exists = false; + break; + } + } + + logger.trace("all sub-resources exist [{}] on the [{}]", exists, resourceOwnerName); + + return exists; + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListener.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListener.java new file mode 100644 index 00000000000..6590232fda1 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListener.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpHost; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.sniff.Sniffer; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.logging.Loggers; + +/** + * {@code NodeFailureListener} logs warnings for any node failure, but it can also notify a {@link Sniffer} and/or {@link HttpResource} + * upon failures as well. + *

+ * The {@linkplain #setSniffer(Sniffer) sniffer} and {@linkplain #setResource(HttpResource) resource} are expected to be set immediately + * or not at all. + */ +class NodeFailureListener extends RestClient.FailureListener { + + private static final Logger logger = Loggers.getLogger(NodeFailureListener.class); + + /** + * The optional {@link Sniffer} associated with the {@link RestClient}. + */ + @Nullable + private SetOnce sniffer = new SetOnce<>(); + /** + * The optional {@link HttpResource} associated with the {@link RestClient}. + */ + @Nullable + private SetOnce resource = new SetOnce<>(); + + /** + * Get the {@link Sniffer} that is notified upon node failure. + * + * @return Can be {@code null}. + */ + @Nullable + public Sniffer getSniffer() { + return sniffer.get(); + } + + /** + * Set the {@link Sniffer} that is notified upon node failure. + * + * @param sniffer The sniffer to notify + * @throws SetOnce.AlreadySetException if called more than once + */ + public void setSniffer(@Nullable final Sniffer sniffer) { + this.sniffer.set(sniffer); + } + + /** + * Get the {@link HttpResource} that is notified upon node failure. + * + * @return Can be {@code null}. + */ + @Nullable + public HttpResource getResource() { + return resource.get(); + } + + /** + * Set the {@link HttpResource} that is notified upon node failure. + * + * @param resource The resource to notify + * @throws SetOnce.AlreadySetException if called more than once + */ + public void setResource(@Nullable final HttpResource resource) { + this.resource.set(resource); + } + + @Override + public void onFailure(final HttpHost host) { + logger.warn("connection failed to node at [{}://{}:{}]", host.getSchemeName(), host.getHostName(), host.getPort()); + + final HttpResource resource = this.resource.get(); + final Sniffer sniffer = this.sniffer.get(); + + if (resource != null) { + resource.markDirty(); + } + if (sniffer != null) { + sniffer.sniffOnFailure(host); + } + } + +} \ No newline at end of file diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java new file mode 100644 index 00000000000..dde2dad6bd1 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Objects; +import java.util.function.Supplier; + +/** + * {@code PipelineHttpResource}s allow the checking and uploading of ingest pipelines to a remote cluster. + *

+ * In the future, we will need to also support the transformation or replacement of pipelines based on their version, but we do not need + * that functionality until some breaking change in the Monitoring API requires it. + */ +public class PipelineHttpResource extends PublishableHttpResource { + + private static final Logger logger = Loggers.getLogger(PipelineHttpResource.class); + + /** + * The name of the pipeline that is sent to the remote cluster. + */ + private final String pipelineName; + /** + * Provides a fully formed template (e.g., no variables that need replaced). + */ + private final Supplier pipeline; + + /** + * Create a new {@link PipelineHttpResource}. + * + * @param resourceOwnerName The user-recognizable name + * @param masterTimeout Master timeout to use with any request. + * @param pipelineName The name of the template (e.g., ".pipeline123"). + * @param pipeline The pipeline provider. + */ + public PipelineHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, + final String pipelineName, final Supplier pipeline) { + super(resourceOwnerName, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS); + + this.pipelineName = Objects.requireNonNull(pipelineName); + this.pipeline = Objects.requireNonNull(pipeline); + } + + /** + * Determine if the current {@linkplain #pipelineName pipeline} exists. + */ + @Override + protected CheckResponse doCheck(final RestClient client) { + return checkForResource(client, logger, + "/_ingest/pipeline", pipelineName, "monitoring pipeline", + resourceOwnerName, "monitoring cluster"); + } + + /** + * Publish the current {@linkplain #pipelineName pipeline}. + */ + @Override + protected boolean doPublish(final RestClient client) { + return putResource(client, logger, + "/_ingest/pipeline", pipelineName, this::pipelineToHttpEntity, "monitoring pipeline", + resourceOwnerName, "monitoring cluster"); + } + + /** + * Create a {@link HttpEntity} for the {@link #pipeline}. + * + * @return Never {@code null}. + */ + HttpEntity pipelineToHttpEntity() { + return new ByteArrayEntity(pipeline.get(), ContentType.APPLICATION_JSON); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java new file mode 100644 index 00000000000..1479e9fcdb1 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java @@ -0,0 +1,257 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * {@code PublishableHttpResource} represents an {@link HttpResource} that is a single file or object that can be checked and + * published in the event that the check does not pass. + * + * @see #doCheck(RestClient) + * @see #doPublish(RestClient) + */ +public abstract class PublishableHttpResource extends HttpResource { + + /** + * {@code CheckResponse} provides a ternary state for {@link #doCheck(RestClient)}. + */ + public enum CheckResponse { + + /** + * The check found the resource, so nothing needs to be published. + */ + EXISTS, + /** + * The check did not find the resource, so we need to attempt to publish it. + */ + DOES_NOT_EXIST, + /** + * The check hit an unexpected exception that should block publishing attempts until it can check again. + */ + ERROR + + } + + /** + * A value that will never match anything in the JSON response body, thus limiting it to "{}". + */ + public static final String FILTER_PATH_NONE = "$NONE"; + + /** + * Use this to avoid getting any JSON response from a request. + */ + public static final Map NO_BODY_PARAMETERS = Collections.singletonMap("filter_path", FILTER_PATH_NONE); + + /** + * The default parameters to use for any request. + */ + protected final Map parameters; + + /** + * Create a new {@link PublishableHttpResource} that {@linkplain #isDirty() is dirty}. + * + * @param resourceOwnerName The user-recognizable name. + * @param masterTimeout Master timeout to use with any request. + * @param baseParameters The base parameters to specify for the request. + */ + protected PublishableHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, + final Map baseParameters) { + this(resourceOwnerName, masterTimeout, baseParameters, true); + } + + /** + * Create a new {@link PublishableHttpResource}. + * + * @param resourceOwnerName The user-recognizable name. + * @param masterTimeout Master timeout to use with any request. + * @param baseParameters The base parameters to specify for the request. + * @param dirty Whether the resource is dirty or not + */ + protected PublishableHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, + final Map baseParameters, final boolean dirty) { + super(resourceOwnerName, dirty); + + if (masterTimeout != null) { + final Map parameters = new HashMap<>(baseParameters.size() + 1); + + parameters.putAll(baseParameters); + parameters.put("master_timeout", masterTimeout.toString()); + + this.parameters = Collections.unmodifiableMap(parameters); + } else { + this.parameters = baseParameters; + } + } + + /** + * Get the default parameters to use with every request. + * + * @return Never {@code null}. + */ + public Map getParameters() { + return parameters; + } + + /** + * Perform whatever is necessary to check and publish this {@link PublishableHttpResource}. + * + * @param client The REST client to make the request(s). 
+ * @return {@code true} if the resource is available for use. {@code false} to stop. + */ + @Override + protected final boolean doCheckAndPublish(final RestClient client) { + final CheckResponse check = doCheck(client); + + // errors cause a dead-stop + return check != CheckResponse.ERROR && (check == CheckResponse.EXISTS || doPublish(client)); + } + + /** + * Determine if the current resource exists. + *

+     * <ul>
+     * <li>
+     * {@link CheckResponse#EXISTS EXISTS} will not run {@link #doPublish(RestClient)} and mark this as not dirty.
+     * </li>
+     * <li>
+     * {@link CheckResponse#DOES_NOT_EXIST DOES_NOT_EXIST} will run {@link #doPublish(RestClient)}, which determines the dirtiness.
+     * </li>
+     * <li>{@link CheckResponse#ERROR ERROR} will not run {@link #doPublish(RestClient)} and mark this as dirty.</li>
+     * </ul>
+ * + * @param client The REST client to make the request(s). + * @return Never {@code null}. + */ + protected abstract CheckResponse doCheck(final RestClient client); + + /** + * Determine if the current {@code resourceName} exists at the {@code resourceBasePath} endpoint. + *

+ * This provides the base-level check for any resource that does not need to inspect its actual contents. + * + * @param client The REST client to make the request(s). + * @param logger The logger to use for status messages. + * @param resourceBasePath The base path/endpoint to check for the resource (e.g., "/_template"). + * @param resourceName The name of the resource (e.g., "template123"). + * @param resourceType The type of resource (e.g., "monitoring template"). + * @param resourceOwnerName The user-recognizeable resource owner. + * @param resourceOwnerType The type of resource owner being dealt with (e.g., "monitoring cluster"). + * @return Never {@code null}. + */ + protected CheckResponse checkForResource(final RestClient client, final Logger logger, + final String resourceBasePath, + final String resourceName, final String resourceType, + final String resourceOwnerName, final String resourceOwnerType) { + logger.trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); + + try { + final Response response = client.performRequest("GET", resourceBasePath + "/" + resourceName, parameters); + + // we don't currently check for the content because we always expect it to be the same; + // if we ever make a BWC change to any template (thus without renaming it), then we need to check the content! + if (response.getStatusLine().getStatusCode() == RestStatus.OK.getStatus()) { + logger.debug("{} [{}] found on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); + + return CheckResponse.EXISTS; + } else { + throw new ResponseException(response); + } + } catch (final ResponseException e) { + final int statusCode = e.getResponse().getStatusLine().getStatusCode(); + + // 404 + if (statusCode == RestStatus.NOT_FOUND.getStatus()) { + logger.debug("{} [{}] does not exist on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); + + return CheckResponse.DOES_NOT_EXIST; + } else { + logger.error((Supplier) () -> + new ParameterizedMessage("failed to verify {} [{}] on the [{}] {} with status code [{}]", + resourceType, resourceName, resourceOwnerName, resourceOwnerType, statusCode), + e); + + // weirder failure than below; block responses just like other unexpected failures + return CheckResponse.ERROR; + } + } catch (IOException | RuntimeException e) { + logger.error((Supplier) () -> + new ParameterizedMessage("failed to verify {} [{}] on the [{}] {}", + resourceType, resourceName, resourceOwnerName, resourceOwnerType), + e); + + // do not attempt to publish the resource because we're in a broken state + return CheckResponse.ERROR; + } + } + + /** + * Publish the current resource. + *

+ * This is only invoked if {@linkplain #doCheck(RestClient) the check} fails. + * + * @param client The REST client to make the request(s). + * @return {@code true} if it exists. + */ + protected abstract boolean doPublish(final RestClient client); + + /** + * Upload the {@code resourceName} to the {@code resourceBasePath} endpoint. + * + * @param client The REST client to make the request(s). + * @param logger The logger to use for status messages. + * @param resourceBasePath The base path/endpoint to check for the resource (e.g., "/_template"). + * @param resourceName The name of the resource (e.g., "template123"). + * @param body The {@link HttpEntity} that makes up the body of the request. + * @param resourceType The type of resource (e.g., "monitoring template"). + * @param resourceOwnerName The user-recognizeable resource owner. + * @param resourceOwnerType The type of resource owner being dealt with (e.g., "monitoring cluster"). + */ + protected boolean putResource(final RestClient client, final Logger logger, + final String resourceBasePath, + final String resourceName, final java.util.function.Supplier body, + final String resourceType, + final String resourceOwnerName, final String resourceOwnerType) { + logger.trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); + + boolean success = false; + + try { + final Response response = client.performRequest("PUT", resourceBasePath + "/" + resourceName, parameters, body.get()); + final int statusCode = response.getStatusLine().getStatusCode(); + + // 200 or 201 + if (statusCode == RestStatus.OK.getStatus() || statusCode == RestStatus.CREATED.getStatus()) { + logger.debug("{} [{}] uploaded to the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); + + success = true; + } else { + throw new RuntimeException("[" + resourceBasePath + "/" + resourceName + "] responded with [" + statusCode + "]"); + } + } catch (IOException | RuntimeException e) { + logger.error((Supplier) () -> + new ParameterizedMessage("failed to upload {} [{}] on the [{}] {}", + resourceType, resourceName, resourceOwnerName, resourceOwnerType), + e); + } + + return success; + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/Scheme.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/Scheme.java new file mode 100644 index 00000000000..e7bcdf39214 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/Scheme.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.elasticsearch.client.RestClient; + +import java.util.Locale; + +/** + * {@code Scheme} provides the list of supported {@code URI} schemes (aka protocols) for working with Elasticsearch via the + * {@link RestClient}. + * + * @see HttpHostBuilder + */ +public enum Scheme { + + /** + * HTTP is the default {@linkplain Scheme scheme} used by Elasticsearch. + */ + HTTP("http"), + /** + * HTTPS is the secure form of {@linkplain #HTTP http}, which requires that Elasticsearch be using X-Pack Security with TLS/SSL or + * a similar securing mechanism. 
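As an aside, the check/publish flow defined above is easiest to see through a concrete subclass. A minimal sketch, using the TemplateHttpResource added later in this change with a made-up owner name, timeout, and template body (the real values come from the exporter's settings), assuming the usual java.util.function.Supplier and TimeValue imports:

    // illustrative fragment only
    final Supplier<String> source = () -> "{\"template\": \".monitoring-example-*\"}";
    final TemplateHttpResource resource =
            new TemplateHttpResource("_http", TimeValue.timeValueSeconds(30), ".monitoring-example", source);

    // {filter_path=$NONE, master_timeout=30s}, so the existence check becomes roughly
    //   GET /_template/.monitoring-example?filter_path=$NONE&master_timeout=30s
    // 200 -> EXISTS (nothing to publish), 404 -> DOES_NOT_EXIST (PUT the body), anything else -> ERROR (stop)
    final Map<String, String> parameters = resource.getParameters();
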
+ */ + HTTPS("https"); + + private final String scheme; + + Scheme(final String scheme) { + this.scheme = scheme; + } + + @Override + public String toString() { + return scheme; + } + + /** + * Determine the {@link Scheme} from the {@code scheme}. + *


+     * <pre><code>
+     * Scheme http = Scheme.fromString("http");
+     * Scheme https = Scheme.fromString("https");
+     * Scheme httpsCaps = Scheme.fromString("HTTPS"); // same as https
+     * </code></pre>
+ * + * @param scheme The scheme to check. + * @return Never {@code null}. + * @throws NullPointerException if {@code scheme} is {@code null}. + * @throws IllegalArgumentException if the {@code scheme} is not supported. + */ + public static Scheme fromString(final String scheme) { + switch (scheme.toLowerCase(Locale.ROOT)) { + case "http": + return HTTP; + case "https": + return HTTPS; + } + + throw new IllegalArgumentException("unsupported scheme: [" + scheme + "]"); + } + +} \ No newline at end of file diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallback.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallback.java new file mode 100644 index 00000000000..6ff613d33b7 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallback.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.Nullable; + +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; +import java.util.Objects; + +/** + * {@code SecurityHttpClientConfigCallback} configures a {@link RestClient} for user authentication and SSL / TLS. + */ +class SecurityHttpClientConfigCallback implements RestClientBuilder.HttpClientConfigCallback { + + /** + * The optional {@link CredentialsProvider} for all requests to enable user authentication. + */ + @Nullable + private final CredentialsProvider credentialsProvider; + /** + * The {@link SSLIOSessionStrategy} for all requests to enable SSL / TLS encryption. + */ + private final SSLIOSessionStrategy sslStrategy; + + /** + * Create a new {@link SecurityHttpClientConfigCallback}. + * + * @param credentialsProvider The credential provider, if a username/password have been supplied + * @param sslStrategy The SSL strategy, if SSL / TLS have been supplied + * @throws NullPointerException if {@code sslStrategy} is {@code null} + */ + SecurityHttpClientConfigCallback(final SSLIOSessionStrategy sslStrategy, + @Nullable final CredentialsProvider credentialsProvider) { + this.sslStrategy = Objects.requireNonNull(sslStrategy); + this.credentialsProvider = credentialsProvider; + } + + /** + * Get the {@link CredentialsProvider} that will be added to the HTTP client. + * + * @return Can be {@code null}. + */ + @Nullable + CredentialsProvider getCredentialsProvider() { + return credentialsProvider; + } + + /** + * Get the {@link SSLIOSessionStrategy} that will be added to the HTTP client. + * + * @return Never {@code null}. 
+ */ + SSLIOSessionStrategy getSSLStrategy() { + return sslStrategy; + } + + /** + * Sets the {@linkplain HttpAsyncClientBuilder#setDefaultCredentialsProvider(CredentialsProvider) credential provider}, + * {@linkplain HttpAsyncClientBuilder#setSSLContext(SSLContext) SSL context}, and + * {@linkplain HttpAsyncClientBuilder#setSSLHostnameVerifier(HostnameVerifier) SSL Hostname Verifier}. + * + * @param httpClientBuilder The client to configure. + * @return Always {@code httpClientBuilder}. + */ + @Override + public HttpAsyncClientBuilder customizeHttpClient(final HttpAsyncClientBuilder httpClientBuilder) { + // enable SSL / TLS + httpClientBuilder.setSSLStrategy(sslStrategy); + + // enable user authentication + if (credentialsProvider != null) { + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + + return httpClientBuilder; + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java new file mode 100644 index 00000000000..db0c72754b2 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Objects; +import java.util.function.Supplier; + +/** + * {@code TemplateHttpResource}s allow the checking and uploading of templates to a remote cluster. + *
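Stepping back to the SecurityHttpClientConfigCallback just shown, a hedged sketch of how it might be handed to the low-level REST client builder. The host, credentials, SSLContext, and hostname verifier below are placeholders; the actual wiring lives in HttpExporter and may differ:

    // placeholders: sslContext and verifier would come from the exporter's SSL settings
    final SSLIOSessionStrategy sslStrategy = new SSLIOSessionStrategy(sslContext, verifier);

    final CredentialsProvider credentials = new BasicCredentialsProvider();
    credentials.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("monitoring_user", "changeme"));

    final RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "https"));
    builder.setHttpClientConfigCallback(new SecurityHttpClientConfigCallback(sslStrategy, credentials));
    final RestClient restClient = builder.build();
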

+ * There is currently no need to check the response body of the template for consistency, but if we ever make a backwards-compatible change + * that requires the template to be replaced, then we will need to check for something in the body in order to see if we need to + * replace the existing template(s). + */ +public class TemplateHttpResource extends PublishableHttpResource { + + private static final Logger logger = Loggers.getLogger(TemplateHttpResource.class); + + /** + * The name of the template that is sent to the remote cluster. + */ + private final String templateName; + /** + * Provides a fully formed template (e.g., no variables that need replaced). + */ + private final Supplier template; + + /** + * Create a new {@link TemplateHttpResource}. + * + * @param resourceOwnerName The user-recognizable name. + * @param masterTimeout Master timeout to use with any request. + * @param templateName The name of the template (e.g., ".template123"). + * @param template The template provider. + */ + public TemplateHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, + final String templateName, final Supplier template) { + super(resourceOwnerName, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS); + + this.templateName = Objects.requireNonNull(templateName); + this.template = Objects.requireNonNull(template); + } + + /** + * Determine if the current {@linkplain #templateName template} exists. + */ + @Override + protected CheckResponse doCheck(final RestClient client) { + return checkForResource(client, logger, + "/_template", templateName, "monitoring template", + resourceOwnerName, "monitoring cluster"); + } + + /** + * Publish the missing {@linkplain #templateName template}. + */ + @Override + protected boolean doPublish(final RestClient client) { + return putResource(client, logger, + "/_template", templateName, this::templateToHttpEntity, "monitoring template", + resourceOwnerName, "monitoring cluster"); + } + + /** + * Create a {@link HttpEntity} for the {@link #template}. + * + * @return Never {@code null}. + */ + HttpEntity templateToHttpEntity() { + return new StringEntity(template.get(), ContentType.APPLICATION_JSON); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallback.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallback.java new file mode 100644 index 00000000000..6b60d92f95f --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallback.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.client.config.RequestConfig.Builder; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; + +/** + * {@code TimeoutRequestConfigCallback} enables the setting of connection-related timeouts for HTTP requests. 
+ */ +class TimeoutRequestConfigCallback implements RestClientBuilder.RequestConfigCallback { + + @Nullable + private final TimeValue connectTimeout; + @Nullable + private final TimeValue socketTimeout; + + /** + * Create a new {@link TimeoutRequestConfigCallback}. + * + * @param connectTimeout The initial connection timeout, if any is supplied + * @param socketTimeout The socket timeout, if any is supplied + */ + TimeoutRequestConfigCallback(@Nullable final TimeValue connectTimeout, @Nullable final TimeValue socketTimeout) { + assert connectTimeout != null || socketTimeout != null : "pointless to use with defaults"; + + this.connectTimeout = connectTimeout; + this.socketTimeout = socketTimeout; + } + + /** + * Get the initial connection timeout. + * + * @return Can be {@code null} for default (1 second). + */ + @Nullable + TimeValue getConnectTimeout() { + return connectTimeout; + } + + /** + * Get the socket timeout. + * + * @return Can be {@code null} for default (10 seconds). + */ + @Nullable + TimeValue getSocketTimeout() { + return socketTimeout; + } + + /** + * Sets the {@linkplain Builder#setConnectTimeout(int) connect timeout} and {@linkplain Builder#setSocketTimeout(int) socket timeout}. + * + * @param requestConfigBuilder The request to configure. + * @return Always {@code requestConfigBuilder}. + */ + @Override + public Builder customizeRequestConfig(Builder requestConfigBuilder) { + if (connectTimeout != null) { + requestConfigBuilder.setConnectTimeout((int)connectTimeout.millis()); + } + if (socketTimeout != null) { + requestConfigBuilder.setSocketTimeout((int)socketTimeout.millis()); + } + + return requestConfigBuilder; + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java new file mode 100644 index 00000000000..ea1eb857573 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.Version; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +/** + * {@code VersionHttpResource} verifies that the returned {@link Version} of Elasticsearch is at least the specified minimum version. + */ +public class VersionHttpResource extends HttpResource { + + private static final Logger logger = Loggers.getLogger(VersionHttpResource.class); + + /** + * The parameters to pass with every version request to limit the output to just the version number. 
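Returning to the TimeoutRequestConfigCallback above, a small usage sketch with hypothetical five-second connect and sixty-second socket timeouts (HttpExporter derives the real values from the exporter's timeout settings):

    final RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http"));

    // only register the callback when at least one timeout was configured; otherwise the
    // RestClient defaults (1s connect / 10s socket, as noted in the getters above) are left untouched
    builder.setRequestConfigCallback(
            new TimeoutRequestConfigCallback(TimeValue.timeValueSeconds(5), TimeValue.timeValueSeconds(60)));
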
+ */ + public static final Map PARAMETERS = Collections.singletonMap("filter_path", "version.number"); + + /** + * The minimum supported version of Elasticsearch. + */ + private final Version minimumVersion; + + /** + * Create a new {@link VersionHttpResource}. + * + * @param resourceOwnerName The user-recognizable name. + * @param minimumVersion The minimum supported version of Elasticsearch. + */ + public VersionHttpResource(final String resourceOwnerName, final Version minimumVersion) { + super(resourceOwnerName); + + this.minimumVersion = Objects.requireNonNull(minimumVersion); + } + + /** + * Verify that the minimum {@link Version} is supported on the remote cluster. + *

+ * If it does not, then there is nothing that can be done except wait until it does. There is no publishing aspect to this operation. + */ + @Override + protected boolean doCheckAndPublish(final RestClient client) { + logger.trace("checking [{}] to ensure that it supports the minimum version [{}]", resourceOwnerName, minimumVersion); + + try { + return validateVersion(client.performRequest("GET", "/", PARAMETERS)); + } catch (IOException | RuntimeException e) { + logger.error( + (Supplier)() -> + new ParameterizedMessage("failed to verify minimum version [{}] on the [{}] monitoring cluster", + minimumVersion, resourceOwnerName), + e); + } + + return false; + } + + /** + * Ensure that the {@code response} contains a {@link Version} that is {@linkplain Version#onOrAfter(Version) on or after} the + * {@link #minimumVersion}. + * + * @param response The response to parse. + * @return {@code true} if the remote cluster is running a supported version. + * @throws NullPointerException if the response is malformed. + * @throws ClassCastException if the response is malformed. + * @throws IOException if any parsing issue occurs. + */ + private boolean validateVersion(final Response response) throws IOException { + boolean supported = false; + + try (final XContentParser parser = XContentType.JSON.xContent().createParser(response.getEntity().getContent())) { + // the response should be filtered to just '{"version":{"number":"xyz"}}', so this is cheap and guaranteed + @SuppressWarnings("unchecked") + final String versionNumber = (String)((Map)parser.map().get("version")).get("number"); + final Version version = Version.fromString(versionNumber); + + if (version.onOrAfter(minimumVersion)) { + logger.debug("version [{}] >= [{}] and supported for [{}]", version, minimumVersion, resourceOwnerName); + + supported = true; + } else { + logger.error("version [{}] < [{}] and NOT supported for [{}]", version, minimumVersion, resourceOwnerName); + } + } + + return supported; + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index aa5300d65ff..8f6e0b6fa60 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.monitoring.exporter.local; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; @@ -23,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -54,6 +57,8 @@ import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; */ public class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener { + 
private static final Logger logger = Loggers.getLogger(LocalExporter.class); + public static final String TYPE = "local"; private final InternalClient client; @@ -104,7 +109,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle @Override public void doClose() { if (state.getAndSet(State.TERMINATED) != State.TERMINATED) { - logger.debug("stopped"); + logger.trace("stopped"); clusterService.remove(this); cleanerService.remove(this); } diff --git a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolver.java b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolver.java index 20e10c8940b..93613edaf82 100644 --- a/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolver.java +++ b/elasticsearch/x-pack/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolver.java @@ -11,11 +11,13 @@ import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.License; +import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterInfoMonitoringDoc; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolver; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Map; public class ClusterInfoResolver extends MonitoringIndexNameResolver.Data { @@ -34,27 +36,38 @@ public class ClusterInfoResolver extends MonitoringIndexNameResolver.Data extraParams = new MapBuilder() .put(License.REST_VIEW_MODE, "true") .map(); params = new ToXContent.DelegatingMapParams(extraParams, params); license.toInnerXContent(builder, params); - builder.field(Fields.HKEY, hash(license, document.getClusterUUID())); + builder.field("hkey", hash(license, document.getClusterUUID())); builder.endObject(); } - builder.startObject(Fields.CLUSTER_STATS); - ClusterStatsResponse clusterStats = document.getClusterStats(); + final ClusterStatsResponse clusterStats = document.getClusterStats(); if (clusterStats != null) { + builder.startObject("cluster_stats"); clusterStats.toXContent(builder, params); + builder.endObject(); + } + + final List usages = document.getUsage(); + if (usages != null) { + // in the future we may choose to add other usages under the stack_stats section, but it is only xpack for now + // it may also be combined on the UI side of phone-home to add things like "kibana" and "logstash" under "stack_stats" + builder.startObject("stack_stats").startObject("xpack"); + for (final XPackFeatureSet.Usage usage : usages) { + builder.field(usage.name(), usage); + } + builder.endObject().endObject(); } - builder.endObject(); } public static String hash(License license, String clusterName) { @@ -66,15 +79,4 @@ public class ClusterInfoResolver extends MonitoringIndexNameResolver.Data templates does not exist: it should have been created in the current version"); - for (String template : monitoringTemplates().keySet()) { + for (String template : monitoringTemplateNames()) { assertTemplateExists(template); } assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); @@ -93,7 +93,7 @@ public abstract class AbstractExporterTemplateTestCase extends MonitoringIntegTe assertTemplateExists(indexTemplateName()); logger.debug("--> existing templates 
are old: new templates should be created"); - for (String template : monitoringTemplates().keySet()) { + for (String template : monitoringTemplateNames()) { assertTemplateExists(template); } assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); @@ -115,7 +115,7 @@ public abstract class AbstractExporterTemplateTestCase extends MonitoringIntegTe doExporting(); logger.debug("--> existing templates are up to date"); - for (String template : monitoringTemplates().keySet()) { + for (String template : monitoringTemplateNames()) { assertTemplateExists(template); } assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java new file mode 100644 index 00000000000..11144671ac3 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.RequestLine; +import org.apache.http.StatusLine; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse; + +import java.io.IOException; +import java.util.Map; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Base test helper for any {@link PublishableHttpResource}. + */ +public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase { + + protected final String owner = getClass().getSimpleName(); + @Nullable + protected final TimeValue masterTimeout = randomFrom(TimeValue.timeValueMinutes(5), null); + + protected final RestClient client = mock(RestClient.class); + + /** + * Perform {@link PublishableHttpResource#doCheck(RestClient) doCheck} against the {@code resource} and assert that it returns + * {@code true} given a {@link RestStatus} that is {@link RestStatus#OK}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). 
+ */ + protected void assertCheckExists(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName) + throws IOException { + doCheckWithStatusCode(resource, resourceBasePath, resourceName, successfulCheckStatus(), CheckResponse.EXISTS); + } + + /** + * Perform {@link PublishableHttpResource#doCheck(RestClient) doCheck} against the {@code resource} and assert that it returns + * {@code false} given a {@link RestStatus} that is not {@link RestStatus#OK}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). + */ + protected void assertCheckDoesNotExist(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName) + throws IOException { + doCheckWithStatusCode(resource, resourceBasePath, resourceName, notFoundCheckStatus(), CheckResponse.DOES_NOT_EXIST); + } + + /** + * Perform {@link PublishableHttpResource#doCheck(RestClient) doCheck} against the {@code resource} that throws an exception and assert + * that it returns {@code false}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). + */ + protected void assertCheckWithException(final PublishableHttpResource resource, + final String resourceBasePath, final String resourceName) + throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final ResponseException responseException = responseException("GET", endpoint, failedCheckStatus()); + final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException); + + when(client.performRequest("GET", endpoint, resource.getParameters())).thenThrow(e); + + assertThat(resource.doCheck(client), is(CheckResponse.ERROR)); + } + + /** + * Perform {@link PublishableHttpResource#doPublish(RestClient) doPublish} against the {@code resource} and assert that it returns + * {@code true} given a {@link RestStatus} that is {@link RestStatus#OK} or {@link RestStatus#CREATED}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). + * @param bodyType The request body provider's type. + */ + protected void assertPublishSucceeds(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, + final Class bodyType) + throws IOException { + doPublishWithStatusCode(resource, resourceBasePath, resourceName, bodyType, successfulPublishStatus(), true); + } + + /** + * Perform {@link PublishableHttpResource#doPublish(RestClient) doPublish} against the {@code resource} and assert that it returns + * {@code false} given a {@link RestStatus} that is neither {@link RestStatus#OK} or {@link RestStatus#CREATED}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). + * @param bodyType The request body provider's type. 
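The concrete tests built on this helper are not part of this excerpt; a minimal hypothetical subclass, to show how the assertions above are meant to be used (class and template names are made up, and any real TemplateHttpResourceTests may be organized differently):

    public class TemplateHttpResourceTests extends AbstractPublishableHttpResourceTestCase {

        private final TemplateHttpResource resource =
                new TemplateHttpResource(owner, masterTimeout, ".monitoring-example", () -> "{}");

        public void testDoCheckExists() throws IOException {
            assertCheckExists(resource, "/_template", ".monitoring-example");
        }

        public void testDoPublishTrue() throws IOException {
            // TemplateHttpResource uploads a StringEntity body
            assertPublishSucceeds(resource, "/_template", ".monitoring-example", StringEntity.class);
        }
    }
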
+ */ + protected void assertPublishFails(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, + final Class bodyType) + throws IOException { + doPublishWithStatusCode(resource, resourceBasePath, resourceName, bodyType, failedPublishStatus(), false); + } + + /** + * Perform {@link PublishableHttpResource#doPublish(RestClient) doPublish} against the {@code resource} that throws an exception and + * assert that it returns {@code false}. + * + * @param resource The resource to execute. + * @param resourceBasePath The base endpoint (e.g., "/_template") + * @param resourceName The resource name (e.g., the template or pipeline name). + */ + protected void assertPublishWithException(final PublishableHttpResource resource, + final String resourceBasePath, final String resourceName, + final Class bodyType) + throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected")); + + when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenThrow(e); + + assertThat(resource.doPublish(client), is(false)); + } + + protected void assertParameters(final PublishableHttpResource resource) { + final Map parameters = resource.getParameters(); + + if (masterTimeout != null) { + assertThat(parameters.get("master_timeout"), is(masterTimeout.toString())); + } + + assertThat(parameters.get("filter_path"), is("$NONE")); + } + + private void doCheckWithStatusCode(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, + final RestStatus status, + final CheckResponse expected) + throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Response response = response("GET", endpoint, status); + + when(client.performRequest("GET", endpoint, resource.getParameters())).thenReturn(response); + + assertThat(resource.doCheck(client), is(expected)); + } + + private void doPublishWithStatusCode(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, + final Class bodyType, + final RestStatus status, + final boolean expected) + throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Response response = response("GET", endpoint, status); + + when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenReturn(response); + + assertThat(resource.doPublish(client), is(expected)); + } + + protected RestStatus successfulCheckStatus() { + return RestStatus.OK; + } + + protected RestStatus notFoundCheckStatus() { + return RestStatus.NOT_FOUND; + } + + protected RestStatus failedCheckStatus() { + final Predicate ignoreStatus = (final RestStatus status) -> status == RestStatus.OK || status == RestStatus.NOT_FOUND; + return randomValueOtherThanMany(ignoreStatus, () -> randomFrom(RestStatus.values())); + } + + protected RestStatus successfulPublishStatus() { + return randomFrom(RestStatus.OK, RestStatus.CREATED); + } + + protected RestStatus failedPublishStatus() { + final Predicate ignoreStatus = (final RestStatus status) -> status == RestStatus.OK || status == RestStatus.CREATED; + return randomValueOtherThanMany(ignoreStatus, () -> randomFrom(RestStatus.values())); + } + + protected String concatenateEndpoint(final String resourceBasePath, final String resourceName) { + return resourceBasePath + "/" + 
resourceName; + } + + protected Response response(final String method, final String endpoint, final RestStatus status) { + final Response response = mock(Response.class); + // fill out the response enough so that the exception can be constructed + final RequestLine requestLine = mock(RequestLine.class); + when(requestLine.getMethod()).thenReturn(method); + when(requestLine.getUri()).thenReturn(endpoint); + final StatusLine statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(status.getStatus()); + + when(response.getRequestLine()).thenReturn(requestLine); + when(response.getStatusLine()).thenReturn(statusLine); + + return response; + } + + protected ResponseException responseException(final String method, final String endpoint, final RestStatus status) { + try { + return new ResponseException(response(method, endpoint, status)); + } catch (final IOException e) { + throw new IllegalStateException("update responseException to properly build the ResponseException", e); + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListenerTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListenerTests.java new file mode 100644 index 00000000000..f522f7c1699 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulkResponseListenerTests.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests {@link HttpExportBulkResponseListener}. + */ +public class HttpExportBulkResponseListenerTests extends ESTestCase { + + public void testOnSuccess() throws IOException { + final Response response = mock(Response.class); + final StringEntity entity = new StringEntity("{\"took\":5,\"errors\":false}", ContentType.APPLICATION_JSON); + + when(response.getEntity()).thenReturn(entity); + + // doesn't explode + new WarningsHttpExporterBulkResponseListener().onSuccess(response); + } + + public void testOnSuccessParsing() throws IOException { + // {"took": 4, "errors": false, ... 
+ final Response response = mock(Response.class); + final XContent xContent = mock(XContent.class); + final XContentParser parser = mock(XContentParser.class); + final HttpEntity entity = mock(HttpEntity.class); + final InputStream stream = mock(InputStream.class); + + when(response.getEntity()).thenReturn(entity); + when(entity.getContent()).thenReturn(stream); + when(xContent.createParser(stream)).thenReturn(parser); + + // {, "took", 4, "errors", false + when(parser.nextToken()).thenReturn(Token.START_OBJECT, + Token.FIELD_NAME, Token.VALUE_NUMBER, + Token.FIELD_NAME, Token.VALUE_BOOLEAN); + when(parser.currentName()).thenReturn("took", "errors"); + when(parser.booleanValue()).thenReturn(false); + + new HttpExportBulkResponseListener(xContent).onSuccess(response); + + verify(parser, times(5)).nextToken(); + verify(parser, times(2)).currentName(); + verify(parser).booleanValue(); + } + + public void testOnSuccessWithInnerErrors() { + final String[] expectedErrors = new String[] { randomAsciiOfLengthBetween(4, 10), randomAsciiOfLengthBetween(5, 9) }; + final AtomicInteger counter = new AtomicInteger(0); + final Response response = mock(Response.class); + final StringEntity entity = new StringEntity( + "{\"took\":4,\"errors\":true,\"items\":[" + + "{\"index\":{\"_index\":\".monitoring-data-2\",\"_type\":\"node\",\"_id\":\"123\"}}," + + "{\"index\":{\"_index\":\".monitoring-data-2\",\"_type\":\"node\",\"_id\":\"456\"," + + "\"error\":\"" + expectedErrors[0] + "\"}}," + + "{\"index\":{\"_index\":\".monitoring-data-2\",\"_type\":\"node\",\"_id\":\"789\"}}," + + "{\"index\":{\"_index\":\".monitoring-data-2\",\"_type\":\"node\",\"_id\":\"012\"," + + "\"error\":\"" + expectedErrors[1] + "\"}}" + + "]}", + ContentType.APPLICATION_JSON); + + when(response.getEntity()).thenReturn(entity); + + // doesn't explode + new WarningsHttpExporterBulkResponseListener() { + @Override + void onItemError(final String text) { + assertEquals(expectedErrors[counter.getAndIncrement()], text); + } + }.onSuccess(response); + + assertEquals(expectedErrors.length, counter.get()); + } + + public void testOnSuccessParsingWithInnerErrors() throws IOException { + // {"took": 4, "errors": true, "items": [ { "index": { "_index": "ignored", "_type": "ignored", "_id": "ignored" }, + // { "index": { "_index": "ignored", "_type": "ignored", "_id": "ignored", "error": "blah" } + // ]... 
+ final Response response = mock(Response.class); + final XContent xContent = mock(XContent.class); + final XContentParser parser = mock(XContentParser.class); + final HttpEntity entity = mock(HttpEntity.class); + final InputStream stream = mock(InputStream.class); + + when(response.getEntity()).thenReturn(entity); + when(entity.getContent()).thenReturn(stream); + when(xContent.createParser(stream)).thenReturn(parser); + + // {, "took", 4, "errors", false nextToken, currentName + when(parser.nextToken()).thenReturn(Token.START_OBJECT, // 1 + Token.FIELD_NAME, Token.VALUE_NUMBER, // 3, 1 + Token.FIELD_NAME, Token.VALUE_BOOLEAN, // 5, 2 + Token.FIELD_NAME, Token.START_ARRAY, // 7, 3 + // no error: + Token.START_OBJECT, // 8 + Token.FIELD_NAME, Token.START_OBJECT, // 10, 4 + Token.FIELD_NAME, Token.VALUE_STRING, // 12, 5 + Token.FIELD_NAME, Token.VALUE_STRING, // 14, 6 + Token.FIELD_NAME, Token.VALUE_STRING, // 16, 7 + Token.END_OBJECT, // 17 + Token.START_OBJECT, // 18 + Token.FIELD_NAME, Token.START_OBJECT, // 20, 8 + Token.FIELD_NAME, Token.VALUE_STRING, // 22, 9 + Token.FIELD_NAME, Token.VALUE_STRING, // 24, 10 + Token.FIELD_NAME, Token.VALUE_STRING, // 26, 11 + Token.FIELD_NAME, Token.VALUE_STRING, // 28, 12 ("error") + Token.END_OBJECT, // 29 + Token.END_ARRAY); // 30 + when(parser.currentName()).thenReturn("took", "errors", "items", + "index", "_index", "_type", "_id", + "index", "_index", "_type", "_id", "error"); + // there were errors; so go diving for the error + when(parser.booleanValue()).thenReturn(true); + when(parser.text()).thenReturn("this is the error"); + + new HttpExportBulkResponseListener(xContent).onSuccess(response); + + verify(parser, times(30)).nextToken(); + verify(parser, times(12)).currentName(); + verify(parser).booleanValue(); + verify(parser).text(); + } + + public void testOnSuccessMalformed() { + final AtomicInteger counter = new AtomicInteger(0); + final Response response = mock(Response.class); + + if (randomBoolean()) { + // malformed JSON + when(response.getEntity()).thenReturn(new StringEntity("{", ContentType.APPLICATION_JSON)); + } + + new WarningsHttpExporterBulkResponseListener() { + @Override + void onError(final String msg, final Throwable cause) { + counter.getAndIncrement(); + } + }.onSuccess(response); + + assertEquals(1, counter.get()); + } + + public void testOnFailure() { + final Exception exception = randomBoolean() ? 
new Exception() : new RuntimeException(); + + new WarningsHttpExporterBulkResponseListener() { + @Override + void onError(final String msg, final Throwable cause) { + assertSame(exception, cause); + } + }.onFailure(exception); + } + + private static class WarningsHttpExporterBulkResponseListener extends HttpExportBulkResponseListener { + + WarningsHttpExporterBulkResponseListener() { + super(XContentType.JSON.xContent()); + } + + @Override + void onItemError(final String msg) { + fail("There should be no errors within the response!"); + } + + @Override + void onError(final String msg, final Throwable cause) { + super.onError(msg, cause); // let it log the exception so you can check the output + + fail("There should be no errors!"); + } + + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java new file mode 100644 index 00000000000..1347911ec03 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java @@ -0,0 +1,590 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import com.squareup.okhttp.mockwebserver.MockResponse; +import com.squareup.okhttp.mockwebserver.MockWebServer; +import com.squareup.okhttp.mockwebserver.RecordedRequest; +import okio.Buffer; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.xpack.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.monitoring.MonitoringSettings; +import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStateMonitoringDoc; +import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryMonitoringDoc; +import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.exporter.Exporters; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; +import org.elasticsearch.xpack.monitoring.resolver.bulk.MonitoringBulkTimestampedResolver; +import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; +import org.joda.time.format.DateTimeFormat; + +import org.junit.After; +import 
org.junit.Before; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.FILTER_PATH_NONE; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) +public class HttpExporterIT extends MonitoringIntegTestCase { + + private MockWebServerContainer webServerContainer; + private MockWebServer webServer; + + @Before + public void startWebServer() { + webServerContainer = new MockWebServerContainer(); + webServer = webServerContainer.getWebServer(); + } + + @After + public void stopWebServer() throws Exception { + webServer.shutdown(); + } + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + public void testExport() throws Exception { + final boolean templatesExistsAlready = randomBoolean(); + final boolean pipelineExistsAlready = randomBoolean(); + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); + enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); + + final Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()); + + internalCluster().startNode(builder); + + final int nbDocs = randomIntBetween(1, 25); + export(newRandomMonitoringDocs(nbDocs)); + + assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready); + assertBulk(webServer, nbDocs); + } + + public void testExportWithHeaders() throws Exception { + final boolean templatesExistsAlready = randomBoolean(); + final boolean pipelineExistsAlready = randomBoolean(); + + final String headerValue = randomAsciiOfLengthBetween(3, 9); + final String[] array = generateRandomStringArray(2, 4, false); + + final Map headers = new HashMap<>(); + + headers.put("X-Cloud-Cluster", new String[] { headerValue }); + headers.put("X-Found-Cluster", new String[] { headerValue }); + headers.put("Array-Check", array); + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); + enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); + + Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()) + .put("xpack.monitoring.exporters._http.headers.X-Cloud-Cluster", headerValue) + .put("xpack.monitoring.exporters._http.headers.X-Found-Cluster", 
headerValue) + .putArray("xpack.monitoring.exporters._http.headers.Array-Check", array); + + internalCluster().startNode(builder); + + final int nbDocs = randomIntBetween(1, 25); + export(newRandomMonitoringDocs(nbDocs)); + + assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, headers, null); + assertBulk(webServer, nbDocs, headers, null); + } + + public void testExportWithBasePath() throws Exception { + final boolean useHeaders = randomBoolean(); + final boolean templatesExistsAlready = randomBoolean(); + final boolean pipelineExistsAlready = randomBoolean(); + + final String headerValue = randomAsciiOfLengthBetween(3, 9); + final String[] array = generateRandomStringArray(2, 4, false); + + final Map headers = new HashMap<>(); + + if (useHeaders) { + headers.put("X-Cloud-Cluster", new String[] { headerValue }); + headers.put("X-Found-Cluster", new String[] { headerValue }); + headers.put("Array-Check", array); + } + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); + enqueueResponse(200, "{\"errors\": false}"); + + String basePath = "path/to"; + + if (randomBoolean()) { + basePath += "/something"; + + if (rarely()) { + basePath += "/proxied"; + } + } + + if (randomBoolean()) { + basePath = "/" + basePath; + } + + final Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()) + .put("xpack.monitoring.exporters._http.proxy.base_path", basePath + (randomBoolean() ? "/" : "")); + + if (useHeaders) { + builder + .put("xpack.monitoring.exporters._http.headers.X-Cloud-Cluster", headerValue) + .put("xpack.monitoring.exporters._http.headers.X-Found-Cluster", headerValue) + .putArray("xpack.monitoring.exporters._http.headers.Array-Check", array); + } + + internalCluster().startNode(builder); + + final int nbDocs = randomIntBetween(1, 25); + export(newRandomMonitoringDocs(nbDocs)); + + assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, headers, basePath); + assertBulk(webServer, nbDocs, headers, basePath); + } + + public void testHostChangeReChecksTemplate() throws Exception { + final boolean templatesExistsAlready = randomBoolean(); + final boolean pipelineExistsAlready = randomBoolean(); + + Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()); + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); + enqueueResponse(200, "{\"errors\": false}"); + + internalCluster().startNode(builder); + + export(Collections.singletonList(newRandomMonitoringDoc())); + + assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready); + assertBulk(webServer); + + try (final MockWebServerContainer secondWebServerContainer = new MockWebServerContainer(webServerContainer.getPort() + 1)) { + final MockWebServer secondWebServer = secondWebServerContainer.getWebServer(); + + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( + Settings.builder().putArray("xpack.monitoring.exporters._http.host", secondWebServerContainer.getFormattedAddress()))); + + 
enqueueGetClusterVersionResponse(secondWebServer, Version.CURRENT); + // pretend that one of the templates is missing + for (Tuple template : monitoringTemplates()) { + if (template.v1().contains(MonitoringBulkTimestampedResolver.Data.DATA)) { + enqueueResponse(secondWebServer, 200, "template [" + template + "] exists"); + } else { + enqueueResponse(secondWebServer, 404, "template [" + template + "] does not exist"); + enqueueResponse(secondWebServer, 201, "template [" + template + "] created"); + } + } + // opposite of if it existed before + enqueuePipelineResponses(secondWebServer, !pipelineExistsAlready); + enqueueResponse(secondWebServer, 200, "{\"errors\": false}"); + + logger.info("--> exporting a second event"); + export(Collections.singletonList(newRandomMonitoringDoc())); + + assertMonitorVersion(secondWebServer); + + for (Tuple template : monitoringTemplates()) { + RecordedRequest recordedRequest = secondWebServer.takeRequest(); + assertThat(recordedRequest.getMethod(), equalTo("GET")); + assertThat(recordedRequest.getPath(), equalTo("/_template/" + template.v1() + resourceQueryString())); + + if (template.v1().contains(MonitoringBulkTimestampedResolver.Data.DATA) == false) { + recordedRequest = secondWebServer.takeRequest(); + assertThat(recordedRequest.getMethod(), equalTo("PUT")); + assertThat(recordedRequest.getPath(), equalTo("/_template/" + template.v1() + resourceQueryString())); + assertThat(recordedRequest.getBody().readUtf8(), equalTo(template.v2())); + } + } + assertMonitorPipelines(secondWebServer, !pipelineExistsAlready, null, null); + assertBulk(secondWebServer); + } + } + + public void testUnsupportedClusterVersion() throws Exception { + Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()); + + // returning an unsupported cluster version + enqueueGetClusterVersionResponse(randomFrom(Version.fromString("0.18.0"), Version.fromString("1.0.0"), + Version.fromString("1.4.0"), Version.fromString("2.4.0"))); + + String agentNode = internalCluster().startNode(builder); + + // fire off what should be an unsuccessful request + assertNull(getExporter(agentNode).openBulk()); + + assertThat(webServer.getRequestCount(), equalTo(1)); + + assertMonitorVersion(webServer); + } + + public void testDynamicIndexFormatChange() throws Exception { + final boolean templatesExistsAlready = randomBoolean(); + final boolean pipelineExistsAlready = randomBoolean(); + + Settings.Builder builder = Settings.builder() + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", webServerContainer.getFormattedAddress()); + + internalCluster().startNode(builder); + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); + enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); + + MonitoringDoc doc = newRandomMonitoringDoc(); + export(Collections.singletonList(doc)); + + assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready); + RecordedRequest recordedRequest = assertBulk(webServer); + + @SuppressWarnings("unchecked") + String indexName = new ResolversRegistry(Settings.EMPTY).getResolver(doc).index(doc); + + byte[] bytes = recordedRequest.getBody().readByteArray(); + Map data 
= XContentHelper.convertToMap(new BytesArray(bytes), false).v2(); + @SuppressWarnings("unchecked") + Map index = (Map) data.get("index"); + assertThat(index.get("_index"), equalTo(indexName)); + + String newTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM"); + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .put("xpack.monitoring.exporters._http.index.name.time_format", newTimeFormat))); + + enqueueGetClusterVersionResponse(Version.CURRENT); + enqueueTemplateAndPipelineResponses(webServer, true, true); + enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); + + doc = newRandomMonitoringDoc(); + export(Collections.singletonList(doc)); + + String expectedMonitoringIndex = ".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-" + + DateTimeFormat.forPattern(newTimeFormat).withZoneUTC().print(doc.getTimestamp()); + + assertMonitorResources(webServer, true, true); + recordedRequest = assertBulk(webServer); + + bytes = recordedRequest.getBody().readByteArray(); + data = XContentHelper.convertToMap(new BytesArray(bytes), false).v2(); + @SuppressWarnings("unchecked") + final Map newIndex = (Map) data.get("index"); + assertThat(newIndex.get("_index"), equalTo(expectedMonitoringIndex)); + } + + private void assertMonitorVersion(final MockWebServer webServer) throws Exception { + assertMonitorVersion(webServer, null, null); + } + + private void assertMonitorVersion(final MockWebServer webServer, + @Nullable final Map customHeaders, @Nullable final String basePath) + throws Exception { + final String pathPrefix = basePathToAssertablePrefix(basePath); + final RecordedRequest request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("GET")); + assertThat(request.getPath(), equalTo(pathPrefix + "/?filter_path=version.number")); + assertHeaders(request, customHeaders); + } + + private void assertMonitorResources(final MockWebServer webServer, + final boolean templateAlreadyExists, final boolean pipelineAlreadyExists) + throws Exception { + assertMonitorResources(webServer, templateAlreadyExists, pipelineAlreadyExists, null, null); + } + + private void assertMonitorResources(final MockWebServer webServer, + final boolean templateAlreadyExists, final boolean pipelineAlreadyExists, + @Nullable final Map customHeaders, @Nullable final String basePath) + throws Exception { + assertMonitorVersion(webServer, customHeaders, basePath); + assertMonitorTemplates(webServer, templateAlreadyExists, customHeaders, basePath); + assertMonitorPipelines(webServer, pipelineAlreadyExists, customHeaders, basePath); + } + + private void assertMonitorTemplates(final MockWebServer webServer, final boolean alreadyExists, + @Nullable final Map customHeaders, @Nullable final String basePath) + throws Exception { + final String pathPrefix = basePathToAssertablePrefix(basePath); + RecordedRequest request; + + for (Tuple template : monitoringTemplates()) { + request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("GET")); + assertThat(request.getPath(), equalTo(pathPrefix + "/_template/" + template.v1() + resourceQueryString())); + assertHeaders(request, customHeaders); + + if (alreadyExists == false) { + request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("PUT")); + assertThat(request.getPath(), equalTo(pathPrefix + "/_template/" + template.v1() + resourceQueryString())); + assertThat(request.getBody().readUtf8(), equalTo(template.v2())); + 
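+                // the custom headers must accompany the recreate (PUT) request just like the initial GET check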
assertHeaders(request, customHeaders); + } + } + } + + private void assertMonitorPipelines(final MockWebServer webServer, final boolean alreadyExists, + @Nullable final Map customHeaders, @Nullable final String basePath) + throws Exception { + final String pathPrefix = basePathToAssertablePrefix(basePath); + RecordedRequest request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("GET")); + assertThat(request.getPath(), equalTo(pathPrefix + "/_ingest/pipeline/" + Exporter.EXPORT_PIPELINE_NAME + resourceQueryString())); + assertHeaders(request, customHeaders); + + if (alreadyExists == false) { + request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("PUT")); + assertThat(request.getPath(), + equalTo(pathPrefix + "/_ingest/pipeline/" + Exporter.EXPORT_PIPELINE_NAME + resourceQueryString())); + assertThat(request.getBody().readUtf8(), equalTo(Exporter.emptyPipeline(XContentType.JSON).string())); + assertHeaders(request, customHeaders); + } + } + + private RecordedRequest assertBulk(final MockWebServer webServer) throws Exception { + return assertBulk(webServer, -1); + } + + private RecordedRequest assertBulk(final MockWebServer webServer, final int docs) throws Exception { + return assertBulk(webServer, docs, null, null); + } + + + private RecordedRequest assertBulk(final MockWebServer webServer, final int docs, + @Nullable final Map customHeaders, @Nullable final String basePath) + throws Exception { + final String pathPrefix = basePathToAssertablePrefix(basePath); + final RecordedRequest request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("POST")); + assertThat(request.getPath(), equalTo(pathPrefix + "/_bulk" + bulkQueryString())); + assertHeaders(request, customHeaders); + + if (docs != -1) { + assertBulkRequest(request.getBody(), docs); + } + + return request; + } + + private void assertHeaders(final RecordedRequest request, final Map customHeaders) { + if (customHeaders != null) { + for (final Map.Entry entry : customHeaders.entrySet()) { + final String header = entry.getKey(); + final String[] values = entry.getValue(); + + final List headerValues = request.getHeaders().values(header); + + assertThat(header, headerValues, hasSize(values.length)); + assertThat(header, headerValues, containsInAnyOrder(values)); + } + } + } + + private void export(Collection docs) throws Exception { + Exporters exporters = internalCluster().getInstance(Exporters.class); + assertThat(exporters, notNullValue()); + + // Wait for exporting bulks to be ready to export + assertBusy(() -> exporters.forEach(exporter -> assertThat(exporter.openBulk(), notNullValue()))); + exporters.export(docs); + } + + private HttpExporter getExporter(String nodeName) { + Exporters exporters = internalCluster().getInstance(Exporters.class, nodeName); + return (HttpExporter) exporters.iterator().next(); + } + + private MonitoringDoc newRandomMonitoringDoc() { + if (randomBoolean()) { + IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); + doc.setClusterUUID(internalCluster().getClusterName()); + doc.setTimestamp(System.currentTimeMillis()); + doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); + doc.setRecoveryResponse(new RecoveryResponse()); + return doc; + } else { + ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); + 
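+            // same cluster/node metadata as the recovery doc above, plus a cluster state and a GREEN health status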
doc.setClusterUUID(internalCluster().getClusterName()); + doc.setTimestamp(System.currentTimeMillis()); + doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); + doc.setClusterState(ClusterState.PROTO); + doc.setStatus(ClusterHealthStatus.GREEN); + return doc; + } + } + + private List newRandomMonitoringDocs(int nb) { + List docs = new ArrayList<>(nb); + for (int i = 0; i < nb; i++) { + docs.add(newRandomMonitoringDoc()); + } + return docs; + } + + private String basePathToAssertablePrefix(@Nullable final String basePath) { + if (basePath == null) { + return ""; + } + + return basePath.startsWith("/") == false ? "/" + basePath : basePath; + } + + private String resourceQueryString() { + return "?filter_path=" + urlEncode(FILTER_PATH_NONE); + } + + private String bulkQueryString() { + return "?pipeline=" + urlEncode(Exporter.EXPORT_PIPELINE_NAME) + "&filter_path=" + urlEncode("errors,items.*.error"); + } + + private String urlEncode(final String value) { + try { + return URLEncoder.encode(value, "UTF-8"); + } catch (UnsupportedEncodingException e) { + // whelp, our JVM is broken + throw new RuntimeException(e); + } + } + + private void enqueueGetClusterVersionResponse(Version v) throws IOException { + enqueueGetClusterVersionResponse(webServer, v); + } + + private void enqueueGetClusterVersionResponse(MockWebServer mockWebServer, Version v) throws IOException { + mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody( + jsonBuilder() + .startObject().startObject("version").field("number", v.toString()).endObject().endObject().bytes() + .utf8ToString())); + } + + private void enqueueTemplateAndPipelineResponses(final MockWebServer webServer, + final boolean templatesAlreadyExists, final boolean pipelineAlreadyExists) + throws IOException { + enqueueTemplateResponses(webServer, templatesAlreadyExists); + enqueuePipelineResponses(webServer, pipelineAlreadyExists); + } + + private void enqueueTemplateResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { + if (alreadyExists) { + enqueueTemplateResponsesExistsAlready(webServer); + } else { + enqueueTemplateResponsesDoesNotExistYet(webServer); + } + } + + private void enqueueTemplateResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException { + for (String template : monitoringTemplateNames()) { + enqueueResponse(webServer, 404, "template [" + template + "] does not exist"); + enqueueResponse(webServer, 201, "template [" + template + "] created"); + } + } + + private void enqueueTemplateResponsesExistsAlready(final MockWebServer webServer) throws IOException { + for (String template : monitoringTemplateNames()) { + enqueueResponse(webServer, 200, "template [" + template + "] exists"); + } + } + + private void enqueuePipelineResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { + if (alreadyExists) { + enqueuePipelineResponsesExistsAlready(webServer); + } else { + enqueuePipelineResponsesDoesNotExistYet(webServer); + } + } + + private void enqueuePipelineResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException { + enqueueResponse(webServer, 404, "pipeline [" + Exporter.EXPORT_PIPELINE_NAME + "] does not exist"); + enqueueResponse(webServer, 201, "pipeline [" + Exporter.EXPORT_PIPELINE_NAME + "] created"); + } + + private void enqueuePipelineResponsesExistsAlready(final MockWebServer webServer) throws IOException { + enqueueResponse(webServer, 200, "pipeline [" 
+ Exporter.EXPORT_PIPELINE_NAME + "] exists"); + } + + private void enqueueResponse(int responseCode, String body) throws IOException { + enqueueResponse(webServer, responseCode, body); + } + + private void enqueueResponse(MockWebServer mockWebServer, int responseCode, String body) throws IOException { + mockWebServer.enqueue(new MockResponse().setResponseCode(responseCode).setBody(body)); + } + + private void assertBulkRequest(Buffer requestBody, int numberOfActions) throws Exception { + BulkRequest bulkRequest = Requests.bulkRequest().add(new BytesArray(requestBody.readByteArray()), null, null); + assertThat(bulkRequest.numberOfActions(), equalTo(numberOfActions)); + for (ActionRequest actionRequest : bulkRequest.requests()) { + assertThat(actionRequest, instanceOf(IndexRequest.class)); + } + } +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java new file mode 100644 index 00000000000..1a8884a5db6 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java @@ -0,0 +1,382 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.StatusLine; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.Version; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyMapOf; +import static org.mockito.Matchers.eq; +import static org.mockito.Matchers.startsWith; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +/** + * Tests {@link HttpExporter} explicitly for its resource handling. 
+ */ +public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTestCase { + + private final int EXPECTED_TEMPLATES = 3; + + private final RestClient client = mock(RestClient.class); + private final Response versionResponse = mock(Response.class); + + private final MultiHttpResource resources = + HttpExporter.createResources(new Exporter.Config("_http", "http", Settings.EMPTY), new ResolversRegistry(Settings.EMPTY)); + + public void testInvalidVersionBlocks() throws IOException { + final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"unknown\"}}", ContentType.APPLICATION_JSON); + + when(versionResponse.getEntity()).thenReturn(entity); + when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse); + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become clean + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyNoMoreInteractions(client); + } + + public void testTemplateCheckBlocksAfterSuccessfulVersion() throws IOException { + final Exception exception = failureGetException(); + final boolean firstSucceeds = randomBoolean(); + int expectedGets = 1; + int expectedPuts = 0; + + whenValidVersionResponse(); + + // failure in the middle of various templates being checked/published; suggests a node dropped + if (firstSucceeds) { + final boolean successfulFirst = randomBoolean(); + // -2 from one success + a necessary failure after it! + final int extraPasses = randomIntBetween(0, EXPECTED_TEMPLATES - 2); + final int successful = randomIntBetween(0, extraPasses); + final int unsuccessful = extraPasses - successful; + + final Response first = successfulFirst ? successfulGetResponse() : unsuccessfulGetResponse(); + + final List otherResponses = getResponses(successful, unsuccessful); + + // last check fails implies that N - 2 publishes succeeded! + when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class))) + .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()])) + .thenThrow(exception); + whenSuccessfulPutTemplates(otherResponses.size() + 1); + + expectedGets += 1 + successful + unsuccessful; + expectedPuts = (successfulFirst ? 0 : 1) + unsuccessful; + } else { + when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class))) + .thenThrow(exception); + } + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTemplates(expectedGets); + verifyPutTemplates(expectedPuts); + verifyNoMoreInteractions(client); + } + + public void testTemplatePublishBlocksAfterSuccessfulVersion() throws IOException { + final Exception exception = failurePutException(); + final boolean firstSucceeds = randomBoolean(); + int expectedGets = 1; + int expectedPuts = 1; + + whenValidVersionResponse(); + + // failure in the middle of various templates being checked/published; suggests a node dropped + if (firstSucceeds) { + final Response firstSuccess = successfulPutResponse(); + // -2 from one success + a necessary failure after it! 
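+            // (the first PUT below is forced to succeed and the last attempted PUT throws, so only
+            //  EXPECTED_TEMPLATES - 2 further template outcomes are left to randomize)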
+ final int extraPasses = randomIntBetween(0, EXPECTED_TEMPLATES - 2); + final int successful = randomIntBetween(0, extraPasses); + final int unsuccessful = extraPasses - successful; + + final List otherResponses = successfulPutResponses(unsuccessful); + + // first one passes for sure, so we need an extra "unsuccessful" GET + whenGetTemplates(successful, unsuccessful + 2); + + // previous publishes must have succeeded + when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()])) + .thenThrow(exception); + + // GETs required for each PUT attempt (first is guaranteed "unsuccessful") + expectedGets += successful + unsuccessful + 1; + // unsuccessful are PUT attempts + the guaranteed successful PUT (first) + expectedPuts += unsuccessful + 1; + } else { + // fail the check so that it has to attempt the PUT + whenGetTemplates(0, 1); + + when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenThrow(exception); + } + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTemplates(expectedGets); + verifyPutTemplates(expectedPuts); + verifyNoMoreInteractions(client); + } + + public void testPipelineCheckBlocksAfterSuccessfulTemplates() throws IOException { + final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); + final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; + final Exception exception = failureGetException(); + + whenValidVersionResponse(); + whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); + whenSuccessfulPutTemplates(EXPECTED_TEMPLATES); + + // we only expect a single pipeline for now + when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class))) + .thenThrow(exception); + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTemplates(EXPECTED_TEMPLATES); + verifyPutTemplates(unsuccessfulGetTemplates); + verifyGetPipelines(1); + verifyPutPipelines(0); + verifyNoMoreInteractions(client); + } + + public void testPipelinePublishBlocksAfterSuccessfulTemplates() throws IOException { + final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); + final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; + final Exception exception = failurePutException(); + + whenValidVersionResponse(); + whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); + whenSuccessfulPutTemplates(EXPECTED_TEMPLATES); + // pipeline can't be there + whenGetPipelines(0, 1); + + // we only expect a single pipeline for now + when(client.performRequest(eq("PUT"), + startsWith("/_ingest/pipeline/"), + anyMapOf(String.class, String.class), + any(HttpEntity.class))) + .thenThrow(exception); + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTemplates(EXPECTED_TEMPLATES); + verifyPutTemplates(unsuccessfulGetTemplates); + verifyGetPipelines(1); + verifyPutPipelines(1); + verifyNoMoreInteractions(client); + } 
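+
+    /*
+     * A concrete happy-path instance of what testSuccessfulChecks() below randomizes (every helper
+     * referenced here is defined in this class, so nothing outside the test is assumed):
+     *
+     *   whenValidVersionResponse();
+     *   whenGetTemplates(0, EXPECTED_TEMPLATES);        // no template exists yet
+     *   whenSuccessfulPutTemplates(EXPECTED_TEMPLATES); // so every template gets published
+     *   whenGetPipelines(0, 1);                         // the pipeline is missing too
+     *   whenSuccessfulPutPipelines(1);
+     *
+     *   assertTrue(resources.checkAndPublish(client));  // every check/publish succeeds ...
+     *   assertFalse(resources.isDirty());               // ... so the resources are no longer dirty
+     */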
+ + public void testSuccessfulChecks() throws IOException { + final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); + final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; + final int successfulGetPipelines = randomIntBetween(0, 1); + final int unsuccessfulGetPipelines = 1 - successfulGetPipelines; + + whenValidVersionResponse(); + whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); + whenSuccessfulPutTemplates(unsuccessfulGetTemplates); + whenGetPipelines(successfulGetPipelines, unsuccessfulGetPipelines); + whenSuccessfulPutPipelines(1); + + assertTrue(resources.isDirty()); + + // it should be able to proceed! + assertTrue(resources.checkAndPublish(client)); + assertFalse(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTemplates(EXPECTED_TEMPLATES); + verifyPutTemplates(unsuccessfulGetTemplates); + verifyGetPipelines(1); + verifyPutPipelines(unsuccessfulGetPipelines); + verifyNoMoreInteractions(client); + } + + private Exception failureGetException() { + final ResponseException responseException = responseException("GET", "/_get_something", failedCheckStatus()); + + return randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException); + } + + private Exception failurePutException() { + final ResponseException responseException = responseException("PUT", "/_put_something", failedPublishStatus()); + + return randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException); + } + + private Response successfulGetResponse() { + return response("GET", "/_get_something", successfulCheckStatus()); + } + + private Response unsuccessfulGetResponse() { + return response("GET", "/_get_something", notFoundCheckStatus()); + } + + private List getResponses(final int successful, final int unsuccessful) { + final List responses = new ArrayList<>(successful); + + for (int i = 0; i < successful; ++i) { + responses.add(successfulGetResponse()); + } + + for (int i = 0; i < unsuccessful; ++i) { + responses.add(unsuccessfulGetResponse()); + } + + return responses; + } + + private Response successfulPutResponse() { + final Response response = mock(Response.class); + final StatusLine statusLine = mock(StatusLine.class); + + when(response.getStatusLine()).thenReturn(statusLine); + when(statusLine.getStatusCode()).thenReturn(randomFrom(RestStatus.OK, RestStatus.CREATED).getStatus()); + + return response; + } + + private List successfulPutResponses(final int successful) { + final List responses = new ArrayList<>(successful); + + for (int i = 0; i < successful; ++i) { + responses.add(successfulPutResponse()); + } + + return responses; + } + + private void whenValidVersionResponse() throws IOException { + final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"" + Version.CURRENT + "\"}}", ContentType.APPLICATION_JSON); + + when(versionResponse.getEntity()).thenReturn(entity); + when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse); + } + + private void whenGetTemplates(final int successful, final int unsuccessful) throws IOException { + final List gets = getResponses(successful, unsuccessful); + + if (gets.size() == 1) { + when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0)); + } else { + when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0), gets.subList(1, 
gets.size()).toArray(new Response[gets.size() - 1])); + } + } + + private void whenSuccessfulPutTemplates(final int successful) throws IOException { + final List successfulPuts = successfulPutResponses(successful); + + // empty is possible if they all exist + if (successful == 1) { + when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0)); + } else if (successful > 1) { + when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1])); + } + } + + private void whenGetPipelines(final int successful, final int unsuccessful) throws IOException { + final List gets = getResponses(successful, unsuccessful); + + if (gets.size() == 1) { + when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0)); + } else { + when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1])); + } + } + + private void whenSuccessfulPutPipelines(final int successful) throws IOException { + final List successfulPuts = successfulPutResponses(successful); + + // empty is possible if they all exist + if (successful == 1) { + when(client.performRequest(eq("PUT"), + startsWith("/_ingest/pipeline/"), + anyMapOf(String.class, String.class), + any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0)); + } else if (successful > 1) { + when(client.performRequest(eq("PUT"), + startsWith("/_ingest/pipeline/"), + anyMapOf(String.class, String.class), + any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1])); + } + } + + private void verifyVersionCheck() throws IOException { + verify(client).performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class)); + } + + private void verifyGetTemplates(final int called) throws IOException { + verify(client, times(called)).performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)); + } + + private void verifyPutTemplates(final int called) throws IOException { + verify(client, times(called)).performRequest(eq("PUT"), // method + startsWith("/_template/"), // endpoint + anyMapOf(String.class, String.class), // parameters (e.g., timeout) + any(HttpEntity.class)); // raw template + } + + private void verifyGetPipelines(final int called) throws IOException { + verify(client, times(called)).performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)); + } + + private void verifyPutPipelines(final int called) throws IOException { + verify(client, times(called)).performRequest(eq("PUT"), // method + startsWith("/_ingest/pipeline/"), // endpoint + anyMapOf(String.class, String.class), // parameters (e.g., timeout) + any(HttpEntity.class)); // raw template + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSimpleTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSimpleTests.java deleted file mode 100644 index 65fb608d321..00000000000 --- 
a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSimpleTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.monitoring.exporter.http; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsException; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.monitoring.exporter.Exporter; -import org.elasticsearch.xpack.ssl.SSLService; - -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; - -/** - * Tests for {@link HttpExporter}. - */ -public class HttpExporterSimpleTests extends ESTestCase { - - private final Environment environment = mock(Environment.class); - - public void testExporterWithBlacklistedHeaders() { - final String blacklistedHeader = randomFrom(HttpExporter.BLACKLISTED_HEADERS); - final String expected = "[" + blacklistedHeader + "] cannot be overwritten via [xpack.monitoring.exporters._http.headers]"; - final Settings.Builder builder = Settings.builder() - .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) - .put("xpack.monitoring.exporters._http.host", "http://localhost:9200") - .put("xpack.monitoring.exporters._http.headers.abc", "xyz") - .put("xpack.monitoring.exporters._http.headers." + blacklistedHeader, "value should not matter"); - - if (randomBoolean()) { - builder.put("xpack.monitoring.exporters._http.headers.xyz", "abc"); - } - - final Exporter.Config config = createConfig("_http", builder.build()); - - final SettingsException exception = expectThrows(SettingsException.class, () -> { - new HttpExporter(config, environment, new SSLService(builder.build(), environment)); - }); - - assertThat(exception.getMessage(), equalTo(expected)); - } - - public void testExporterWithEmptyHeaders() { - final String name = randomFrom("abc", "ABC", "X-Flag"); - final String expected = "headers must have values, missing for setting [xpack.monitoring.exporters._http.headers." + name + "]"; - final Settings.Builder builder = Settings.builder() - .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) - .put("xpack.monitoring.exporters._http.host", "localhost:9200") - .put("xpack.monitoring.exporters._http.headers." + name, ""); - - if (randomBoolean()) { - builder.put("xpack.monitoring.exporters._http.headers.xyz", "abc"); - } - - final Exporter.Config config = createConfig("_http", builder.build()); - - final SettingsException exception = expectThrows(SettingsException.class, () -> { - new HttpExporter(config, environment, new SSLService(builder.build(), environment)); - }); - - assertThat(exception.getMessage(), equalTo(expected)); - } - - public void testExporterWithMissingHost() { - // forgot host! 
- final Settings.Builder builder = Settings.builder() - .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE); - - if (randomBoolean()) { - builder.put("xpack.monitoring.exporters._http.host", ""); - } else if (randomBoolean()) { - builder.putArray("xpack.monitoring.exporters._http.host"); - } else if (randomBoolean()) { - builder.putNull("xpack.monitoring.exporters._http.host"); - } - - final Exporter.Config config = createConfig("_http", builder.build()); - - final SettingsException exception = expectThrows(SettingsException.class, () -> { - new HttpExporter(config, environment, new SSLService(builder.build(), environment)); - }); - - assertThat(exception.getMessage(), equalTo("missing required setting [xpack.monitoring.exporters._http.host]")); - } - - public void testExporterWithInvalidHost() { - final String invalidHost = randomFrom("://localhost:9200", "gopher!://xyz.my.com"); - - final Settings.Builder builder = Settings.builder() - .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE); - - // sometimes add a valid URL with it - if (randomBoolean()) { - if (randomBoolean()) { - builder.putArray("xpack.monitoring.exporters._http.host", "localhost:9200", invalidHost); - } else { - builder.putArray("xpack.monitoring.exporters._http.host", invalidHost, "localhost:9200"); - } - } else { - builder.put("xpack.monitoring.exporters._http.host", invalidHost); - } - - final Exporter.Config config = createConfig("_http", builder.build()); - - final SettingsException exception = expectThrows(SettingsException.class, () -> { - new HttpExporter(config, environment, new SSLService(builder.build(), environment)); - }); - - assertThat(exception.getMessage(), equalTo("[xpack.monitoring.exporters._http.host] invalid host: [" + invalidHost + "]")); - } - - public void testExporterWithHostOnly() { - final Settings.Builder builder = Settings.builder() - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", "http://localhost:9200"); - - final Exporter.Config config = createConfig("_http", builder.build()); - - new HttpExporter(config, environment, new SSLService(builder.build(), environment)); - } - - /** - * Create the {@link Exporter.Config} with the given name, and select those settings from {@code settings}. - * - * @param name The name of the exporter. - * @param settings The settings to select the exporter's settings from - * @return Never {@code null}. - */ - private static Exporter.Config createConfig(String name, Settings settings) { - return new Exporter.Config(name, HttpExporter.TYPE, Settings.EMPTY, settings.getAsSettings("xpack.monitoring.exporters." + name)); - } - -} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTemplateTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTemplateTests.java deleted file mode 100644 index 5862163a331..00000000000 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTemplateTests.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.monitoring.exporter.http; - -import com.squareup.okhttp.mockwebserver.Dispatcher; -import com.squareup.okhttp.mockwebserver.MockResponse; -import com.squareup.okhttp.mockwebserver.MockWebServer; -import com.squareup.okhttp.mockwebserver.RecordedRequest; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.xpack.monitoring.exporter.AbstractExporterTemplateTestCase; -import org.elasticsearch.xpack.monitoring.exporter.Exporter; -import org.junit.After; -import org.junit.Before; - -import java.net.BindException; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.core.Is.is; - -public class HttpExporterTemplateTests extends AbstractExporterTemplateTestCase { - - private MockWebServer webServer; - private MockServerDispatcher dispatcher; - - @Before - public void startWebServer() throws Exception { - for (int webPort = 9250; webPort < 9300; webPort++) { - try { - webServer = new MockWebServer(); - dispatcher = new MockServerDispatcher(); - webServer.setDispatcher(dispatcher); - webServer.start(webPort); - return; - } catch (BindException be) { - logger.warn("port [{}] was already in use trying next port", webPort); - } - } - throw new ElasticsearchException("unable to find open port between 9200 and 9300"); - } - - @After - public void stopWebServer() throws Exception { - webServer.shutdown(); - } - - @Override - protected Settings exporterSettings() { - return Settings.builder() - .put("type", "http") - .put("host", webServer.getHostName() + ":" + webServer.getPort()) - .put("connection.keep_alive", false) - .put(Exporter.INDEX_NAME_TIME_FORMAT_SETTING, "YYYY") - .build(); - } - - @Override - protected void deleteTemplates() throws Exception { - dispatcher.templates.clear(); - } - - @Override - protected void deletePipeline() throws Exception { - dispatcher.pipelines.clear(); - } - - @Override - protected void putTemplate(String name) throws Exception { - dispatcher.templates.put(name, generateTemplateSource(name)); - } - - @Override - protected void putPipeline(String name) throws Exception { - dispatcher.pipelines.put(name, Exporter.emptyPipeline(XContentType.JSON).bytes()); - } - - @Override - protected void assertTemplateExists(String name) throws Exception { - assertThat("failed to find a template matching [" + name + "]", dispatcher.templates.containsKey(name), is(true)); - } - - @Override - protected void assertPipelineExists(String name) throws Exception { - assertThat("failed to find a pipeline matching [" + name + "]", dispatcher.pipelines.containsKey(name), is(true)); - } - - @Override - protected void assertTemplateNotUpdated(String name) throws Exception { - // Checks that no PUT Template request has been made - assertThat(dispatcher.hasRequest("PUT", "/_template/" + name), is(false)); - - // Checks that the current template exists - assertThat(dispatcher.templates.containsKey(name), is(true)); - } - - @Override - protected void assertPipelineNotUpdated(String name) 
throws Exception { - // Checks that no PUT pipeline request has been made - assertThat(dispatcher.hasRequest("PUT", "/_ingest/pipeline/" + name), is(false)); - - // Checks that the current pipeline exists - assertThat(dispatcher.pipelines.containsKey(name), is(true)); - } - - @Override - protected void awaitIndexExists(String index) throws Exception { - Runnable busy = () -> assertThat("could not find index " + index, dispatcher.hasIndex(index), is(true)); - assertBusy(busy, 10, TimeUnit.SECONDS); - } - - class MockServerDispatcher extends Dispatcher { - - private final MockResponse NOT_FOUND = newResponse(404, ""); - - private final Set requests = new HashSet<>(); - private final Map templates = ConcurrentCollections.newConcurrentMap(); - private final Map pipelines = ConcurrentCollections.newConcurrentMap(); - private final Set indices = ConcurrentCollections.newConcurrentSet(); - - @Override - public MockResponse dispatch(RecordedRequest request) throws InterruptedException { - final String requestLine = request.getRequestLine(); - requests.add(requestLine); - - // Cluster version - if ("GET / HTTP/1.1".equals(requestLine)) { - return newResponse(200, "{\"version\": {\"number\": \"" + Version.CURRENT.toString() + "\"}}"); - // Bulk - } else if ("POST".equals(request.getMethod()) && request.getPath().startsWith("/_bulk")) { - // Parse the bulk request and extract all index names - try { - BulkRequest bulk = new BulkRequest(); - byte[] source = request.getBody().readByteArray(); - bulk.add(source, 0, source.length); - for (ActionRequest docRequest : bulk.requests()) { - if (docRequest instanceof IndexRequest) { - indices.add(((IndexRequest) docRequest).index()); - } - } - } catch (Exception e) { - return newResponse(500, e.getMessage()); - } - return newResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - // Templates and Pipelines - } else if ("GET".equals(request.getMethod()) || "PUT".equals(request.getMethod())) { - final String[] paths = request.getPath().split("/"); - - if (paths.length > 2) { - // Templates - if ("_template".equals(paths[1])) { - // _template/{name} - return newResponseForType(templates, request, paths[2]); - } else if ("_ingest".equals(paths[1])) { - // _ingest/pipeline/{name} - return newResponseForType(pipelines, request, paths[3]); - } - } - } - return newResponse(500, "MockServerDispatcher does not support: " + request.getRequestLine()); - } - - private MockResponse newResponseForType(Map type, RecordedRequest request, String name) { - final boolean exists = type.containsKey(name); - - if ("GET".equals(request.getMethod())) { - return exists ? newResponse(200, type.get(name).utf8ToString()) : NOT_FOUND; - } else if ("PUT".equals(request.getMethod())) { - type.put(name, new BytesArray(request.getMethod())); - return exists ? 
newResponse(200, "updated") : newResponse(201, "created"); - } - - return newResponse(500, request.getMethod() + " " + request.getPath() + " is not supported"); - } - - MockResponse newResponse(int code, String body) { - return new MockResponse().setResponseCode(code).setBody(body); - } - - int countRequests(String method, String path) { - int count = 0; - for (String request : requests) { - if (request.startsWith(method + " " + path)) { - count += 1; - } - } - return count; - } - - boolean hasRequest(String method, String path) { - return countRequests(method, path) > 0; - } - - boolean hasIndex(String index) { - return indices.contains(index); - } - } -} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index f1a01e2bab8..60738de74c1 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -5,606 +5,422 @@ */ package org.elasticsearch.xpack.monitoring.exporter.http; -import com.squareup.okhttp.mockwebserver.MockResponse; -import com.squareup.okhttp.mockwebserver.MockWebServer; -import com.squareup.okhttp.mockwebserver.QueueDispatcher; -import com.squareup.okhttp.mockwebserver.RecordedRequest; -import okio.Buffer; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.client.Requests; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesArray; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.sniff.Sniffer; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.LocalTransportAddress; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.xpack.monitoring.MonitoredSystem; -import org.elasticsearch.xpack.monitoring.MonitoringSettings; -import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStateMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryMonitoringDoc; +import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.monitoring.exporter.Exporter; -import org.elasticsearch.xpack.monitoring.exporter.Exporters; -import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; -import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; -import 
org.elasticsearch.xpack.monitoring.resolver.bulk.MonitoringBulkTimestampedResolver; -import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; -import org.joda.time.format.DateTimeFormat; -import org.junit.After; -import org.junit.Before; +import org.elasticsearch.xpack.monitoring.exporter.Exporter.Config; +import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; +import org.elasticsearch.xpack.ssl.SSLService; + +import org.mockito.InOrder; import java.io.IOException; -import java.net.BindException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyMapOf; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +/** + * Tests {@link HttpExporter}. + */ +public class HttpExporterTests extends ESTestCase { + + private final SSLService sslService = mock(SSLService.class); + + public void testExporterWithBlacklistedHeaders() { + final String blacklistedHeader = randomFrom(HttpExporter.BLACKLISTED_HEADERS); + final String expected = "[" + blacklistedHeader + "] cannot be overwritten via [xpack.monitoring.exporters._http.headers]"; + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) + .put("xpack.monitoring.exporters._http.host", "http://localhost:9200") + .put("xpack.monitoring.exporters._http.headers.abc", "xyz") + .put("xpack.monitoring.exporters._http.headers." 
+ blacklistedHeader, "value should not matter"); -@ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) -public class HttpExporterTests extends MonitoringIntegTestCase { - - private int webPort; - private MockWebServer webServer; - - @Before - public void startWebservice() throws Exception { - for (webPort = 9250; webPort < 9300; webPort++) { - try { - webServer = new MockWebServer(); - QueueDispatcher dispatcher = new QueueDispatcher(); - dispatcher.setFailFast(true); - webServer.setDispatcher(dispatcher); - webServer.start(webPort); - return; - } catch (BindException be) { - logger.warn("port [{}] was already in use trying next port", webPort); - } - } - throw new ElasticsearchException("unable to find open port between 9200 and 9300"); - } - - @After - public void cleanup() throws Exception { - webServer.shutdown(); - } - - private int expectedTemplateAndPipelineCalls(final boolean templateAlreadyExists, final boolean pipelineAlreadyExists) { - return expectedTemplateCalls(templateAlreadyExists) + expectedPipelineCalls(pipelineAlreadyExists); - } - - private int expectedTemplateCalls(final boolean alreadyExists) { - return monitoringTemplates().size() * (alreadyExists ? 1 : 2); - } - - private int expectedPipelineCalls(final boolean alreadyExists) { - return alreadyExists ? 1 : 2; - } - - private void assertMonitorVersion(final MockWebServer webServer) throws Exception { - assertMonitorVersion(webServer, null); - } - - private void assertMonitorVersion(final MockWebServer webServer, @Nullable final Map customHeaders) - throws Exception { - RecordedRequest request = webServer.takeRequest(); - - assertThat(request.getMethod(), equalTo("GET")); - assertThat(request.getPath(), equalTo("/")); - assertHeaders(request, customHeaders); - } - - private void assertMonitorTemplatesAndPipeline(final MockWebServer webServer, - final boolean templateAlreadyExists, final boolean pipelineAlreadyExists) - throws Exception { - assertMonitorTemplatesAndPipeline(webServer, templateAlreadyExists, pipelineAlreadyExists, null); - } - - private void assertMonitorTemplatesAndPipeline(final MockWebServer webServer, - final boolean templateAlreadyExists, final boolean pipelineAlreadyExists, - @Nullable final Map customHeaders) throws Exception { - assertMonitorVersion(webServer, customHeaders); - assertMonitorTemplates(webServer, templateAlreadyExists, customHeaders); - assertMonitorPipelines(webServer, pipelineAlreadyExists, customHeaders); - } - - private void assertMonitorTemplates(final MockWebServer webServer, final boolean alreadyExists, - @Nullable final Map customHeaders) throws Exception { - RecordedRequest request; - - for (Map.Entry template : monitoringTemplates().entrySet()) { - request = webServer.takeRequest(); - - assertThat(request.getMethod(), equalTo("GET")); - assertThat(request.getPath(), equalTo("/_template/" + template.getKey())); - assertHeaders(request, customHeaders); - - if (alreadyExists == false) { - request = webServer.takeRequest(); - - assertThat(request.getMethod(), equalTo("PUT")); - assertThat(request.getPath(), equalTo("/_template/" + template.getKey())); - assertThat(request.getBody().readUtf8(), equalTo(template.getValue())); - assertHeaders(request, customHeaders); - } - } - } - - private void assertMonitorPipelines(final MockWebServer webServer, final boolean alreadyExists, - @Nullable final Map customHeaders) throws Exception { - RecordedRequest request = webServer.takeRequest(); - - 
assertThat(request.getMethod(), equalTo("GET")); - assertThat(request.getPath(), equalTo("/_ingest/pipeline/" + Exporter.EXPORT_PIPELINE_NAME)); - assertHeaders(request, customHeaders); - - if (alreadyExists == false) { - request = webServer.takeRequest(); - - assertThat(request.getMethod(), equalTo("PUT")); - assertThat(request.getPath(), equalTo("/_ingest/pipeline/" + Exporter.EXPORT_PIPELINE_NAME)); - assertThat(request.getBody().readUtf8(), equalTo(Exporter.emptyPipeline(XContentType.JSON).string())); - assertHeaders(request, customHeaders); - } - } - - private RecordedRequest assertBulk(final MockWebServer webServer) throws Exception { - return assertBulk(webServer, -1); - } - - private RecordedRequest assertBulk(final MockWebServer webServer, final int docs) throws Exception { - return assertBulk(webServer, docs, null); - } - - - private RecordedRequest assertBulk(final MockWebServer webServer, final int docs, @Nullable final Map customHeaders) - throws Exception { - RecordedRequest request = webServer.takeRequest(); - - assertThat(request.getMethod(), equalTo("POST")); - assertThat(request.getPath(), equalTo("/_bulk?pipeline=" + Exporter.EXPORT_PIPELINE_NAME)); - assertHeaders(request, customHeaders); - - if (docs != -1) { - assertBulkRequest(request.getBody(), docs); - } - - return request; - } - - private void assertHeaders(final RecordedRequest request, final Map customHeaders) { - if (customHeaders != null) { - for (final Map.Entry entry : customHeaders.entrySet()) { - final String header = entry.getKey(); - final String[] values = entry.getValue(); - - final List headerValues = request.getHeaders().values(header); - - assertThat(header, headerValues, hasSize(values.length)); - assertThat(header, headerValues, containsInAnyOrder(values)); - } - } - } - - public void testExport() throws Exception { - final boolean templatesExistsAlready = randomBoolean(); - final boolean pipelineExistsAlready = randomBoolean(); - final int expectedTemplateAndPipelineCalls = expectedTemplateAndPipelineCalls(templatesExistsAlready, pipelineExistsAlready); - - enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); - enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false) - .put("xpack.monitoring.exporters._http.update_mappings", false); - - internalCluster().startNode(builder); - - final int nbDocs = randomIntBetween(1, 25); - export(newRandomMonitoringDocs(nbDocs)); - - assertThat(webServer.getRequestCount(), equalTo(2 + expectedTemplateAndPipelineCalls)); - assertMonitorTemplatesAndPipeline(webServer, templatesExistsAlready, pipelineExistsAlready); - assertBulk(webServer, nbDocs); - } - - public void testExportWithHeaders() throws Exception { - final boolean templatesExistsAlready = randomBoolean(); - final boolean pipelineExistsAlready = randomBoolean(); - final int expectedTemplateAndPipelineCalls = expectedTemplateAndPipelineCalls(templatesExistsAlready, pipelineExistsAlready); - - final String headerValue = randomAsciiOfLengthBetween(3, 9); - final String[] array = generateRandomStringArray(2, 4, false); - - final Map headers = new HashMap<>(); - - 
headers.put("X-Cloud-Cluster", new String[] { headerValue }); - headers.put("X-Found-Cluster", new String[] { headerValue }); - headers.put("Array-Check", array); - - enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); - enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false) - .put("xpack.monitoring.exporters._http.update_mappings", false) - .put("xpack.monitoring.exporters._http.headers.X-Cloud-Cluster", headerValue) - .put("xpack.monitoring.exporters._http.headers.X-Found-Cluster", headerValue) - .putArray("xpack.monitoring.exporters._http.headers.Array-Check", array); - - internalCluster().startNode(builder); - - final int nbDocs = randomIntBetween(1, 25); - export(newRandomMonitoringDocs(nbDocs)); - - assertThat(webServer.getRequestCount(), equalTo(2 + expectedTemplateAndPipelineCalls)); - assertMonitorTemplatesAndPipeline(webServer, templatesExistsAlready, pipelineExistsAlready); - assertBulk(webServer, nbDocs, headers); - } - - public void testDynamicHostChange() { - // disable exporting to be able to use non valid hosts - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", "test0"); - - String nodeName = internalCluster().startNode(builder); - - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .putArray("xpack.monitoring.exporters._http.host", "test1"))); - assertThat(getExporter(nodeName).hosts, arrayContaining("test1")); - - // wipes the non array settings - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .putArray("xpack.monitoring.exporters._http.host", "test2") - .put("xpack.monitoring.exporters._http.host", ""))); - assertThat(getExporter(nodeName).hosts, arrayContaining("test2")); - - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .putArray("xpack.monitoring.exporters._http.host", "test3"))); - assertThat(getExporter(nodeName).hosts, arrayContaining("test3")); - } - - public void testHostChangeReChecksTemplate() throws Exception { - final boolean templatesExistsAlready = randomBoolean(); - final boolean pipelineExistsAlready = randomBoolean(); - final int expectedTemplateAndPipelineCalls = expectedTemplateAndPipelineCalls(templatesExistsAlready, pipelineExistsAlready); - - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false) - .put("xpack.monitoring.exporters._http.update_mappings", false); - - enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); - enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - String agentNode = 
internalCluster().startNode(builder); - - HttpExporter exporter = getExporter(agentNode); - assertThat(exporter.supportedClusterVersion, is(false)); - export(Collections.singletonList(newRandomMonitoringDoc())); - - assertThat(exporter.supportedClusterVersion, is(true)); - assertThat(webServer.getRequestCount(), equalTo(2 + expectedTemplateAndPipelineCalls)); - assertMonitorTemplatesAndPipeline(webServer, templatesExistsAlready, pipelineExistsAlready); - assertBulk(webServer); - - MockWebServer secondWebServer = null; - int secondWebPort; - - try { - final int expectedPipelineCalls = expectedPipelineCalls(!pipelineExistsAlready); - - for (secondWebPort = 9250; secondWebPort < 9300; secondWebPort++) { - try { - secondWebServer = new MockWebServer(); - QueueDispatcher dispatcher = new QueueDispatcher(); - dispatcher.setFailFast(true); - secondWebServer.setDispatcher(dispatcher); - secondWebServer.start(secondWebPort); - break; - } catch (BindException be) { - logger.warn("port [{}] was already in use trying next port", secondWebPort); - } - } - - assertNotNull("Unable to start the second mock web server", secondWebServer); - - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( - Settings.builder().putArray("xpack.monitoring.exporters._http.host", - secondWebServer.getHostName() + ":" + secondWebServer.getPort())).get()); - - // a new exporter is created on update, so we need to re-fetch it - exporter = getExporter(agentNode); - - enqueueGetClusterVersionResponse(secondWebServer, Version.CURRENT); - for (String template : monitoringTemplates().keySet()) { - if (template.contains(MonitoringBulkTimestampedResolver.Data.DATA)) { - enqueueResponse(secondWebServer, 200, "template [" + template + "] exists"); - } else { - enqueueResponse(secondWebServer, 404, "template [" + template + "] does not exist"); - enqueueResponse(secondWebServer, 201, "template [" + template + "] created"); - } - } - enqueuePipelineResponses(secondWebServer, !pipelineExistsAlready); - enqueueResponse(secondWebServer, 200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - logger.info("--> exporting a second event"); - export(Collections.singletonList(newRandomMonitoringDoc())); - - assertThat(secondWebServer.getRequestCount(), equalTo(2 + monitoringTemplates().size() * 2 - 1 + expectedPipelineCalls)); - assertMonitorVersion(secondWebServer); - - for (Map.Entry template : monitoringTemplates().entrySet()) { - RecordedRequest recordedRequest = secondWebServer.takeRequest(); - assertThat(recordedRequest.getMethod(), equalTo("GET")); - assertThat(recordedRequest.getPath(), equalTo("/_template/" + template.getKey())); - - if (template.getKey().contains(MonitoringBulkTimestampedResolver.Data.DATA) == false) { - recordedRequest = secondWebServer.takeRequest(); - assertThat(recordedRequest.getMethod(), equalTo("PUT")); - assertThat(recordedRequest.getPath(), equalTo("/_template/" + template.getKey())); - assertThat(recordedRequest.getBody().readUtf8(), equalTo(template.getValue())); - } - } - assertMonitorPipelines(secondWebServer, !pipelineExistsAlready, null); - assertBulk(secondWebServer); - } finally { - if (secondWebServer != null) { - secondWebServer.shutdown(); - } - } - } - - public void testUnsupportedClusterVersion() throws Exception { - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", webServer.getHostName() + ":" + 
webServer.getPort()) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false); - - // returning an unsupported cluster version - enqueueGetClusterVersionResponse(randomFrom(Version.fromString("0.18.0"), Version.fromString("1.0.0"), - Version.fromString("1.4.0"))); - - String agentNode = internalCluster().startNode(builder); - - HttpExporter exporter = getExporter(agentNode); - assertThat(exporter.supportedClusterVersion, is(false)); - assertNull(exporter.openBulk()); - - assertThat(exporter.supportedClusterVersion, is(false)); - assertThat(webServer.getRequestCount(), equalTo(1)); - - assertMonitorVersion(webServer); - } - - public void testDynamicIndexFormatChange() throws Exception { - final boolean templatesExistsAlready = randomBoolean(); - final boolean pipelineExistsAlready = randomBoolean(); - final int expectedTemplateAndPipelineCalls = expectedTemplateAndPipelineCalls(templatesExistsAlready, pipelineExistsAlready); - - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false) - .put("xpack.monitoring.exporters._http.update_mappings", false); - - String agentNode = internalCluster().startNode(builder); - - enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueTemplateAndPipelineResponses(webServer, templatesExistsAlready, pipelineExistsAlready); - enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - HttpExporter exporter = getExporter(agentNode); - - MonitoringDoc doc = newRandomMonitoringDoc(); - export(Collections.singletonList(doc)); - - final int expectedRequests = 2 + expectedTemplateAndPipelineCalls; - assertThat(webServer.getRequestCount(), equalTo(expectedRequests)); - assertMonitorTemplatesAndPipeline(webServer, templatesExistsAlready, pipelineExistsAlready); - RecordedRequest recordedRequest = assertBulk(webServer); - - String indexName = exporter.getResolvers().getResolver(doc).index(doc); - - byte[] bytes = recordedRequest.getBody().readByteArray(); - Map data = XContentHelper.convertToMap(new BytesArray(bytes), false).v2(); - Map index = (Map) data.get("index"); - assertThat(index.get("_index"), equalTo(indexName)); - - String newTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM"); - assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() - .put("xpack.monitoring.exporters._http.index.name.time_format", newTimeFormat))); - - enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueTemplateAndPipelineResponses(webServer, true, true); - enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); - - doc = newRandomMonitoringDoc(); - export(Collections.singletonList(doc)); - - String expectedMonitoringIndex = ".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-" - + DateTimeFormat.forPattern(newTimeFormat).withZoneUTC().print(doc.getTimestamp()); - - final int expectedTemplatesAndPipelineExists = expectedTemplateAndPipelineCalls(true, true); - assertThat(webServer.getRequestCount(), equalTo(expectedRequests + 2 + expectedTemplatesAndPipelineExists)); - assertMonitorTemplatesAndPipeline(webServer, true, true); - recordedRequest = assertBulk(webServer); - - bytes = recordedRequest.getBody().readByteArray(); - data = XContentHelper.convertToMap(new 
BytesArray(bytes), false).v2(); - index = (Map) data.get("index"); - assertThat(index.get("_index"), equalTo(expectedMonitoringIndex)); - } - - public void testLoadRemoteClusterVersion() throws IOException { - final String host = webServer.getHostName() + ":" + webServer.getPort(); - - Settings.Builder builder = Settings.builder() - .put(MonitoringSettings.INTERVAL.getKey(), "-1") - .put("xpack.monitoring.exporters._http.type", "http") - .put("xpack.monitoring.exporters._http.host", host) - .put("xpack.monitoring.exporters._http.connection.keep_alive", false); - - String agentNode = internalCluster().startNode(builder); - HttpExporter exporter = getExporter(agentNode); - - enqueueGetClusterVersionResponse(Version.CURRENT); - Version resolved = exporter.loadRemoteClusterVersion(host); - assertTrue(resolved.equals(Version.CURRENT)); - - final Version expected = randomFrom(Version.CURRENT, Version.V_2_0_0_beta1, Version.V_2_0_0_beta2, Version.V_2_0_0_rc1, - Version.V_2_0_0, Version.V_2_1_0, Version.V_2_2_0, Version.V_2_3_0); - enqueueGetClusterVersionResponse(expected); - resolved = exporter.loadRemoteClusterVersion(host); - assertTrue(resolved.equals(expected)); - } - - private void export(Collection docs) throws Exception { - Exporters exporters = internalCluster().getInstance(Exporters.class); - assertThat(exporters, notNullValue()); - - // Wait for exporting bulks to be ready to export - assertBusy(() -> exporters.forEach(exporter -> assertThat(exporter.openBulk(), notNullValue()))); - exporters.export(docs); - } - - private HttpExporter getExporter(String nodeName) { - Exporters exporters = internalCluster().getInstance(Exporters.class, nodeName); - return (HttpExporter) exporters.iterator().next(); - } - - private MonitoringDoc newRandomMonitoringDoc() { if (randomBoolean()) { - IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); - doc.setClusterUUID(internalCluster().getClusterName()); - doc.setTimestamp(System.currentTimeMillis()); - doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); - doc.setRecoveryResponse(new RecoveryResponse()); - return doc; + builder.put("xpack.monitoring.exporters._http.headers.xyz", "abc"); + } + + final Config config = createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), equalTo(expected)); + } + + public void testExporterWithEmptyHeaders() { + final String name = randomFrom("abc", "ABC", "X-Flag"); + final String expected = "headers must have values, missing for setting [xpack.monitoring.exporters._http.headers." + name + "]"; + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) + .put("xpack.monitoring.exporters._http.host", "localhost:9200") + .put("xpack.monitoring.exporters._http.headers." 
+ name, ""); + + if (randomBoolean()) { + builder.put("xpack.monitoring.exporters._http.headers.xyz", "abc"); + } + + final Config config = createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), equalTo(expected)); + } + + public void testExporterWithPasswordButNoUsername() { + final String expected = + "[xpack.monitoring.exporters._http.auth.password] without [xpack.monitoring.exporters._http.auth.username]"; + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) + .put("xpack.monitoring.exporters._http.host", "localhost:9200") + .put("xpack.monitoring.exporters._http.auth.password", "_pass"); + + final Config config = createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), equalTo(expected)); + } + + public void testExporterWithMissingHost() { + // forgot host! + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE); + + if (randomBoolean()) { + builder.put("xpack.monitoring.exporters._http.host", ""); + } else if (randomBoolean()) { + builder.putArray("xpack.monitoring.exporters._http.host"); + } else if (randomBoolean()) { + builder.putNull("xpack.monitoring.exporters._http.host"); + } + + final Config config = createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), equalTo("missing required setting [xpack.monitoring.exporters._http.host]")); + } + + public void testExporterWithInconsistentSchemes() { + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE) + .putArray("xpack.monitoring.exporters._http.host", "http://localhost:9200", "https://localhost:9201"); + + final Config config = createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), + equalTo("[xpack.monitoring.exporters._http.host] must use a consistent scheme: http or https")); + } + + public void testExporterWithInvalidHost() { + final String invalidHost = randomFrom("://localhost:9200", "gopher!://xyz.my.com"); + + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", HttpExporter.TYPE); + + // sometimes add a valid URL with it + if (randomBoolean()) { + if (randomBoolean()) { + builder.putArray("xpack.monitoring.exporters._http.host", "localhost:9200", invalidHost); + } else { + builder.putArray("xpack.monitoring.exporters._http.host", invalidHost, "localhost:9200"); + } } else { - ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); - doc.setClusterUUID(internalCluster().getClusterName()); - doc.setTimestamp(System.currentTimeMillis()); - doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); - doc.setClusterState(ClusterState.PROTO); - doc.setStatus(ClusterHealthStatus.GREEN); - return doc; + builder.put("xpack.monitoring.exporters._http.host", invalidHost); + } + + final Config config = 
createConfig(builder.build()); + + final SettingsException exception = expectThrows(SettingsException.class, () -> new HttpExporter(config, sslService)); + + assertThat(exception.getMessage(), equalTo("[xpack.monitoring.exporters._http.host] invalid host: [" + invalidHost + "]")); + } + + public void testExporterWithHostOnly() throws Exception { + final SSLIOSessionStrategy sslStrategy = mock(SSLIOSessionStrategy.class); + when(sslService.sslIOSessionStrategy(any(Settings.class))).thenReturn(sslStrategy); + + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", "http://localhost:9200"); + + final Config config = createConfig(builder.build()); + + new HttpExporter(config, sslService).close(); + } + + public void testCreateRestClient() throws IOException { + final SSLIOSessionStrategy sslStrategy = mock(SSLIOSessionStrategy.class); + + when(sslService.sslIOSessionStrategy(any(Settings.class))).thenReturn(sslStrategy); + + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.host", "http://localhost:9200"); + + // use basic auth + if (randomBoolean()) { + builder.put("xpack.monitoring.exporters._http.auth.username", "_user") + .put("xpack.monitoring.exporters._http.auth.password", "_pass"); + } + + // use headers + if (randomBoolean()) { + builder.put("xpack.monitoring.exporters._http.headers.abc", "xyz"); + } + + final Config config = createConfig(builder.build()); + final NodeFailureListener listener = mock(NodeFailureListener.class); + + // doesn't explode + HttpExporter.createRestClient(config, sslService, listener).close(); + } + + public void testCreateSnifferDisabledByDefault() { + final Config config = createConfig(Settings.EMPTY); + final RestClient client = mock(RestClient.class); + final NodeFailureListener listener = mock(NodeFailureListener.class); + + assertThat(HttpExporter.createSniffer(config, client, listener), nullValue()); + + verifyZeroInteractions(client, listener); + } + + public void testCreateSnifferWithoutHosts() { + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http") + .put("xpack.monitoring.exporters._http.sniff.enabled", true); + + final Config config = createConfig(builder.build()); + final RestClient client = mock(RestClient.class); + final NodeFailureListener listener = mock(NodeFailureListener.class); + + expectThrows(IndexOutOfBoundsException.class, () -> HttpExporter.createSniffer(config, client, listener)); + } + + public void testCreateSniffer() throws IOException { + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http") + // it's a simple check: does it start with "https"? 
+ .put("xpack.monitoring.exporters._http.host", randomFrom("neither", "http", "https")) + .put("xpack.monitoring.exporters._http.sniff.enabled", true); + + final Config config = createConfig(builder.build()); + final RestClient client = mock(RestClient.class); + final NodeFailureListener listener = mock(NodeFailureListener.class); + final Response response = mock(Response.class); + final StringEntity entity = new StringEntity("{}", ContentType.APPLICATION_JSON); + + when(response.getEntity()).thenReturn(entity); + when(client.performRequest(eq("get"), eq("/_nodes/http"), anyMapOf(String.class, String.class))).thenReturn(response); + + try (final Sniffer sniffer = HttpExporter.createSniffer(config, client, listener)) { + assertThat(sniffer, not(nullValue())); + + verify(listener).setSniffer(sniffer); + } + + // it's a race whether it triggers this at all + verify(client, atMost(1)).performRequest(eq("get"), eq("/_nodes/http"), anyMapOf(String.class, String.class)); + + verifyNoMoreInteractions(client, listener); + } + + public void testCreateResources() { + final boolean useIngest = randomBoolean(); + final TimeValue templateTimeout = randomFrom(TimeValue.timeValueSeconds(30), null); + final TimeValue pipelineTimeout = randomFrom(TimeValue.timeValueSeconds(30), null); + + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http"); + + if (useIngest == false) { + builder.put("xpack.monitoring.exporters._http.use_ingest", false); + } + + if (templateTimeout != null) { + builder.put("xpack.monitoring.exporters._http.index.template.master_timeout", templateTimeout.toString()); + } + + // note: this shouldn't get used with useIngest == false, but it doesn't hurt to try to cause issues + if (pipelineTimeout != null) { + builder.put("xpack.monitoring.exporters._http.index.pipeline.master_timeout", pipelineTimeout.toString()); + } + + final Config config = createConfig(builder.build()); + + final MultiHttpResource multiResource = HttpExporter.createResources(config, new ResolversRegistry(config.settings())); + + final List resources = multiResource.getResources(); + final int version = (int)resources.stream().filter((resource) -> resource instanceof VersionHttpResource).count(); + final List templates = + resources.stream().filter((resource) -> resource instanceof TemplateHttpResource) + .map(TemplateHttpResource.class::cast) + .collect(Collectors.toList()); + final List pipelines = + resources.stream().filter((resource) -> resource instanceof PipelineHttpResource) + .map(PipelineHttpResource.class::cast) + .collect(Collectors.toList()); + + // expected number of resources + assertThat(multiResource.getResources().size(), equalTo(version + templates.size() + pipelines.size())); + assertThat(version, equalTo(1)); + assertThat(templates, hasSize(3)); + assertThat(pipelines, hasSize(useIngest ? 
1 : 0)); + + // timeouts + assertMasterTimeoutSet(templates, templateTimeout); + assertMasterTimeoutSet(pipelines, pipelineTimeout); + + // logging owner names + final List uniqueOwners = + resources.stream().map(HttpResource::getResourceOwnerName).distinct().collect(Collectors.toList()); + + assertThat(uniqueOwners, hasSize(1)); + assertThat(uniqueOwners.get(0), equalTo("xpack.monitoring.exporters._http")); + } + + public void testCreateDefaultParams() { + final TimeValue bulkTimeout = randomFrom(TimeValue.timeValueSeconds(30), null); + final boolean useIngest = randomBoolean(); + + final Settings.Builder builder = Settings.builder() + .put("xpack.monitoring.exporters._http.type", "http"); + + if (bulkTimeout != null) { + builder.put("xpack.monitoring.exporters._http.bulk.timeout", bulkTimeout.toString()); + } + + if (useIngest == false) { + builder.put("xpack.monitoring.exporters._http.use_ingest", false); + } + + final Config config = createConfig(builder.build()); + + final Map parameters = new HashMap<>(HttpExporter.createDefaultParams(config)); + + assertThat(parameters.remove("filter_path"), equalTo("errors,items.*.error")); + + if (bulkTimeout != null) { + assertThat(parameters.remove("master_timeout"), equalTo(bulkTimeout.toString())); + } + + if (useIngest) { + assertThat(parameters.remove("pipeline"), equalTo(Exporter.EXPORT_PIPELINE_NAME)); + } + + // should have removed everything + assertThat(parameters.size(), equalTo(0)); + } + + public void testHttpExporterDirtyResourcesBlock() throws Exception { + final Config config = createConfig(Settings.EMPTY); + final RestClient client = mock(RestClient.class); + final Sniffer sniffer = randomFrom(mock(Sniffer.class), null); + final NodeFailureListener listener = mock(NodeFailureListener.class); + final ResolversRegistry resolvers = mock(ResolversRegistry.class); + final HttpResource resource = new MockHttpResource(exporterName(), true, PublishableHttpResource.CheckResponse.ERROR, false); + + try (final HttpExporter exporter = new HttpExporter(config, client, sniffer, listener, resolvers, resource)) { + verify(listener).setResource(resource); + + assertThat(exporter.openBulk(), nullValue()); } } - private List newRandomMonitoringDocs(int nb) { - List docs = new ArrayList<>(nb); - for (int i = 0; i < nb; i++) { - docs.add(newRandomMonitoringDoc()); + public void testHttpExporter() throws Exception { + final Config config = createConfig(Settings.EMPTY); + final RestClient client = mock(RestClient.class); + final Sniffer sniffer = randomFrom(mock(Sniffer.class), null); + final NodeFailureListener listener = mock(NodeFailureListener.class); + final ResolversRegistry resolvers = mock(ResolversRegistry.class); + // sometimes dirty to start with and sometimes not; but always succeeds on checkAndPublish + final HttpResource resource = new MockHttpResource(exporterName(), randomBoolean()); + + try (final HttpExporter exporter = new HttpExporter(config, client, sniffer, listener, resolvers, resource)) { + verify(listener).setResource(resource); + + final HttpExportBulk bulk = exporter.openBulk(); + + assertThat(bulk.getName(), equalTo(exporterName())); } - return docs; } - private void enqueueGetClusterVersionResponse(Version v) throws IOException { - enqueueGetClusterVersionResponse(webServer, v); - } + public void testHttpExporterShutdown() throws Exception { + final Config config = createConfig(Settings.EMPTY); + final RestClient client = mock(RestClient.class); + final Sniffer sniffer = randomFrom(mock(Sniffer.class), null); + final 
NodeFailureListener listener = mock(NodeFailureListener.class); + final ResolversRegistry resolvers = mock(ResolversRegistry.class); + final MultiHttpResource resource = mock(MultiHttpResource.class); - private void enqueueGetClusterVersionResponse(MockWebServer mockWebServer, Version v) throws IOException { - mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody( - jsonBuilder().startObject().startObject("version").field("number", v.toString()).endObject().endObject().bytes() - .utf8ToString())); - } + if (sniffer != null && rarely()) { + doThrow(randomFrom(new IOException("expected"), new RuntimeException("expected"))).when(sniffer).close(); + } - private void enqueueTemplateAndPipelineResponses(final MockWebServer webServer, - final boolean templatesAlreadyExists, final boolean pipelineAlreadyExists) - throws IOException { - enqueueTemplateResponses(webServer, templatesAlreadyExists); - enqueuePipelineResponses(webServer, pipelineAlreadyExists); - } + if (rarely()) { + doThrow(randomFrom(new IOException("expected"), new RuntimeException("expected"))).when(client).close(); + } - private void enqueueTemplateResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { - if (alreadyExists) { - enqueueTemplateResponsesExistsAlready(webServer); + new HttpExporter(config, client, sniffer, listener, resolvers, resource).close(); + + // order matters; sniffer must close first + if (sniffer != null) { + final InOrder inOrder = inOrder(sniffer, client); + + inOrder.verify(sniffer).close(); + inOrder.verify(client).close(); } else { - enqueueTemplateResponsesDoesNotExistYet(webServer); + verify(client).close(); } } - private void enqueueTemplateResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException { - for (String template : monitoringTemplates().keySet()) { - enqueueResponse(webServer, 404, "template [" + template + "] does not exist"); - enqueueResponse(webServer, 201, "template [" + template + "] created"); + private void assertMasterTimeoutSet(final List resources, final TimeValue timeout) { + if (timeout != null) { + for (final PublishableHttpResource resource : resources) { + assertThat(resource.getParameters().get("master_timeout"), equalTo(timeout.toString())); + } } } - private void enqueueTemplateResponsesExistsAlready(final MockWebServer webServer) throws IOException { - for (String template : monitoringTemplates().keySet()) { - enqueueResponse(webServer, 200, "template [" + template + "] exists"); - } + /** + * Create the {@link Config} named "_http" and select those settings from {@code settings}. + * + * @param settings The settings to select the exporter's settings from + * @return Never {@code null}. 
+ */ + private static Config createConfig(Settings settings) { + return new Config("_http", HttpExporter.TYPE, settings.getAsSettings(exporterName())); } - private void enqueuePipelineResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { - if (alreadyExists) { - enqueuePipelineResponsesExistsAlready(webServer); - } else { - enqueuePipelineResponsesDoesNotExistYet(webServer); - } + private static String exporterName() { + return "xpack.monitoring.exporters._http"; } - private void enqueuePipelineResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException { - enqueueResponse(webServer, 404, "pipeline [" + Exporter.EXPORT_PIPELINE_NAME + "] does not exist"); - enqueueResponse(webServer, 201, "pipeline [" + Exporter.EXPORT_PIPELINE_NAME + "] created"); - } - - private void enqueuePipelineResponsesExistsAlready(final MockWebServer webServer) throws IOException { - enqueueResponse(webServer, 200, "pipeline [" + Exporter.EXPORT_PIPELINE_NAME + "] exists"); - } - - private void enqueueResponse(int responseCode, String body) throws IOException { - enqueueResponse(webServer, responseCode, body); - } - - private void enqueueResponse(MockWebServer mockWebServer, int responseCode, String body) throws IOException { - mockWebServer.enqueue(new MockResponse().setResponseCode(responseCode).setBody(body)); - } - - private void assertBulkRequest(Buffer requestBody, int numberOfActions) throws Exception { - BulkRequest bulkRequest = Requests.bulkRequest().add(new BytesArray(requestBody.readByteArray()), null, null); - assertThat(bulkRequest.numberOfActions(), equalTo(numberOfActions)); - for (ActionRequest actionRequest : bulkRequest.requests()) { - assertThat(actionRequest, instanceOf(IndexRequest.class)); - } - } } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtilsTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtilsTests.java deleted file mode 100644 index ec02cd5d939..00000000000 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterUtilsTests.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.monitoring.exporter.http; - -import org.elasticsearch.test.ESTestCase; - -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; - -import static org.hamcrest.CoreMatchers.equalTo; - - -public class HttpExporterUtilsTests extends ESTestCase { - - public void testHostParsing() throws MalformedURLException, URISyntaxException { - URL url = HttpExporterUtils.parseHostWithPath("localhost:9200", ""); - verifyUrl(url, "http", "localhost", 9200, "/"); - - url = HttpExporterUtils.parseHostWithPath("localhost", "_bulk"); - verifyUrl(url, "http", "localhost", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("http://localhost:9200", "_bulk"); - verifyUrl(url, "http", "localhost", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("http://localhost", "_bulk"); - verifyUrl(url, "http", "localhost", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("https://localhost:9200", "_bulk"); - verifyUrl(url, "https", "localhost", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("https://boaz-air.local:9200", "_bulk"); - verifyUrl(url, "https", "boaz-air.local", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("localhost:9200/suburl", ""); - verifyUrl(url, "http", "localhost", 9200, "/suburl/"); - - url = HttpExporterUtils.parseHostWithPath("localhost/suburl", "_bulk"); - verifyUrl(url, "http", "localhost", 9200, "/suburl/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("http://localhost:9200/suburl/suburl1", "_bulk"); - verifyUrl(url, "http", "localhost", 9200, "/suburl/suburl1/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("https://localhost:9200/suburl", "_bulk"); - verifyUrl(url, "https", "localhost", 9200, "/suburl/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("https://server_with_underscore:9300", "_bulk"); - verifyUrl(url, "https", "server_with_underscore", 9300, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("server_with_underscore:9300", "_bulk"); - verifyUrl(url, "http", "server_with_underscore", 9300, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("server_with_underscore", "_bulk"); - verifyUrl(url, "http", "server_with_underscore", 9200, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("https://server-dash:9300", "_bulk"); - verifyUrl(url, "https", "server-dash", 9300, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("server-dash:9300", "_bulk"); - verifyUrl(url, "http", "server-dash", 9300, "/_bulk"); - - url = HttpExporterUtils.parseHostWithPath("server-dash", "_bulk"); - verifyUrl(url, "http", "server-dash", 9200, "/_bulk"); - } - - void verifyUrl(URL url, String protocol, String host, int port, String path) throws URISyntaxException { - assertThat(url.getProtocol(), equalTo(protocol)); - assertThat(url.getHost(), equalTo(host)); - assertThat(url.getPort(), equalTo(port)); - assertThat(url.toURI().getPath(), equalTo(path)); - } -} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilderTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilderTests.java new file mode 100644 index 00000000000..58efe54defe --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpHostBuilderTests.java @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpHost; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests {@link HttpHostBuilder}. + */ +public class HttpHostBuilderTests extends ESTestCase { + + private final Scheme scheme = randomFrom(Scheme.values()); + private final String hostname = randomAsciiOfLengthBetween(1, 20); + private final int port = randomIntBetween(1, 65535); + + public void testBuilder() { + assertHttpHost(HttpHostBuilder.builder(hostname), Scheme.HTTP, hostname, 9200); + assertHttpHost(HttpHostBuilder.builder(scheme.toString() + "://" + hostname), scheme, hostname, 9200); + assertHttpHost(HttpHostBuilder.builder(scheme.toString() + "://" + hostname + ":" + port), scheme, hostname, port); + // weird port, but I don't expect it to explode + assertHttpHost(HttpHostBuilder.builder(scheme.toString() + "://" + hostname + ":-1"), scheme, hostname, 9200); + // port without scheme + assertHttpHost(HttpHostBuilder.builder(hostname + ":" + port), Scheme.HTTP, hostname, port); + + // fairly ordinary + assertHttpHost(HttpHostBuilder.builder("localhost"), Scheme.HTTP, "localhost", 9200); + assertHttpHost(HttpHostBuilder.builder("localhost:9200"), Scheme.HTTP, "localhost", 9200); + assertHttpHost(HttpHostBuilder.builder("http://localhost"), Scheme.HTTP, "localhost", 9200); + assertHttpHost(HttpHostBuilder.builder("http://localhost:9200"), Scheme.HTTP, "localhost", 9200); + assertHttpHost(HttpHostBuilder.builder("https://localhost:9200"), Scheme.HTTPS, "localhost", 9200); + assertHttpHost(HttpHostBuilder.builder("https://boaz-air.local:9200"), Scheme.HTTPS, "boaz-air.local", 9200); + assertHttpHost(HttpHostBuilder.builder("https://server-dash:19200"), Scheme.HTTPS, "server-dash", 19200); + assertHttpHost(HttpHostBuilder.builder("server-dash:19200"), Scheme.HTTP, "server-dash", 19200); + assertHttpHost(HttpHostBuilder.builder("server-dash"), Scheme.HTTP, "server-dash", 9200); + assertHttpHost(HttpHostBuilder.builder("sub.domain"), Scheme.HTTP, "sub.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("http://sub.domain"), Scheme.HTTP, "sub.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("http://sub.domain:9200"), Scheme.HTTP, "sub.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("https://sub.domain:9200"), Scheme.HTTPS, "sub.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("https://sub.domain:19200"), Scheme.HTTPS, "sub.domain", 19200); + + // ipv4 + assertHttpHost(HttpHostBuilder.builder("127.0.0.1"), Scheme.HTTP, "127.0.0.1", 9200); + assertHttpHost(HttpHostBuilder.builder("127.0.0.1:19200"), Scheme.HTTP, "127.0.0.1", 19200); + assertHttpHost(HttpHostBuilder.builder("http://127.0.0.1"), Scheme.HTTP, "127.0.0.1", 9200); + assertHttpHost(HttpHostBuilder.builder("http://127.0.0.1:9200"), Scheme.HTTP, "127.0.0.1", 9200); + assertHttpHost(HttpHostBuilder.builder("https://127.0.0.1:9200"), Scheme.HTTPS, "127.0.0.1", 9200); + assertHttpHost(HttpHostBuilder.builder("https://127.0.0.1:19200"), Scheme.HTTPS, "127.0.0.1", 19200); + + // ipv6 + assertHttpHost(HttpHostBuilder.builder("[::1]"), Scheme.HTTP, "[::1]", 9200); + assertHttpHost(HttpHostBuilder.builder("[::1]:19200"), Scheme.HTTP, "[::1]", 19200); + 
assertHttpHost(HttpHostBuilder.builder("http://[::1]"), Scheme.HTTP, "[::1]", 9200); + assertHttpHost(HttpHostBuilder.builder("http://[::1]:9200"), Scheme.HTTP, "[::1]", 9200); + assertHttpHost(HttpHostBuilder.builder("https://[::1]:9200"), Scheme.HTTPS, "[::1]", 9200); + assertHttpHost(HttpHostBuilder.builder("https://[::1]:19200"), Scheme.HTTPS, "[::1]", 19200); + assertHttpHost(HttpHostBuilder.builder("[fdda:5cc1:23:4::1f]"), Scheme.HTTP, "[fdda:5cc1:23:4::1f]", 9200); + assertHttpHost(HttpHostBuilder.builder("http://[fdda:5cc1:23:4::1f]"), Scheme.HTTP, "[fdda:5cc1:23:4::1f]", 9200); + assertHttpHost(HttpHostBuilder.builder("http://[fdda:5cc1:23:4::1f]:9200"), Scheme.HTTP, "[fdda:5cc1:23:4::1f]", 9200); + assertHttpHost(HttpHostBuilder.builder("https://[fdda:5cc1:23:4::1f]:9200"), Scheme.HTTPS, "[fdda:5cc1:23:4::1f]", 9200); + assertHttpHost(HttpHostBuilder.builder("https://[fdda:5cc1:23:4::1f]:19200"), Scheme.HTTPS, "[fdda:5cc1:23:4::1f]", 19200); + + // underscores + assertHttpHost(HttpHostBuilder.builder("server_with_underscore"), Scheme.HTTP, "server_with_underscore", 9200); + assertHttpHost(HttpHostBuilder.builder("server_with_underscore:19200"), Scheme.HTTP, "server_with_underscore", 19200); + assertHttpHost(HttpHostBuilder.builder("http://server_with_underscore"), Scheme.HTTP, "server_with_underscore", 9200); + assertHttpHost(HttpHostBuilder.builder("http://server_with_underscore:9200"), Scheme.HTTP, "server_with_underscore", 9200); + assertHttpHost(HttpHostBuilder.builder("http://server_with_underscore:19200"), Scheme.HTTP, "server_with_underscore", 19200); + assertHttpHost(HttpHostBuilder.builder("https://server_with_underscore"), Scheme.HTTPS, "server_with_underscore", 9200); + assertHttpHost(HttpHostBuilder.builder("https://server_with_underscore:9200"), Scheme.HTTPS, "server_with_underscore", 9200); + assertHttpHost(HttpHostBuilder.builder("https://server_with_underscore:19200"), Scheme.HTTPS, "server_with_underscore", 19200); + assertHttpHost(HttpHostBuilder.builder("_prefix.domain"), Scheme.HTTP, "_prefix.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("_prefix.domain:19200"), Scheme.HTTP, "_prefix.domain", 19200); + assertHttpHost(HttpHostBuilder.builder("http://_prefix.domain"), Scheme.HTTP, "_prefix.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("http://_prefix.domain:9200"), Scheme.HTTP, "_prefix.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("http://_prefix.domain:19200"), Scheme.HTTP, "_prefix.domain", 19200); + assertHttpHost(HttpHostBuilder.builder("https://_prefix.domain"), Scheme.HTTPS, "_prefix.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("https://_prefix.domain:9200"), Scheme.HTTPS, "_prefix.domain", 9200); + assertHttpHost(HttpHostBuilder.builder("https://_prefix.domain:19200"), Scheme.HTTPS, "_prefix.domain", 19200); + } + + public void testManualBuilder() { + assertHttpHost(HttpHostBuilder.builder().host(hostname), Scheme.HTTP, hostname, 9200); + assertHttpHost(HttpHostBuilder.builder().scheme(scheme).host(hostname), scheme, hostname, 9200); + assertHttpHost(HttpHostBuilder.builder().scheme(scheme).host(hostname).port(port), scheme, hostname, port); + // unset the port (not normal, but ensuring it works) + assertHttpHost(HttpHostBuilder.builder().scheme(scheme).host(hostname).port(port).port(-1), scheme, hostname, 9200); + // port without scheme + assertHttpHost(HttpHostBuilder.builder().host(hostname).port(port), Scheme.HTTP, hostname, port); + } + + public void testBuilderNullUri() { + final NullPointerException e = 
expectThrows(NullPointerException.class, () -> HttpHostBuilder.builder(null)); + + assertThat(e.getMessage(), equalTo("uri must not be null")); + } + + public void testUnknownScheme() { + assertBuilderBadSchemeThrows("htp://localhost:9200", "htp"); + assertBuilderBadSchemeThrows("htttp://localhost:9200", "htttp"); + assertBuilderBadSchemeThrows("httpd://localhost:9200", "httpd"); + assertBuilderBadSchemeThrows("ws://localhost:9200", "ws"); + assertBuilderBadSchemeThrows("wss://localhost:9200", "wss"); + assertBuilderBadSchemeThrows("ftp://localhost:9200", "ftp"); + assertBuilderBadSchemeThrows("gopher://localhost:9200", "gopher"); + assertBuilderBadSchemeThrows("localhost://9200", "localhost"); + } + + public void testPathIsBlocked() { + assertBuilderPathThrows("http://localhost:9200/", "/"); + assertBuilderPathThrows("http://localhost:9200/sub", "/sub"); + assertBuilderPathThrows("http://localhost:9200/sub/path", "/sub/path"); + } + + public void testBuildWithoutHost() { + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> HttpHostBuilder.builder().build()); + + assertThat(e.getMessage(), equalTo("host must be set")); + } + + public void testNullScheme() { + expectThrows(NullPointerException.class, () -> HttpHostBuilder.builder().scheme(null)); + } + + public void testNullHost() { + expectThrows(NullPointerException.class, () -> HttpHostBuilder.builder().host(null)); + } + + public void testBadPort() { + assertPortThrows(0); + assertPortThrows(65536); + + assertPortThrows(randomIntBetween(Integer.MIN_VALUE, -2)); + assertPortThrows(randomIntBetween(65537, Integer.MAX_VALUE)); + } + + private void assertHttpHost(final HttpHostBuilder host, final Scheme scheme, final String hostname, final int port) { + assertHttpHost(host.build(), scheme, hostname, port); + } + + private void assertHttpHost(final HttpHost host, final Scheme scheme, final String hostname, final int port) { + assertThat(host.getSchemeName(), equalTo(scheme.toString())); + assertThat(host.getHostName(), equalTo(hostname)); + assertThat(host.getPort(), equalTo(port)); + } + + private void assertBuilderPathThrows(final String uri, final String path) { + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> HttpHostBuilder.builder(uri)); + + assertThat(e.getMessage(), containsString("[" + path + "]")); + } + + private void assertBuilderBadSchemeThrows(final String uri, final String scheme) { + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> HttpHostBuilder.builder(uri)); + + assertThat(e.getMessage(), containsString(scheme)); + } + + private void assertPortThrows(final int port) { + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> HttpHostBuilder.builder().port(port)); + + assertThat(e.getMessage(), containsString(Integer.toString(port))); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java new file mode 100644 index 00000000000..565600fe41d --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpResourceTests.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.ESTestCase; + +import java.util.function.Supplier; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests {@link HttpResource}. + */ +public class HttpResourceTests extends ESTestCase { + + private final String owner = getTestName(); + private final RestClient client = mock(RestClient.class); + + public void testConstructorRequiresOwner() { + expectThrows(NullPointerException.class, () -> new HttpResource(null) { + @Override + protected boolean doCheckAndPublish(RestClient client) { + return false; + } + }); + } + + public void testConstructor() { + final HttpResource resource = new HttpResource(owner) { + @Override + protected boolean doCheckAndPublish(RestClient client) { + return false; + } + }; + + assertSame(owner, resource.resourceOwnerName); + assertTrue(resource.isDirty()); + } + + public void testConstructorDirtiness() { + final boolean dirty = randomBoolean(); + final HttpResource resource = new HttpResource(owner, dirty) { + @Override + protected boolean doCheckAndPublish(RestClient client) { + return false; + } + }; + + assertSame(owner, resource.resourceOwnerName); + assertEquals(dirty, resource.isDirty()); + } + + public void testDirtiness() { + // MockHttpResponse always succeeds for checkAndPublish + final HttpResource resource = new MockHttpResource(owner); + + assertTrue(resource.isDirty()); + + resource.markDirty(); + + assertTrue(resource.isDirty()); + + // if this fails, then the mocked resource needs to be fixed + assertTrue(resource.checkAndPublish(client)); + + assertFalse(resource.isDirty()); + } + + public void testCheckAndPublish() { + final boolean expected = randomBoolean(); + // the default dirtiness should be irrelevant; it should always be run! + final HttpResource resource = new HttpResource(owner) { + @Override + protected boolean doCheckAndPublish(final RestClient client) { + return expected; + } + }; + + assertEquals(expected, resource.checkAndPublish(client)); + } + + public void testCheckAndPublishEvenWhenDirty() { + final Supplier supplier = mock(Supplier.class); + when(supplier.get()).thenReturn(true, false); + + final HttpResource resource = new HttpResource(owner) { + @Override + protected boolean doCheckAndPublish(final RestClient client) { + return supplier.get(); + } + }; + + assertTrue(resource.isDirty()); + assertTrue(resource.checkAndPublish(client)); + assertFalse(resource.isDirty()); + assertFalse(resource.checkAndPublish(client)); + + verify(supplier, times(2)).get(); + } + + public void testCheckAndPublishIfDirty() { + @SuppressWarnings("unchecked") + final Supplier supplier = mock(Supplier.class); + when(supplier.get()).thenReturn(true, false); + + final HttpResource resource = new HttpResource(owner) { + @Override + protected boolean doCheckAndPublish(final RestClient client) { + return supplier.get(); + } + }; + + assertTrue(resource.isDirty()); + assertTrue(resource.checkAndPublishIfDirty(client)); + assertFalse(resource.isDirty()); + assertTrue(resource.checkAndPublishIfDirty(client)); + + // once is the default! 
+        verify(supplier).get();
+    }
+
+}
diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockHttpResource.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockHttpResource.java
new file mode 100644
index 00000000000..1824036eb05
--- /dev/null
+++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockHttpResource.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.monitoring.exporter.http;
+
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.unit.TimeValue;
+
+import java.util.Collections;
+import java.util.Map;
+
+/**
+ * {@code MockHttpResource} allows the {@linkplain HttpResource#isDirty() dirtiness} to be defaulted.
+ */
+public class MockHttpResource extends PublishableHttpResource {
+
+    public final CheckResponse check;
+    public final boolean publish;
+
+    public int checked = 0;
+    public int published = 0;
+
+    /**
+     * Create a new {@link MockHttpResource} that starts dirty, but always succeeds.
+     *
+     * @param resourceOwnerName The user-recognizable name
+     */
+    public MockHttpResource(final String resourceOwnerName) {
+        this(resourceOwnerName, true, CheckResponse.EXISTS, true);
+    }
+
+    /**
+     * Create a new {@link MockHttpResource} that starts {@code dirty}, but always succeeds.
+     *
+     * @param resourceOwnerName The user-recognizable name
+     * @param dirty The starting dirtiness of the resource.
+     */
+    public MockHttpResource(final String resourceOwnerName, final boolean dirty) {
+        this(resourceOwnerName, dirty, CheckResponse.EXISTS, true);
+    }
+
+    /**
+     * Create a new {@link MockHttpResource} that starts dirty, but always succeeds.
+     *
+     * @param resourceOwnerName The user-recognizable name.
+     * @param masterTimeout Master timeout to use with any request.
+     * @param parameters The base parameters to specify for the request.
+     */
+    public MockHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, final Map<String, String> parameters) {
+        this(resourceOwnerName, masterTimeout, parameters, true, CheckResponse.EXISTS, true);
+    }
+
+    /**
+     * Create a new {@link MockHttpResource} that starts {@code dirty}.
+     *
+     * @param resourceOwnerName The user-recognizable name
+     * @param dirty The starting dirtiness of the resource.
+     * @param check The expected response when checking for the resource.
+     * @param publish The expected response when publishing the resource (assumes check was {@link CheckResponse#DOES_NOT_EXIST}).
+     */
+    public MockHttpResource(final String resourceOwnerName, final boolean dirty, final CheckResponse check, final boolean publish) {
+        this(resourceOwnerName, null, Collections.emptyMap(), dirty, check, publish);
+    }
+
+    /**
+     * Create a new {@link MockHttpResource} that starts dirty.
+     *
+     * @param resourceOwnerName The user-recognizable name
+     * @param check The expected response when checking for the resource.
+     * @param publish The expected response when publishing the resource (assumes check was {@link CheckResponse#DOES_NOT_EXIST}).
+     * @param masterTimeout Master timeout to use with any request.
+     * @param parameters The base parameters to specify for the request.
+     */
+    public MockHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, final Map<String, String> parameters,
+                            final CheckResponse check, final boolean publish) {
+        this(resourceOwnerName, masterTimeout, parameters, true, check, publish);
+    }
+
+    /**
+     * Create a new {@link MockHttpResource}.
+     *
+     * @param resourceOwnerName The user-recognizable name
+     * @param dirty The starting dirtiness of the resource.
+     * @param check The expected response when checking for the resource.
+     * @param publish The expected response when publishing the resource (assumes check was {@link CheckResponse#DOES_NOT_EXIST}).
+     * @param masterTimeout Master timeout to use with any request.
+     * @param parameters The base parameters to specify for the request.
+     */
+    public MockHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, final Map<String, String> parameters,
+                            final boolean dirty, final CheckResponse check, final boolean publish) {
+        super(resourceOwnerName, masterTimeout, parameters, dirty);
+
+        this.check = check;
+        this.publish = publish;
+    }
+
+    @Override
+    protected CheckResponse doCheck(final RestClient client) {
+        assert client != null;
+
+        ++checked;
+
+        return check;
+    }
+
+    @Override
+    protected boolean doPublish(final RestClient client) {
+        assert client != null;
+
+        ++published;
+
+        return publish;
+    }
+
+}
diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockWebServerContainer.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockWebServerContainer.java
new file mode 100644
index 00000000000..00b4f24064d
--- /dev/null
+++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MockWebServerContainer.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.monitoring.exporter.http;
+
+import com.squareup.okhttp.mockwebserver.MockWebServer;
+import com.squareup.okhttp.mockwebserver.QueueDispatcher;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.logging.Loggers;
+
+import java.io.IOException;
+import java.net.BindException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * {@code MockWebServerContainer} wraps a {@link MockWebServer} to avoid forcing every usage of it to do the same thing.
+ */
+public class MockWebServerContainer implements AutoCloseable {
+
+    private static Logger logger = Loggers.getLogger(MockWebServerContainer.class);
+
+    /**
+     * The running {@link MockWebServer}.
+     */
+    private final MockWebServer server;
+
+    /**
+     * Create a {@link MockWebServerContainer} that uses a port from [{@code 9250}, {@code 9300}).
+     *
+     * @throws RuntimeException if an unrecoverable exception occurs (e.g., no open ports available)
+     */
+    public MockWebServerContainer() {
+        this(9250, 9300);
+    }
+
+    /**
+     * Create a {@link MockWebServerContainer} that uses a port from [{@code startPort}, {@code 9300}).
+     * <p>
+     * This is useful if you need to test with two {@link MockWebServer}s, so you can simply skip the port of the existing one.
+     *
+     * @param startPort The first port to try (inclusive).
+     * @throws RuntimeException if an unrecoverable exception occurs (e.g., no open ports available)
+     */
+    public MockWebServerContainer(final int startPort) {
+        this(startPort, 9300);
+    }
+
+    /**
+     * Create a {@link MockWebServerContainer} that uses a port from [{@code startPort}, {@code endPort}).
+     *
+     * @param startPort The first port to try (inclusive).
+     * @param endPort The last port to try (exclusive).
+     * @throws RuntimeException if an unrecoverable exception occurs (e.g., no open ports available)
+     */
+    public MockWebServerContainer(final int startPort, final int endPort) {
+        final List<Integer> failedPorts = new ArrayList<>(0);
+        final QueueDispatcher dispatcher = new QueueDispatcher();
+        dispatcher.setFailFast(true);
+
+        MockWebServer webServer = null;
+
+        for (int port = startPort; port < endPort; ++port) {
+            try {
+                webServer = new MockWebServer();
+                webServer.setDispatcher(dispatcher);
+
+                webServer.start(port);
+                break;
+            } catch (final BindException e) {
+                failedPorts.add(port);
+                webServer = null;
+            } catch (final IOException e) {
+                logger.error("unrecoverable failure while trying to start MockWebServer with port [{}]", e, port);
+                throw new ElasticsearchException(e);
+            }
+        }
+
+        if (webServer != null) {
+            this.server = webServer;
+
+            if (failedPorts.isEmpty() == false) {
+                logger.warn("ports [{}] were already in use. using port [{}]", failedPorts, webServer.getPort());
+            }
+        } else {
+            throw new ElasticsearchException("unable to find open port between [" + startPort + "] and [" + endPort + "]");
+        }
+    }
+
+    /**
+     * Get the {@link MockWebServer} created by this container.
+     *
+     * @return Never {@code null}.
+     */
+    public MockWebServer getWebServer() {
+        return server;
+    }
+
+    /**
+     * Get the port used by the running web server.
+     *
+     * @return The local port used by the {@linkplain #getWebServer() web server}.
+     */
+    public int getPort() {
+        return server.getPort();
+    }
+
+    /**
+     * Get the formatted address in the form of "hostname:port".
+     *
+     * @return Never {@code null}.
+     */
+    public String getFormattedAddress() {
+        return server.getHostName() + ":" + server.getPort();
+    }
+
+    /**
+     * Shutdown the {@linkplain #getWebServer() web server}.
+     */
+    @Override
+    public void close() throws Exception {
+        server.shutdown();
+    }
+}
diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResourceTests.java
new file mode 100644
index 00000000000..086fb728b13
--- /dev/null
+++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/MultiHttpResourceTests.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; + +/** + * Tests {@link MultiHttpResource}. + */ +public class MultiHttpResourceTests extends ESTestCase { + + private final String owner = getClass().getSimpleName(); + private final RestClient client = mock(RestClient.class); + + public void testDoCheckAndPublish() { + final List allResources = successfulResources(); + final MultiHttpResource multiResource = new MultiHttpResource(owner, allResources); + + assertTrue(multiResource.doCheckAndPublish(client)); + + for (final MockHttpResource resource : allResources) { + assertSuccessfulResource(resource); + } + } + + public void testDoCheckAndPublishShortCircuits() { + // fail either the check or the publish + final CheckResponse check = randomFrom(CheckResponse.ERROR, CheckResponse.DOES_NOT_EXIST); + final boolean publish = check == CheckResponse.ERROR; + final List allResources = successfulResources(); + final MockHttpResource failureResource = new MockHttpResource(owner, true, check, publish); + + allResources.add(failureResource); + + Collections.shuffle(allResources, random()); + + final MultiHttpResource multiResource = new MultiHttpResource(owner, allResources); + + assertFalse(multiResource.doCheckAndPublish(client)); + + boolean found = false; + + for (final MockHttpResource resource : allResources) { + // should stop looking at this point + if (resource == failureResource) { + assertThat(resource.checked, equalTo(1)); + if (resource.check == CheckResponse.ERROR) { + assertThat(resource.published, equalTo(0)); + } else { + assertThat(resource.published, equalTo(1)); + } + + found = true; + } else if (found) { + assertThat(resource.checked, equalTo(0)); + assertThat(resource.published, equalTo(0)); + } + else { + assertSuccessfulResource(resource); + } + } + } + + private List successfulResources() { + final int successful = randomIntBetween(2, 5); + final List resources = new ArrayList<>(successful); + + for (int i = 0; i < successful; ++i) { + final CheckResponse check = randomFrom(CheckResponse.DOES_NOT_EXIST, CheckResponse.EXISTS); + final MockHttpResource resource = new MockHttpResource(owner, randomBoolean(), check, check == CheckResponse.DOES_NOT_EXIST); + + resources.add(resource); + } + + return resources; + } + + private void assertSuccessfulResource(final MockHttpResource resource) { + assertThat(resource.checked, equalTo(1)); + if (resource.check == CheckResponse.DOES_NOT_EXIST) { + assertThat(resource.published, equalTo(1)); + } else { + assertThat(resource.published, equalTo(0)); + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java new file mode 100644 index 00000000000..f1ecb799406 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/NodeFailureListenerTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpHost; +import org.apache.lucene.util.SetOnce.AlreadySetException; +import org.elasticsearch.client.sniff.Sniffer; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +/** + * Tests {@link NodeFailureListener}. + */ +public class NodeFailureListenerTests extends ESTestCase { + + private final Sniffer sniffer = mock(Sniffer.class); + private final HttpResource resource = new MockHttpResource(getTestName(), false); + private final HttpHost host = new HttpHost("localhost", 9200); + + private final NodeFailureListener listener = new NodeFailureListener(); + + public void testSetSnifferTwiceFails() { + listener.setSniffer(sniffer); + + assertThat(listener.getSniffer(), is(sniffer)); + + expectThrows(AlreadySetException.class, () -> listener.setSniffer(randomFrom(sniffer, null))); + } + + public void testSetResourceTwiceFails() { + listener.setResource(resource); + + assertThat(listener.getResource(), is(resource)); + + expectThrows(AlreadySetException.class, () -> listener.setResource(randomFrom(resource, null))); + } + + public void testSnifferNotifiedOnFailure() { + listener.setSniffer(sniffer); + + listener.onFailure(host); + + verify(sniffer).sniffOnFailure(host); + } + + public void testResourceNotifiedOnFailure() { + listener.setResource(resource); + + listener.onFailure(host); + + assertTrue(resource.isDirty()); + } + + public void testResourceAndSnifferNotifiedOnFailure() { + final HttpResource optionalResource = randomFrom(resource, null); + final Sniffer optionalSniffer = randomFrom(sniffer, null); + + listener.setResource(optionalResource); + listener.setSniffer(optionalSniffer); + + listener.onFailure(host); + + if (optionalResource != null) { + assertTrue(resource.isDirty()); + } + + if (optionalSniffer != null) { + verify(sniffer).sniffOnFailure(host); + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResourceTests.java new file mode 100644 index 00000000000..5492adf7150 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResourceTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import java.io.IOException; +import java.io.InputStream; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.is; + +/** + * Tests {@link PipelineHttpResource}. 
+ */ +public class PipelineHttpResourceTests extends AbstractPublishableHttpResourceTestCase { + + private final String pipelineName = ".my_pipeline"; + private final byte[] pipelineBytes = new byte[] { randomByte(), randomByte(), randomByte() }; + private final Supplier pipeline = () -> pipelineBytes; + + private final PipelineHttpResource resource = new PipelineHttpResource(owner, masterTimeout, pipelineName, pipeline); + + public void testPipelineToHttpEntity() throws IOException { + final HttpEntity entity = resource.pipelineToHttpEntity(); + + assertThat(entity.getContentType().getValue(), is(ContentType.APPLICATION_JSON.toString())); + + final InputStream byteStream = entity.getContent(); + + assertThat(byteStream.available(), is(pipelineBytes.length)); + + for (final byte pipelineByte : pipelineBytes) { + assertThat(pipelineByte, is((byte)byteStream.read())); + } + + assertThat(byteStream.available(), is(0)); + } + + public void testDoCheckTrue() throws IOException { + assertCheckExists(resource, "/_ingest/pipeline", pipelineName); + } + + public void testDoCheckFalse() throws IOException { + assertCheckDoesNotExist(resource, "/_ingest/pipeline", pipelineName); + } + + public void testDoCheckNullWithException() throws IOException { + assertCheckWithException(resource, "/_ingest/pipeline", pipelineName); + } + + public void testDoPublishTrue() throws IOException { + assertPublishSucceeds(resource, "/_ingest/pipeline", pipelineName, ByteArrayEntity.class); + } + + public void testDoPublishFalse() throws IOException { + assertPublishFails(resource, "/_ingest/pipeline", pipelineName, ByteArrayEntity.class); + } + + public void testDoPublishFalseWithException() throws IOException { + assertPublishWithException(resource, "/_ingest/pipeline", pipelineName, ByteArrayEntity.class); + } + + public void testParameters() { + assertParameters(resource); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java new file mode 100644 index 00000000000..469f4344bf8 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse; + +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +/** + * Tests {@link PublishableHttpResource}. 
+ */ +public class PublishableHttpResourceTests extends AbstractPublishableHttpResourceTestCase { + + private final String ownerType = "ownerthing"; + private final String resourceBasePath = "/_fake"; + private final String resourceName = ".my_thing"; + private final String resourceType = "thingamajig"; + private final Logger logger = mock(Logger.class); + private final HttpEntity entity = mock(HttpEntity.class); + private final Supplier body = () -> entity; + + private final PublishableHttpResource resource = + new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS); + + public void testCheckForResourceExists() throws IOException { + assertCheckForResource(successfulCheckStatus(), CheckResponse.EXISTS, "{} [{}] found on the [{}] {}"); + } + + public void testCheckForResourceDoesNotExist() throws IOException { + assertCheckForResource(notFoundCheckStatus(), CheckResponse.DOES_NOT_EXIST, "{} [{}] does not exist on the [{}] {}"); + } + + public void testCheckForResourceUnexpectedResponse() throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final RestStatus failedStatus = failedCheckStatus(); + final Response response = response("GET", endpoint, failedStatus); + + when(client.performRequest("GET", endpoint, resource.getParameters())).thenReturn(response); + + assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), + is(CheckResponse.ERROR)); + + verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); + verify(client).performRequest("GET", endpoint, resource.getParameters()); + verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class)); + + verifyNoMoreInteractions(client, logger); + } + + public void testCheckForResourceErrors() throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final RestStatus failedStatus = failedCheckStatus(); + final ResponseException responseException = responseException("GET", endpoint, failedStatus); + final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException); + + when(client.performRequest("GET", endpoint, resource.getParameters())).thenThrow(e); + + assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), + is(CheckResponse.ERROR)); + + verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); + verify(client).performRequest("GET", endpoint, resource.getParameters()); + verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e)); + + verifyNoMoreInteractions(client, logger); + } + + public void testPutResourceTrue() throws IOException { + assertPutResource(successfulPublishStatus(), true); + } + + public void testPutResourceFalse() throws IOException { + assertPutResource(failedPublishStatus(), false); + } + + public void testPutResourceFalseWithException() throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected")); + + when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenThrow(e); + + assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType), is(false)); + + 
verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType); + verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity); + verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e)); + + verifyNoMoreInteractions(client, logger); + } + + public void testParameters() { + assertParameters(resource); + } + + public void testDoCheckAndPublishIgnoresPublishWhenCheckErrors() { + final PublishableHttpResource resource = + new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, CheckResponse.ERROR, true); + + assertThat(resource.doCheckAndPublish(client), is(false)); + } + + public void testDoCheckAndPublish() { + // not an error (the third state) + final PublishableHttpResource.CheckResponse exists = randomBoolean() ? CheckResponse.EXISTS : CheckResponse.DOES_NOT_EXIST; + final boolean publish = randomBoolean(); + + final PublishableHttpResource resource = + new MockHttpResource(owner, masterTimeout, PublishableHttpResource.NO_BODY_PARAMETERS, exists, publish); + + assertThat(resource.doCheckAndPublish(client), is(exists == CheckResponse.EXISTS || publish)); + } + + private void assertCheckForResource(final RestStatus status, final CheckResponse expected, final String debugLogMessage) + throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Response response = response("GET", endpoint, status); + + when(client.performRequest("GET", endpoint, resource.getParameters())).thenReturn(response); + + assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), + is(expected)); + + verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); + verify(client).performRequest("GET", endpoint, resource.getParameters()); + + if (expected == CheckResponse.EXISTS) { + verify(response).getStatusLine(); + } else { + // 3 times because it also is used in the exception message + verify(response, times(3)).getStatusLine(); + verify(response, times(2)).getRequestLine(); + verify(response).getHost(); + verify(response).getEntity(); + } + + verify(logger).debug(debugLogMessage, resourceType, resourceName, owner, ownerType); + + verifyNoMoreInteractions(client, response, logger); + } + + private void assertPutResource(final RestStatus status, final boolean expected) throws IOException { + final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); + final Response response = response("PUT", endpoint, status); + + when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenReturn(response); + + assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType), + is(expected)); + + verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity); + verify(response).getStatusLine(); + + verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType); + + if (expected) { + verify(logger).debug("{} [{}] uploaded to the [{}] {}", resourceType, resourceName, owner, ownerType); + } else { + ArgumentCaptor e = ArgumentCaptor.forClass(RuntimeException.class); + + verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), e.capture()); + + assertThat(e.getValue().getMessage(), + is("[" + resourceBasePath + "/" + resourceName + "] responded with [" + status.getStatus() + "]")); + } + + 
verifyNoMoreInteractions(client, response, logger, entity); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SchemeTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SchemeTests.java new file mode 100644 index 00000000000..af2fa2f667f --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SchemeTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; + +/** + * Tests {@link Scheme}. + */ +public class SchemeTests extends ESTestCase { + + public void testToString() { + for (final Scheme scheme : Scheme.values()) { + assertThat(scheme.toString(), equalTo(scheme.name().toLowerCase(Locale.ROOT))); + } + } + + public void testFromString() { + for (final Scheme scheme : Scheme.values()) { + assertThat(Scheme.fromString(scheme.name()), sameInstance(scheme)); + assertThat(Scheme.fromString(scheme.name().toLowerCase(Locale.ROOT)), sameInstance(scheme)); + } + } + + public void testFromStringMalformed() { + assertIllegalScheme("htp"); + assertIllegalScheme("htttp"); + assertIllegalScheme("httpd"); + assertIllegalScheme("ftp"); + assertIllegalScheme("ws"); + assertIllegalScheme("wss"); + assertIllegalScheme("gopher"); + } + + private void assertIllegalScheme(final String scheme) { + try { + Scheme.fromString(scheme); + fail("scheme should be unknown: [" + scheme + "]"); + } catch (final IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("[" + scheme + "]")); + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallbackTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallbackTests.java new file mode 100644 index 00000000000..5fc1ed15e4d --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/SecurityHttpClientConfigCallbackTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.client.CredentialsProvider; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.elasticsearch.test.ESTestCase; + +import static org.mockito.Mockito.mock; + +/** + * Tests {@link SecurityHttpClientConfigCallback}. 
+ */ +public class SecurityHttpClientConfigCallbackTests extends ESTestCase { + + private final CredentialsProvider credentialsProvider = mock(CredentialsProvider.class); + private final SSLIOSessionStrategy sslStrategy = mock(SSLIOSessionStrategy.class); + /** + * HttpAsyncClientBuilder's methods are {@code final} and therefore not verifiable. + */ + private final HttpAsyncClientBuilder builder = mock(HttpAsyncClientBuilder.class); + + public void testSSLIOSessionStrategyNullThrowsException() { + final CredentialsProvider optionalCredentialsProvider = randomFrom(credentialsProvider, null); + + expectThrows(NullPointerException.class, () -> new SecurityHttpClientConfigCallback(null, optionalCredentialsProvider)); + } + + public void testCustomizeHttpClient() { + final SecurityHttpClientConfigCallback callback = new SecurityHttpClientConfigCallback(sslStrategy, credentialsProvider); + + assertSame(credentialsProvider, callback.getCredentialsProvider()); + assertSame(sslStrategy, callback.getSSLStrategy()); + + assertSame(builder, callback.customizeHttpClient(builder)); + } + + public void testCustomizeHttpClientWithOptionalParameters() { + final CredentialsProvider optionalCredentialsProvider = randomFrom(credentialsProvider, null); + + final SecurityHttpClientConfigCallback callback = + new SecurityHttpClientConfigCallback(sslStrategy, optionalCredentialsProvider); + + assertSame(builder, callback.customizeHttpClient(builder)); + assertSame(optionalCredentialsProvider, callback.getCredentialsProvider()); + assertSame(sslStrategy, callback.getSSLStrategy()); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResourceTests.java new file mode 100644 index 00000000000..8f994de5571 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResourceTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; + +import java.io.IOException; +import java.io.InputStream; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.is; + +/** + * Tests {@link TemplateHttpResource}. 
+ */ +public class TemplateHttpResourceTests extends AbstractPublishableHttpResourceTestCase { + + private final String templateName = ".my_template"; + private final String templateValue = "{\"template\":\".xyz-*\",\"mappings\":{}}"; + private final Supplier template = () -> templateValue; + + private final TemplateHttpResource resource = new TemplateHttpResource(owner, masterTimeout, templateName, template); + + public void testPipelineToHttpEntity() throws IOException { + final byte[] templateValueBytes = templateValue.getBytes(ContentType.APPLICATION_JSON.getCharset()); + final HttpEntity entity = resource.templateToHttpEntity(); + + assertThat(entity.getContentType().getValue(), is(ContentType.APPLICATION_JSON.toString())); + + final InputStream byteStream = entity.getContent(); + + assertThat(byteStream.available(), is(templateValueBytes.length)); + + for (final byte templateByte : templateValueBytes) { + assertThat(templateByte, is((byte)byteStream.read())); + } + + assertThat(byteStream.available(), is(0)); + } + + public void testDoCheckTrue() throws IOException { + assertCheckExists(resource, "/_template", templateName); + } + + public void testDoCheckFalse() throws IOException { + assertCheckDoesNotExist(resource, "/_template", templateName); + } + + public void testDoCheckNullWithException() throws IOException { + assertCheckWithException(resource, "/_template", templateName); + } + + public void testDoPublishTrue() throws IOException { + assertPublishSucceeds(resource, "/_template", templateName, StringEntity.class); + } + + public void testDoPublishFalse() throws IOException { + assertPublishFails(resource, "/_template", templateName, StringEntity.class); + } + + public void testDoPublishFalseWithException() throws IOException { + assertPublishWithException(resource, "/_template", templateName, StringEntity.class); + } + + public void testParameters() { + assertParameters(resource); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallbackTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallbackTests.java new file mode 100644 index 00000000000..bd04f84e560 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/TimeoutRequestConfigCallbackTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.client.config.RequestConfig; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import org.junit.Before; + +import static org.mockito.Matchers.anyInt; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests {@link TimeoutRequestConfigCallback}. 
+ */ +public class TimeoutRequestConfigCallbackTests extends ESTestCase { + + private final TimeValue connectTimeout = mock(TimeValue.class); + private final int connectTimeoutMillis = randomInt(); + private final TimeValue socketTimeout = mock(TimeValue.class); + private final int socketTimeoutMillis = randomInt(); + private final RequestConfig.Builder builder = mock(RequestConfig.Builder.class); + + @Before + public void configureTimeouts() { + when(connectTimeout.millis()).thenReturn((long)connectTimeoutMillis); + when(socketTimeout.millis()).thenReturn((long)socketTimeoutMillis); + } + + public void testCustomizeRequestConfig() { + final TimeoutRequestConfigCallback callback = new TimeoutRequestConfigCallback(connectTimeout, socketTimeout); + + assertSame(builder, callback.customizeRequestConfig(builder)); + + verify(builder).setConnectTimeout(connectTimeoutMillis); + verify(builder).setSocketTimeout(socketTimeoutMillis); + } + + public void testCustomizeRequestConfigWithOptionalParameters() { + final TimeValue optionalConnectTimeout = randomFrom(connectTimeout, null); + // avoid making both null at the same time + final TimeValue optionalSocketTimeout = optionalConnectTimeout != null ? randomFrom(socketTimeout, null) : socketTimeout; + + final TimeoutRequestConfigCallback callback = new TimeoutRequestConfigCallback(optionalConnectTimeout, optionalSocketTimeout); + + assertSame(builder, callback.customizeRequestConfig(builder)); + assertSame(optionalConnectTimeout, callback.getConnectTimeout()); + assertSame(optionalSocketTimeout, callback.getSocketTimeout()); + + if (optionalConnectTimeout != null) { + verify(builder).setConnectTimeout(connectTimeoutMillis); + } else { + verify(builder, never()).setConnectTimeout(anyInt()); + } + + if (optionalSocketTimeout != null) { + verify(builder).setSocketTimeout(socketTimeoutMillis); + } else { + verify(builder, never()).setSocketTimeout(anyInt()); + } + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java new file mode 100644 index 00000000000..801986548b9 --- /dev/null +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.Version; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests {@link VersionHttpResource}. 
+ */ +public class VersionHttpResourceTests extends ESTestCase { + + private final String owner = getClass().getSimpleName(); + private final RestClient client = mock(RestClient.class); + + public void testDoCheckAndPublishSuccess() throws IOException { + final Version minimumVersion = + randomFrom(Version.V_2_0_0, Version.V_2_0_0_beta1, Version.V_2_0_0_rc1, Version.V_2_3_3, Version.CURRENT); + final Version version = randomFrom(minimumVersion, Version.CURRENT); + final Response response = responseForVersion(version); + + final VersionHttpResource resource = new VersionHttpResource(owner, minimumVersion); + + assertTrue(resource.doCheckAndPublish(client)); + + verify(response).getEntity(); + } + + public void testDoCheckAndPublishFailedParsing() throws IOException { + // malformed JSON + final Response response = responseForJSON("{"); + + final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT); + + assertFalse(resource.doCheckAndPublish(client)); + + verify(response).getEntity(); + } + + public void testDoCheckAndPublishFailedFieldMissing() throws IOException { + // malformed response; imagining that we may change it in the future or someone breaks filter_path + final Response response = responseForJSON("{\"version.number\":\"" + Version.CURRENT + "\"}"); + + final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT); + + assertFalse(resource.doCheckAndPublish(client)); + + verify(response).getEntity(); + } + + public void testDoCheckAndPublishFailedFieldWrongType() throws IOException { + // malformed response (should be {version: { number : ... }}) + final Response response = responseForJSON("{\"version\":\"" + Version.CURRENT + "\"}"); + + final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT); + + assertFalse(resource.doCheckAndPublish(client)); + + verify(response).getEntity(); + } + + public void testDoCheckAndPublishFailedWithIOException() throws IOException { + // request fails for some reason + when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenThrow(new IOException("expected")); + + final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT); + + assertFalse(resource.doCheckAndPublish(client)); + } + + private Response responseForJSON(final String json) throws IOException { + final StringEntity entity = new StringEntity(json, ContentType.APPLICATION_JSON); + + final Response response = mock(Response.class); + when(response.getEntity()).thenReturn(entity); + + when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenReturn(response); + + return response; + } + + private Response responseForVersion(final Version version) throws IOException { + return responseForJSON("{\"version\":{\"number\":\"" + version + "\"}}"); + } + +} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolverTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolverTests.java index 52a7117d1a7..403023a0ecd 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolverTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoResolverTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.License; +import org.elasticsearch.xpack.monitoring.MonitoringFeatureSet; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterInfoMonitoringDoc; import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolverTestCase; @@ -49,6 +50,7 @@ public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCas doc.setClusterName(randomAsciiOfLength(5)); doc.setClusterStats(new ClusterStatsResponse(Math.abs(randomLong()), ClusterName.CLUSTER_NAME_SETTING .getDefault(Settings.EMPTY), randomAsciiOfLength(5), Collections.emptyList(), Collections.emptyList())); + doc.setUsage(Collections.singletonList(new MonitoringFeatureSet.Usage(randomBoolean(), randomBoolean(), emptyMap()))); return doc; } catch (Exception e) { throw new IllegalStateException("Failed to generated random ClusterInfoMonitoringDoc", e); @@ -72,13 +74,14 @@ public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCas assertThat(resolver.id(doc), equalTo(clusterUUID)); assertSource(resolver.source(doc, XContentType.JSON), - Sets.newHashSet( - "cluster_uuid", - "timestamp", - "source_node", - "cluster_name", - "version", - "license", - "cluster_stats")); + Sets.newHashSet( + "cluster_uuid", + "timestamp", + "source_node", + "cluster_name", + "version", + "license", + "cluster_stats", + "stack_stats.xpack")); } } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java index 689f4e58297..ecbf9a5508b 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java @@ -27,6 +27,7 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.isEmptyOrNullString; import static org.hamcrest.Matchers.not; @@ -61,14 +62,14 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { final String clusterUUID = client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID(); assertTrue(Strings.hasText(clusterUUID)); - logger.debug("--> waiting for the monitoring data index to be created (it should have been created by the ClusterInfoCollector)"); + // waiting for the monitoring data index to be created (it should have been created by the ClusterInfoCollector String dataIndex = ".monitoring-data-" + MonitoringTemplateUtils.TEMPLATE_VERSION; awaitIndexExists(dataIndex); - logger.debug("--> waiting for cluster info collector to collect data"); + // waiting for cluster info collector to collect data awaitMonitoringDocsCount(equalTo(1L), ClusterInfoResolver.TYPE); - logger.debug("--> retrieving cluster info document"); + // retrieving cluster info document GetResponse response = client().prepareGet(dataIndex, ClusterInfoResolver.TYPE, clusterUUID).get(); assertTrue("cluster_info 
document does not exist in data index", response.isExists()); @@ -80,20 +81,19 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { assertThat(source.get(MonitoringIndexNameResolver.Fields.CLUSTER_UUID), notNullValue()); assertThat(source.get(MonitoringIndexNameResolver.Fields.TIMESTAMP), notNullValue()); assertThat(source.get(MonitoringIndexNameResolver.Fields.SOURCE_NODE), notNullValue()); - assertThat(source.get(ClusterInfoResolver.Fields.CLUSTER_NAME), equalTo(cluster().getClusterName())); - assertThat(source.get(ClusterInfoResolver.Fields.VERSION), equalTo(Version.CURRENT.toString())); + assertThat(source.get("cluster_name"), equalTo(cluster().getClusterName())); + assertThat(source.get("version"), equalTo(Version.CURRENT.toString())); - logger.debug("--> checking that the document contains license information"); - Object licenseObj = source.get(ClusterInfoResolver.Fields.LICENSE); + Object licenseObj = source.get("license"); assertThat(licenseObj, instanceOf(Map.class)); Map license = (Map) licenseObj; assertThat(license, instanceOf(Map.class)); - String uid = (String) license.get(ClusterInfoResolver.Fields.UID); + String uid = (String) license.get("uid"); assertThat(uid, not(isEmptyOrNullString())); - String type = (String) license.get(ClusterInfoResolver.Fields.TYPE); + String type = (String) license.get("type"); assertThat(type, not(isEmptyOrNullString())); String status = (String) license.get(License.Fields.STATUS); @@ -103,7 +103,7 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { assertThat(expiryDate, greaterThan(0L)); // We basically recompute the hash here - String hkey = (String) license.get(ClusterInfoResolver.Fields.HKEY); + String hkey = (String) license.get("hkey"); String recalculated = ClusterInfoResolver.hash(status, uid, type, String.valueOf(expiryDate), clusterUUID); assertThat(hkey, equalTo(recalculated)); @@ -112,14 +112,30 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { assertThat((Long) license.get(License.Fields.ISSUE_DATE_IN_MILLIS), greaterThan(0L)); assertThat((Integer) license.get(License.Fields.MAX_NODES), greaterThan(0)); - Object clusterStats = source.get(ClusterInfoResolver.Fields.CLUSTER_STATS); + Object clusterStats = source.get("cluster_stats"); assertNotNull(clusterStats); assertThat(clusterStats, instanceOf(Map.class)); assertThat(((Map) clusterStats).size(), greaterThan(0)); + Object stackStats = source.get("stack_stats"); + assertNotNull(stackStats); + assertThat(stackStats, instanceOf(Map.class)); + assertThat(((Map) stackStats).size(), equalTo(1)); + + Object xpack = ((Map)stackStats).get("xpack"); + assertNotNull(xpack); + assertThat(xpack, instanceOf(Map.class)); + // it must have at least monitoring, but others may be hidden + assertThat(((Map) xpack).size(), greaterThanOrEqualTo(1)); + + Object monitoring = ((Map)xpack).get("monitoring"); + assertNotNull(monitoring); + // we don't make any assumptions about what's in it, only that it's there + assertThat(monitoring, instanceOf(Map.class)); + waitForMonitoringTemplates(); - logger.debug("--> check that the cluster_info is not indexed"); + // check that the cluster_info is not indexed securedFlush(); securedRefresh(); @@ -131,8 +147,7 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { .should(QueryBuilders.matchQuery(License.Fields.STATUS, License.Status.ACTIVE.label())) .should(QueryBuilders.matchQuery(License.Fields.STATUS, License.Status.INVALID.label())) .should(QueryBuilders.matchQuery(License.Fields.STATUS, 
License.Status.EXPIRED.label())) - .should(QueryBuilders.matchQuery(ClusterInfoResolver.Fields.CLUSTER_NAME, - cluster().getClusterName())) + .should(QueryBuilders.matchQuery("cluster_name", cluster().getClusterName())) .minimumNumberShouldMatch(1) ).get(), 0L); } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java index c84e8af6361..380f8ade03f 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java @@ -112,11 +112,12 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); return new NodeInfo(Version.CURRENT, org.elasticsearch.Build.CURRENT, - new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), - Settings.EMPTY, DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean()), JvmInfo.jvmInfo(), + new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), Settings.EMPTY, + DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()), JvmInfo.jvmInfo(), new ThreadPoolInfo(Collections.singletonList(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5))), new TransportInfo(transportAddress, Collections.emptyMap()), new HttpInfo(transportAddress, randomLong()), - new PluginsAndModules(), new IngestInfo(Collections.emptyList()), new ByteSizeValue(randomIntBetween(1, 1024))); + new PluginsAndModules(Collections.emptyList(), Collections.emptyList()), + new IngestInfo(Collections.emptyList()), new ByteSizeValue(randomIntBetween(1, 1024))); } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java index 260c98b152c..b6539a633d2 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentType; @@ -81,6 +82,19 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase< if (Constants.WINDOWS && field.startsWith("node_stats.os.cpu.load_average")) { return; } + + // we only report IoStats and spins on Linux + if (Constants.LINUX == false) { + if (field.startsWith("node_stats.fs.io_stats")) { + return; + } + } + + // node_stats.fs.data.spins can be null and it's 
only reported on Linux + if (field.startsWith("node_stats.fs.data.spins")) { + return; + } + super.assertSourceField(field, sourceFields); } @@ -140,6 +154,22 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase< new NodeIndicesStats(new CommonStats(), statsByShard), OsProbe.getInstance().osStats(), ProcessProbe.getInstance().processStats(), JvmStats.jvmStats(), new ThreadPoolStats(threadPoolStats), - new FsInfo(0, null, pathInfo), null, null, null, null, null, null); + new FsInfo(0, randomIoStats(), pathInfo), null, null, null, null, null, null); + } + + @Nullable + private FsInfo.IoStats randomIoStats() { + if (Constants.LINUX) { + final int stats = randomIntBetween(1, 3); + final FsInfo.DeviceStats[] devices = new FsInfo.DeviceStats[stats]; + + for (int i = 0; i < devices.length; ++i) { + devices[i] = new FsInfo.DeviceStats(253, 0, "dm-" + i, 287734, 7185242, 8398869, 118857776, null); + } + + return new FsInfo.IoStats(devices); + } + + return null; } } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java index 7abe7872210..db0a9b4bfa7 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.security; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.monitoring.MonitoringSettings; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.elasticsearch.xpack.security.InternalClient; -import java.util.ArrayList; +import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.is; @@ -86,7 +86,7 @@ public class MonitoringInternalClientTests extends MonitoringIntegTestCase { * @return the source of a random monitoring template */ private String randomTemplateSource() { - return randomFrom(new ArrayList<>(monitoringTemplates().values())); + return randomFrom(monitoringTemplates().stream().map(Tuple::v2).collect(Collectors.toList())); } } diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/support/VersionUtilsTests.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/support/VersionUtilsTests.java deleted file mode 100644 index e7f29f2d2ae..00000000000 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/support/VersionUtilsTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.monitoring.support; - -import org.elasticsearch.Version; -import org.elasticsearch.test.ESTestCase; - -import java.nio.charset.StandardCharsets; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class VersionUtilsTests extends ESTestCase { - - public void testParseVersion() { - List versions = randomSubsetOf(9, Version.V_2_0_0_beta1, Version.V_2_0_0_beta2, Version.V_2_0_0_rc1, Version.V_2_0_0, - Version.V_2_0_1, Version.V_2_0_2, Version.V_2_1_0, Version.V_2_1_1, Version.V_2_1_2, Version.V_2_2_0, Version.V_2_3_0, - Version.V_5_0_0_alpha1); - for (Version version : versions) { - String output = createOutput(VersionUtils.VERSION_NUMBER_FIELD, version.toString()); - assertThat(VersionUtils.parseVersion(output.getBytes(StandardCharsets.UTF_8)), equalTo(version)); - assertThat(VersionUtils.parseVersion(VersionUtils.VERSION_NUMBER_FIELD, output), equalTo(version)); - } - } - - private String createOutput(String fieldName, String value) { - return "{\n" + - " \"name\" : \"Blind Faith\",\n" + - " \"cluster_name\" : \"elasticsearch\",\n" + - " \"version\" : {\n" + - " \"" + fieldName + "\" : \"" + value + "\",\n" + - " \"build_hash\" : \"4092d253dddda0ff1ff3d1c09ac7678e757843f9\",\n" + - " \"build_timestamp\" : \"2015-10-13T08:53:10Z\",\n" + - " \"build_snapshot\" : true,\n" + - " \"lucene_version\" : \"5.2.1\"\n" + - " },\n" + - " \"tagline\" : \"You Know, for Search\"\n" + - "}\n"; - } -} diff --git a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java index a5b64b2a072..f8a44ed0568 100644 --- a/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java +++ b/elasticsearch/x-pack/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; @@ -54,6 +55,7 @@ import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -170,7 +172,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { @Override protected Set excludeTemplates() { - return monitoringTemplates().keySet(); + return monitoringTemplateNames(); } @Before @@ -278,9 +280,17 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { } } - protected Map monitoringTemplates() { + protected List> monitoringTemplates() { return StreamSupport.stream(new ResolversRegistry(Settings.EMPTY).spliterator(), false) - .collect(Collectors.toMap(MonitoringIndexNameResolver::templateName, MonitoringIndexNameResolver::template, (a, b) -> a)); + .map((resolver) -> new Tuple<>(resolver.templateName(), resolver.template())) + .distinct() + .collect(Collectors.toList()); + } + + protected Set monitoringTemplateNames() { + return StreamSupport.stream(new ResolversRegistry(Settings.EMPTY).spliterator(), false) + 
.map(MonitoringIndexNameResolver::templateName) + .collect(Collectors.toSet()); } protected void assertTemplateInstalled(String name) { @@ -303,7 +313,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { } protected void waitForMonitoringTemplates() throws Exception { - assertBusy(() -> monitoringTemplates().keySet().forEach(this::assertTemplateInstalled), 30, TimeUnit.SECONDS); + assertBusy(() -> monitoringTemplateNames().forEach(this::assertTemplateInstalled), 30, TimeUnit.SECONDS); } protected void waitForMonitoringIndices() throws Exception { @@ -519,9 +529,6 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { "\n" + "admin:\n" + " cluster: [ 'cluster:monitor/nodes/info', 'cluster:monitor/nodes/liveness' ]\n" + - "transport_client:\n" + - " cluster: [ 'cluster:monitor/nodes/info', 'cluster:monitor/nodes/liveness' ]\n" + - "\n" + "monitor:\n" + " cluster: [ 'cluster:monitor/nodes/info', 'cluster:monitor/nodes/liveness' ]\n" ; diff --git a/elasticsearch/x-pack/security/config/x-pack/log4j2.properties b/elasticsearch/x-pack/security/config/x-pack/log4j2.properties index 3b96b005c89..0f02b3a1d88 100644 --- a/elasticsearch/x-pack/security/config/x-pack/log4j2.properties +++ b/elasticsearch/x-pack/security/config/x-pack/log4j2.properties @@ -3,7 +3,7 @@ appender.audit_rolling.name = audit_rolling appender.audit_rolling.fileName = ${sys:es.logs}_access.log appender.audit_rolling.layout.type = PatternLayout appender.audit_rolling.layout.pattern = [%d{ISO8601}] %m%n -appender.audit_rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log +appender.audit_rolling.filePattern = ${sys:es.logs}_access-%d{yyyy-MM-dd}.log appender.audit_rolling.policies.type = Policies appender.audit_rolling.policies.time.type = TimeBasedTriggeringPolicy appender.audit_rolling.policies.time.interval = 1 diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 88a7e17df60..17974ffcd8d 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -51,11 +51,13 @@ import org.elasticsearch.xpack.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.security.action.user.GetUsersAction; import org.elasticsearch.xpack.security.action.user.PutUserAction; +import org.elasticsearch.xpack.security.action.user.SetEnabledAction; import org.elasticsearch.xpack.security.action.user.TransportAuthenticateAction; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; import org.elasticsearch.xpack.security.action.user.TransportDeleteUserAction; import org.elasticsearch.xpack.security.action.user.TransportGetUsersAction; import org.elasticsearch.xpack.security.action.user.TransportPutUserAction; +import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; @@ -96,6 +98,7 @@ import org.elasticsearch.xpack.security.rest.action.user.RestChangePasswordActio import org.elasticsearch.xpack.security.rest.action.user.RestDeleteUserAction; import 
org.elasticsearch.xpack.security.rest.action.user.RestGetUsersAction; import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction; +import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction; import org.elasticsearch.xpack.security.transport.SecurityServerTransportService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3HttpServerTransport; @@ -219,15 +222,15 @@ public class Security implements ActionPlugin, IngestPlugin { if (enabled == false) { return Collections.emptyList(); } - AnonymousUser.initialize(settings); // TODO: this is sketchy...testing is difficult b/c it is static.... List components = new ArrayList<>(); final SecurityContext securityContext = new SecurityContext(settings, threadPool, cryptoService); components.add(securityContext); // realms construction - final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client, threadPool); - final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore); + final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client); + final AnonymousUser anonymousUser = new AnonymousUser(settings); + final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore, anonymousUser); Map realmFactories = new HashMap<>(); realmFactories.put(FileRealm.TYPE, config -> new FileRealm(config, resourceWatcherService)); realmFactories.put(NativeRealm.TYPE, config -> new NativeRealm(config, nativeUsersStore)); @@ -246,6 +249,7 @@ public class Security implements ActionPlugin, IngestPlugin { final Realms realms = new Realms(settings, env, realmFactories, licenseState, reservedRealm); components.add(nativeUsersStore); components.add(realms); + components.add(reservedRealm); // audit trails construction IndexAuditTrail indexAuditTrail = null; @@ -294,7 +298,7 @@ public class Security implements ActionPlugin, IngestPlugin { } final AuthenticationService authcService = new AuthenticationService(settings, realms, auditTrailService, - cryptoService, failureHandler, threadPool); + cryptoService, failureHandler, threadPool, anonymousUser); components.add(authcService); final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService); @@ -302,7 +306,7 @@ public class Security implements ActionPlugin, IngestPlugin { final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(securityContext); final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore); final AuthorizationService authzService = new AuthorizationService(settings, allRolesStore, clusterService, - auditTrailService, failureHandler, threadPool); + auditTrailService, failureHandler, threadPool, anonymousUser); components.add(fileRolesStore); // has lifecycle components.add(nativeRolesStore); // used by roles actions components.add(reservedRolesStore); // used by roles actions @@ -458,7 +462,8 @@ public class Security implements ActionPlugin, IngestPlugin { new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), new ActionHandler<>(ChangePasswordAction.INSTANCE, TransportChangePasswordAction.class), - new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class)); + new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class), + new 
ActionHandler<>(SetEnabledAction.INSTANCE, TransportSetEnabledAction.class)); } @Override @@ -487,7 +492,8 @@ public class Security implements ActionPlugin, IngestPlugin { RestGetRolesAction.class, RestPutRoleAction.class, RestDeleteRoleAction.class, - RestChangePasswordAction.class); + RestChangePasswordAction.class, + RestSetEnabledAction.class); } @Override diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java index 8c636cc8e00..61a21d2f84f 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java @@ -94,7 +94,7 @@ public class SecurityFeatureSet implements XPackFeatureSet { Map<String, Object> auditUsage = auditUsage(auditTrailService); Map<String, Object> ipFilterUsage = ipFilterUsage(ipFilter); Map<String, Object> systemKeyUsage = systemKeyUsage(cryptoService); - Map<String, Object> anonymousUsage = Collections.singletonMap("enabled", AnonymousUser.enabled()); + Map<String, Object> anonymousUsage = Collections.singletonMap("enabled", AnonymousUser.isAnonymousEnabled(settings)); return new Usage(available(), enabled(), realmsUsage, rolesStoreUsage, sslUsage, auditUsage, ipFilterUsage, systemKeyUsage, anonymousUsage); } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java index 08237d1e476..6a7147eaa06 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.action.role; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,14 +18,25 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to delete a role from the security index */ -public class DeleteRoleRequest extends ActionRequest { +public class DeleteRoleRequest extends ActionRequest implements WriteRequest<DeleteRoleRequest> { private String name; - private boolean refresh = true; + private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; public DeleteRoleRequest() { } + @Override + public DeleteRoleRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -42,25 +54,17 @@ public class DeleteRoleRequest extends ActionRequest { return name; } - public void refresh(boolean refresh) { - this.refresh = refresh; - } - - public boolean refresh() { - return refresh; - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); name = in.readString(); - refresh = in.readBoolean(); + refreshPolicy = RefreshPolicy.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out);
out.writeString(name); - out.writeBoolean(refresh); + refreshPolicy.writeTo(out); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java index 646be72ca4f..77fe219c463 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java @@ -6,12 +6,14 @@ package org.elasticsearch.xpack.security.action.role; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; /** * A builder for requests to delete a role from the security index */ -public class DeleteRoleRequestBuilder extends ActionRequestBuilder { +public class DeleteRoleRequestBuilder extends ActionRequestBuilder + implements WriteRequestBuilder { public DeleteRoleRequestBuilder(ElasticsearchClient client) { this(client, DeleteRoleAction.INSTANCE); @@ -25,9 +27,4 @@ public class DeleteRoleRequestBuilder extends ActionRequestBuilder impl @Override public ActionRequestValidationException validate() { - Validation.Error error = Validation.Users.validateUsername(username); - if (error != null) { - return addValidationError(error.toString(), null); - } + // we cannot apply our validation rules here as an authenticate request could be for an LDAP user that doesn't fit our restrictions return null; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java index 54edc4c437c..5ac4e5d946b 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,10 +18,10 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to delete a native user. 
*/ -public class DeleteUserRequest extends ActionRequest implements UserRequest { +public class DeleteUserRequest extends ActionRequest implements UserRequest, WriteRequest { private String username; - private boolean refresh = true; + private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; public DeleteUserRequest() { } @@ -29,6 +30,17 @@ public class DeleteUserRequest extends ActionRequest implemen this.username = username; } + @Override + public DeleteUserRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -42,18 +54,10 @@ public class DeleteUserRequest extends ActionRequest implemen return this.username; } - public boolean refresh() { - return refresh; - } - public void username(String username) { this.username = username; } - public void refresh(boolean refresh) { - this.refresh = refresh; - } - @Override public String[] usernames() { return new String[] { username }; @@ -63,14 +67,14 @@ public class DeleteUserRequest extends ActionRequest implemen public void readFrom(StreamInput in) throws IOException { super.readFrom(in); username = in.readString(); - refresh = in.readBoolean(); + refreshPolicy = RefreshPolicy.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(username); - out.writeBoolean(refresh); + refreshPolicy.writeTo(out); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java index b228e979eb4..5d44cdb3418 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java @@ -6,9 +6,11 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; -public class DeleteUserRequestBuilder extends ActionRequestBuilder { +public class DeleteUserRequestBuilder extends ActionRequestBuilder + implements WriteRequestBuilder { public DeleteUserRequestBuilder(ElasticsearchClient client) { this(client, DeleteUserAction.INSTANCE); @@ -22,9 +24,4 @@ public class DeleteUserRequestBuilder extends ActionRequestBuilder implements Use if (roles == null) { validationException = addValidationError("roles are missing", validationException); } + if (metadata != null && MetadataUtils.containsReservedMetadata(metadata)) { + validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + "]", + validationException); + } // we do not check for a password hash here since it is possible that the user exists and we don't want to update the password return validationException; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java new file mode 100644 index 00000000000..c579b769e85 --- 
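// Illustrative sketch (not part of this patch): the boolean refresh flag on these security
// write requests is replaced by the core WriteRequest.RefreshPolicy contract, so callers
// now choose NONE, WAIT_UNTIL, or IMMEDIATE (the default). The class and user names below
// are example values only.
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.xpack.security.action.user.DeleteUserRequest;

class DeleteUserRefreshExample {
    static DeleteUserRequest buildRequest() {
        DeleteUserRequest request = new DeleteUserRequest();
        request.username("some_native_user");                // hypothetical native user
        request.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);  // block until the delete is searchable
        return request;
    }
}
// DeleteRoleRequest follows the same pattern, and the request builders inherit
// setRefreshPolicy(...) from the WriteRequestBuilder mixin.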
/dev/null +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * This action is for setting the enabled flag on a native or reserved user + */ +public class SetEnabledAction extends Action { + + public static final SetEnabledAction INSTANCE = new SetEnabledAction(); + public static final String NAME = "cluster:admin/xpack/security/user/set_enabled"; + + private SetEnabledAction() { + super(NAME); + } + + @Override + public SetEnabledRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new SetEnabledRequestBuilder(client); + } + + @Override + public SetEnabledResponse newResponse() { + return new SetEnabledResponse(); + } +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java new file mode 100644 index 00000000000..195fcd37c22 --- /dev/null +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.security.support.Validation.Error; +import org.elasticsearch.xpack.security.support.Validation.Users; + +import java.io.IOException; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * The request that allows to set a user as enabled or disabled + */ +public class SetEnabledRequest extends ActionRequest implements UserRequest, WriteRequest { + + private Boolean enabled; + private String username; + private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + Error error = Users.validateUsername(username, true, Settings.EMPTY); + if (error != null) { + validationException = addValidationError(error.toString(), validationException); + } + if (enabled == null) { + validationException = addValidationError("enabled must be set", validationException); + } + return validationException; + } + + /** + * @return whether the user should be set to enabled or not + */ + public Boolean enabled() { + return enabled; + } + + /** + * Set whether the user should be enabled or not. + */ + public void enabled(boolean enabled) { + this.enabled = enabled; + } + + /** + * @return the username that this request applies to. 
+ */ + public String username() { + return username; + } + + /** + * Set the username that the request applies to. Must not be {@code null} + */ + public void username(String username) { + this.username = username; + } + + @Override + public String[] usernames() { + return new String[] { username }; + } + + /** + * Should this request trigger a refresh ({@linkplain RefreshPolicy#IMMEDIATE}, the default), wait for a refresh ( + * {@linkplain RefreshPolicy#WAIT_UNTIL}), or proceed and ignore refreshes entirely ({@linkplain RefreshPolicy#NONE}). + */ + @Override + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } + + @Override + public SetEnabledRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.enabled = in.readBoolean(); + this.username = in.readString(); + this.refreshPolicy = RefreshPolicy.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(enabled); + out.writeString(username); + refreshPolicy.writeTo(out); + } +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java new file mode 100644 index 00000000000..133069e2b31 --- /dev/null +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.WriteRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Request builder for setting a user as enabled or disabled + */ +public class SetEnabledRequestBuilder extends ActionRequestBuilder + implements WriteRequestBuilder { + + public SetEnabledRequestBuilder(ElasticsearchClient client) { + super(client, SetEnabledAction.INSTANCE, new SetEnabledRequest()); + } + + /** + * Set the username of the user that should be enabled or disabled. Must not be {@code null} + */ + public SetEnabledRequestBuilder username(String username) { + request.username(username); + return this; + } + + /** + * Set whether the user should be enabled or not + */ + public SetEnabledRequestBuilder enabled(boolean enabled) { + request.enabled(enabled); + return this; + } +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java new file mode 100644 index 00000000000..fe44f5f5197 --- /dev/null +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
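// Illustrative sketch (not part of this patch): disabling a native or reserved user through
// the new SetEnabled action. `client` stands for any ElasticsearchClient, and the username
// is a made-up example.
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.xpack.security.action.user.SetEnabledRequestBuilder;
import org.elasticsearch.xpack.security.action.user.SetEnabledResponse;

class SetEnabledExample {
    static SetEnabledResponse disable(ElasticsearchClient client) {
        return new SetEnabledRequestBuilder(client)
                .username("some_native_user")   // hypothetical account to disable
                .enabled(false)                 // pass true to re-enable it
                .get();                         // empty SetEnabledResponse on success
    }
}
// The request defaults to RefreshPolicy.IMMEDIATE; WAIT_UNTIL or NONE can be chosen through
// the WriteRequestBuilder mixin when immediate visibility is not required.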
+ */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.ActionResponse; + +/** + * Empty response for a {@link SetEnabledRequest} + */ +public class SetEnabledResponse extends ActionResponse { +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java index b05959caec3..5f368564be9 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.user.XPackUser; /** */ @@ -36,7 +37,7 @@ public class TransportAuthenticateAction extends HandledTransportAction listener) { final User user = securityContext.getUser(); - if (SystemUser.is(user)) { + if (SystemUser.is(user) || XPackUser.is(user)) { listener.onFailure(new IllegalArgumentException("user [" + user.principal() + "] is internal")); return; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 17d0356e5a0..b91206d24b3 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.user.XPackUser; /** */ @@ -35,10 +36,10 @@ public class TransportChangePasswordAction extends HandledTransportAction listener) { final String username = request.username(); - if (AnonymousUser.isAnonymousUsername(username)) { + if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); return; - } else if (SystemUser.NAME.equals(username)) { + } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal")); return; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java index 207bac9ba17..736f8301498 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.security.user.AnonymousUser; import 
org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.user.XPackUser; public class TransportDeleteUserAction extends HandledTransportAction { @@ -34,15 +35,15 @@ public class TransportDeleteUserAction extends HandledTransportAction listener) { final String username = request.username(); - if (ReservedRealm.isReserved(username)) { - if (AnonymousUser.isAnonymousUsername(username)) { + if (ReservedRealm.isReserved(username, settings)) { + if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be deleted")); return; } else { listener.onFailure(new IllegalArgumentException("user [" + username + "] is reserved and cannot be deleted")); return; } - } else if (SystemUser.NAME.equals(username)) { + } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal")); return; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 192828be9b4..82220a32cb7 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -17,9 +17,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.xpack.security.user.XPackUser; import java.util.ArrayList; import java.util.List; @@ -29,14 +29,16 @@ import static org.elasticsearch.common.Strings.arrayToDelimitedString; public class TransportGetUsersAction extends HandledTransportAction { private final NativeUsersStore usersStore; + private final ReservedRealm reservedRealm; @Inject public TransportGetUsersAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, NativeUsersStore usersStore, - TransportService transportService) { + TransportService transportService, ReservedRealm reservedRealm) { super(settings, GetUsersAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, GetUsersRequest::new); this.usersStore = usersStore; + this.reservedRealm = reservedRealm; } @Override @@ -48,16 +50,13 @@ public class TransportGetUsersAction extends HandledTransportAction { @@ -35,8 +36,8 @@ public class TransportPutUserAction extends HandledTransportAction listener) { final String username = request.username(); - if (ReservedRealm.isReserved(username)) { - if (AnonymousUser.isAnonymousUsername(username)) { + if (ReservedRealm.isReserved(username, settings)) { + if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); return; } else { @@ 
-44,7 +45,7 @@ public class TransportPutUserAction extends HandledTransportAction { + + private final NativeUsersStore usersStore; + + @Inject + public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + NativeUsersStore usersStore) { + super(settings, SetEnabledAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + SetEnabledRequest::new); + this.usersStore = usersStore; + } + + @Override + protected void doExecute(SetEnabledRequest request, ActionListener listener) { + final String username = request.username(); + // make sure the user is not disabling themselves + if (Authentication.getAuthentication(threadPool.getThreadContext()).getRunAsUser().principal().equals(request.username())) { + listener.onFailure(new IllegalArgumentException("users may not update the enabled status of their own account")); + return; + } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) { + listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal")); + return; + } else if (AnonymousUser.isAnonymousUsername(username, settings)) { + listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified using the api")); + return; + } + + usersStore.setEnabled(username, request.enabled(), request.getRefreshPolicy(), new ActionListener() { + @Override + public void onResponse(Void v) { + listener.onResponse(new SetEnabledResponse()); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevel.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditLevel.java similarity index 76% rename from elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevel.java rename to elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditLevel.java index 30a68b04777..d1c66a12d87 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevel.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditLevel.java @@ -3,17 +3,18 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.security.audit.index; +package org.elasticsearch.xpack.security.audit; import java.util.Arrays; import java.util.EnumSet; import java.util.List; import java.util.Locale; -public enum IndexAuditLevel { +public enum AuditLevel { ANONYMOUS_ACCESS_DENIED, AUTHENTICATION_FAILED, + REALM_AUTHENTICATION_FAILED, ACCESS_GRANTED, ACCESS_DENIED, TAMPERED_REQUEST, @@ -23,13 +24,13 @@ public enum IndexAuditLevel { RUN_AS_GRANTED, RUN_AS_DENIED; - static EnumSet parse(List levels) { - EnumSet enumSet = EnumSet.noneOf(IndexAuditLevel.class); + static EnumSet parse(List levels) { + EnumSet enumSet = EnumSet.noneOf(AuditLevel.class); for (String level : levels) { String lowerCaseLevel = level.trim().toLowerCase(Locale.ROOT); switch (lowerCaseLevel) { case "_all": - enumSet.addAll(Arrays.asList(IndexAuditLevel.values())); + enumSet.addAll(Arrays.asList(AuditLevel.values())); break; case "anonymous_access_denied": enumSet.add(ANONYMOUS_ACCESS_DENIED); @@ -37,6 +38,9 @@ public enum IndexAuditLevel { case "authentication_failed": enumSet.add(AUTHENTICATION_FAILED); break; + case "realm_authentication_failed": + enumSet.add(REALM_AUTHENTICATION_FAILED); + break; case "access_granted": enumSet.add(ACCESS_GRANTED); break; @@ -68,9 +72,9 @@ public enum IndexAuditLevel { return enumSet; } - public static EnumSet parse(List includeLevels, List excludeLevels) { - EnumSet included = parse(includeLevels); - EnumSet excluded = parse(excludeLevels); + public static EnumSet parse(List includeLevels, List excludeLevels) { + EnumSet included = parse(includeLevels); + EnumSet excluded = parse(excludeLevels); included.removeAll(excluded); return included; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java index 819382190c1..22727c0008e 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java @@ -51,6 +51,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.security.InternalClient; +import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.authz.privilege.SystemPrivilege; @@ -85,19 +86,20 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.audit.AuditLevel.REALM_AUTHENTICATION_FAILED; import static org.elasticsearch.xpack.security.audit.AuditUtil.indices; import static org.elasticsearch.xpack.security.audit.AuditUtil.restRequestContent; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.ACCESS_DENIED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.ACCESS_GRANTED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.ANONYMOUS_ACCESS_DENIED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.AUTHENTICATION_FAILED; -import static 
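// Illustrative sketch (not part of this patch): how the renamed AuditLevel resolves
// include/exclude lists into the effective event set. Names are matched case-insensitively,
// and "_all" expands to every level before the exclusions are removed.
import java.util.Arrays;
import java.util.EnumSet;
import org.elasticsearch.xpack.security.audit.AuditLevel;

class AuditLevelExample {
    static EnumSet<AuditLevel> effectiveEvents() {
        return AuditLevel.parse(
                Arrays.asList("_all"),            // start from every audit level
                Arrays.asList("access_granted")); // then drop the noisiest one
    }
}
// The result contains REALM_AUTHENTICATION_FAILED and the other levels, but not ACCESS_GRANTED.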
org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.CONNECTION_DENIED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.CONNECTION_GRANTED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.RUN_AS_DENIED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.RUN_AS_GRANTED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.SYSTEM_ACCESS_GRANTED; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.TAMPERED_REQUEST; -import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.parse; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ANONYMOUS_ACCESS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.AUTHENTICATION_FAILED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.CONNECTION_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.CONNECTION_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.RUN_AS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.RUN_AS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.SYSTEM_ACCESS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.TAMPERED_REQUEST; +import static org.elasticsearch.xpack.security.audit.AuditLevel.parse; import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.resolve; /** @@ -105,27 +107,30 @@ import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.res */ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, ClusterStateListener { - public static final int DEFAULT_BULK_SIZE = 1000; - public static final int MAX_BULK_SIZE = 10000; - public static final int DEFAULT_MAX_QUEUE_SIZE = 1000; - public static final TimeValue DEFAULT_FLUSH_INTERVAL = TimeValue.timeValueSeconds(1); - public static final IndexNameResolver.Rollover DEFAULT_ROLLOVER = IndexNameResolver.Rollover.DAILY; + public static final String NAME = "index"; public static final String INDEX_NAME_PREFIX = ".security_audit_log"; public static final String DOC_TYPE = "event"; - public static final Setting ROLLOVER_SETTING = + public static final String INDEX_TEMPLATE_NAME = "security_audit_log"; + + private static final int DEFAULT_BULK_SIZE = 1000; + private static final int MAX_BULK_SIZE = 10000; + private static final int DEFAULT_MAX_QUEUE_SIZE = 1000; + private static final TimeValue DEFAULT_FLUSH_INTERVAL = TimeValue.timeValueSeconds(1); + private static final IndexNameResolver.Rollover DEFAULT_ROLLOVER = IndexNameResolver.Rollover.DAILY; + private static final Setting ROLLOVER_SETTING = new Setting<>(setting("audit.index.rollover"), (s) -> DEFAULT_ROLLOVER.name(), s -> IndexNameResolver.Rollover.valueOf(s.toUpperCase(Locale.ENGLISH)), Property.NodeScope); - public static final Setting QUEUE_SIZE_SETTING = + private static final Setting QUEUE_SIZE_SETTING = Setting.intSetting(setting("audit.index.queue_max_size"), DEFAULT_MAX_QUEUE_SIZE, 1, Property.NodeScope); - public static final String INDEX_TEMPLATE_NAME = "security_audit_log"; - public static final String DEFAULT_CLIENT_NAME = "security-audit-client"; + private static final String DEFAULT_CLIENT_NAME = "security-audit-client"; - static final List DEFAULT_EVENT_INCLUDES = Arrays.asList( + 
private static final List DEFAULT_EVENT_INCLUDES = Arrays.asList( ACCESS_DENIED.toString(), ACCESS_GRANTED.toString(), ANONYMOUS_ACCESS_DENIED.toString(), AUTHENTICATION_FAILED.toString(), + REALM_AUTHENTICATION_FAILED.toString(), CONNECTION_DENIED.toString(), CONNECTION_GRANTED.toString(), TAMPERED_REQUEST.toString(), @@ -134,23 +139,24 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl ); private static final String FORBIDDEN_INDEX_SETTING = "index.mapper.dynamic"; - public static final Setting INDEX_SETTINGS = + private static final Setting INDEX_SETTINGS = Setting.groupSetting(setting("audit.index.settings.index."), Property.NodeScope); - public static final Setting> INCLUDE_EVENT_SETTINGS = + private static final Setting> INCLUDE_EVENT_SETTINGS = Setting.listSetting(setting("audit.index.events.include"), DEFAULT_EVENT_INCLUDES, Function.identity(), Property.NodeScope); - public static final Setting> EXCLUDE_EVENT_SETTINGS = + private static final Setting> EXCLUDE_EVENT_SETTINGS = Setting.listSetting(setting("audit.index.events.exclude"), Collections.emptyList(), Function.identity(), Property.NodeScope); - public static final Setting REMOTE_CLIENT_SETTINGS = + private static final Setting INCLUDE_REQUEST_BODY = + Setting.boolSetting(setting("audit.index.events.emit_request_body"), false, Property.NodeScope); + private static final Setting REMOTE_CLIENT_SETTINGS = Setting.groupSetting(setting("audit.index.client."), Property.NodeScope); - public static final Setting BULK_SIZE_SETTING = + private static final Setting BULK_SIZE_SETTING = Setting.intSetting(setting("audit.index.bulk_size"), DEFAULT_BULK_SIZE, 1, MAX_BULK_SIZE, Property.NodeScope); - public static final Setting FLUSH_TIMEOUT_SETTING = + private static final Setting FLUSH_TIMEOUT_SETTING = Setting.timeSetting(setting("audit.index.flush_interval"), DEFAULT_FLUSH_INTERVAL, TimeValue.timeValueMillis(1L), Property.NodeScope); - private final AtomicReference state = new AtomicReference<>(State.INITIALIZED); private final String nodeName; private final Client client; @@ -160,12 +166,13 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl private final Lock putMappingLock = new ReentrantLock(); private final ClusterService clusterService; private final boolean indexToRemoteCluster; + private final EnumSet events; + private final IndexNameResolver.Rollover rollover; + private final boolean includeRequestBody; private BulkProcessor bulkProcessor; - private IndexNameResolver.Rollover rollover; private String nodeHostName; private String nodeHostAddress; - private EnumSet events; @Override public String name() { @@ -180,25 +187,10 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl this.queueConsumer = new QueueConsumer(EsExecutors.threadName(settings, "audit-queue-consumer")); int maxQueueSize = QUEUE_SIZE_SETTING.get(settings); this.eventQueue = createQueue(maxQueueSize); - - // we have to initialize this here since we use rollover in determining if we can start... - rollover = ROLLOVER_SETTING.get(settings); - - // we have to initialize the events here since we can receive events before starting... 
- List includedEvents = INCLUDE_EVENT_SETTINGS.get(settings); - List excludedEvents = EXCLUDE_EVENT_SETTINGS.get(settings); - try { - events = parse(includedEvents, excludedEvents); - } catch (IllegalArgumentException e) { - logger.warn( - (Supplier) () -> new ParameterizedMessage( - "invalid event type specified, using default for audit index output. include events [{}], exclude events [{}]", - includedEvents, - excludedEvents), - e); - events = parse(DEFAULT_EVENT_INCLUDES, Collections.emptyList()); - } + this.rollover = ROLLOVER_SETTING.get(settings); + this.events = parse(INCLUDE_EVENT_SETTINGS.get(settings), EXCLUDE_EVENT_SETTINGS.get(settings)); this.indexToRemoteCluster = REMOTE_CLIENT_SETTINGS.get(settings).names().size() > 0; + this.includeRequestBody = INCLUDE_REQUEST_BODY.get(settings); if (indexToRemoteCluster == false) { // in the absence of client settings for remote indexing, fall back to the client that was passed in. @@ -391,7 +383,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl @Override public void authenticationFailed(String realm, AuthenticationToken token, String action, TransportMessage message) { - if (events.contains(AUTHENTICATION_FAILED)) { + if (events.contains(REALM_AUTHENTICATION_FAILED)) { if (XPackUser.is(token.principal()) == false) { try { enqueue(message("authentication_failed", action, token, realm, indices(message), message), "authentication_failed"); @@ -404,7 +396,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl @Override public void authenticationFailed(String realm, AuthenticationToken token, RestRequest request) { - if (events.contains(AUTHENTICATION_FAILED)) { + if (events.contains(REALM_AUTHENTICATION_FAILED)) { if (XPackUser.is(token.principal()) == false) { try { enqueue(message("authentication_failed", null, token, realm, null, request), "authentication_failed"); @@ -610,7 +602,9 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl if (indices != null) { msg.builder.array(Field.INDICES, indices.toArray(Strings.EMPTY_ARRAY)); } - msg.builder.field(Field.REQUEST_BODY, restRequestContent(request)); + if (includeRequestBody) { + msg.builder.field(Field.REQUEST_BODY, restRequestContent(request)); + } msg.builder.field(Field.ORIGIN_TYPE, "rest"); SocketAddress address = request.getRemoteAddress(); if (address instanceof InetSocketAddress) { @@ -630,7 +624,9 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl common("rest", type, msg.builder); msg.builder.field(Field.PRINCIPAL, user.principal()); - msg.builder.field(Field.REQUEST_BODY, restRequestContent(request)); + if (includeRequestBody) { + msg.builder.field(Field.REQUEST_BODY, restRequestContent(request)); + } msg.builder.field(Field.ORIGIN_TYPE, "rest"); SocketAddress address = request.getRemoteAddress(); if (address instanceof InetSocketAddress) { @@ -905,6 +901,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl settings.add(FLUSH_TIMEOUT_SETTING); settings.add(QUEUE_SIZE_SETTING); settings.add(REMOTE_CLIENT_SETTINGS); + settings.add(INCLUDE_REQUEST_BODY); } private class QueueConsumer extends Thread { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index bd6b2a20b55..d18df6f560e 100644 --- 
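// Illustrative sketch (not part of this patch): node settings exercising the new index audit
// trail options registered above. This assumes the setting(...) helper prefixes keys with
// "xpack.security." as elsewhere in the plugin.
import org.elasticsearch.common.settings.Settings;

class IndexAuditSettingsExample {
    static Settings auditSettings() {
        return Settings.builder()
                // opt in to logging REST request bodies into the audit index (off by default)
                .put("xpack.security.audit.index.events.emit_request_body", true)
                // replace the default event list, keeping the new realm-level failures
                .putArray("xpack.security.audit.index.events.include",
                        "access_denied", "authentication_failed", "realm_authentication_failed")
                .build();
    }
}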
a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; +import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.authz.privilege.SystemPrivilege; @@ -32,11 +33,27 @@ import org.elasticsearch.xpack.security.user.XPackUser; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; import java.util.List; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.ANONYMOUS_ACCESS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.AUTHENTICATION_FAILED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.REALM_AUTHENTICATION_FAILED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.CONNECTION_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.CONNECTION_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.RUN_AS_DENIED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.RUN_AS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.SYSTEM_ACCESS_GRANTED; +import static org.elasticsearch.xpack.security.audit.AuditLevel.TAMPERED_REQUEST; +import static org.elasticsearch.xpack.security.audit.AuditLevel.parse; import static org.elasticsearch.xpack.security.audit.AuditUtil.indices; import static org.elasticsearch.xpack.security.audit.AuditUtil.restRequestContent; @@ -52,10 +69,28 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail { Setting.boolSetting(setting("audit.logfile.prefix.emit_node_host_name"), false, Property.NodeScope); public static final Setting NODE_NAME_SETTING = Setting.boolSetting(setting("audit.logfile.prefix.emit_node_name"), true, Property.NodeScope); + private static final List DEFAULT_EVENT_INCLUDES = Arrays.asList( + ACCESS_DENIED.toString(), + ACCESS_GRANTED.toString(), + ANONYMOUS_ACCESS_DENIED.toString(), + AUTHENTICATION_FAILED.toString(), + CONNECTION_DENIED.toString(), + TAMPERED_REQUEST.toString(), + RUN_AS_DENIED.toString(), + RUN_AS_GRANTED.toString() + ); + private static final Setting> INCLUDE_EVENT_SETTINGS = + Setting.listSetting(setting("audit.logfile.events.include"), DEFAULT_EVENT_INCLUDES, Function.identity(), Property.NodeScope); + private static final Setting> EXCLUDE_EVENT_SETTINGS = + Setting.listSetting(setting("audit.logfile.events.exclude"), Collections.emptyList(), Function.identity(), Property.NodeScope); + private static final Setting INCLUDE_REQUEST_BODY = + Setting.boolSetting(setting("audit.logfile.events.emit_request_body"), false, 
Property.NodeScope); private final Logger logger; private final ClusterService clusterService; private final ThreadContext threadContext; + private final EnumSet events; + private final boolean includeRequestBody; private String prefix; @@ -73,6 +108,8 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail { this.logger = logger; this.clusterService = clusterService; this.threadContext = threadContext; + this.events = parse(INCLUDE_EVENT_SETTINGS.get(settings), EXCLUDE_EVENT_SETTINGS.get(settings)); + this.includeRequestBody = INCLUDE_REQUEST_BODY.get(settings); } private String getPrefix() { @@ -84,300 +121,240 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail { @Override public void anonymousAccessDenied(String action, TransportMessage message) { - String indices = indicesString(message); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices, + if (events.contains(ANONYMOUS_ACCESS_DENIED)) { + String indices = indicesString(message); + if (indices != null) { + logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, indices, message.getClass().getSimpleName()); } else { - logger.warn("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, message.getClass().getSimpleName()); - } else { - logger.warn("{}[transport] [anonymous_access_denied]\t{}, action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action); + logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, message.getClass().getSimpleName()); } } } @Override public void anonymousAccessDenied(RestRequest request) { - if (logger.isDebugEnabled()) { - logger.debug("{}[rest] [anonymous_access_denied]\t{}, uri=[{}], request_body=[{}]", getPrefix(), - hostAttributes(request), request.uri(), restRequestContent(request)); - } else { - logger.warn("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + if (events.contains(ANONYMOUS_ACCESS_DENIED)) { + if (includeRequestBody) { + logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}], request_body=[{}]", getPrefix(), + hostAttributes(request), request.uri(), restRequestContent(request)); + } else { + logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + } } } @Override public void authenticationFailed(AuthenticationToken token, String action, TransportMessage message) { - String indices = indicesString(message); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]", + if (events.contains(AUTHENTICATION_FAILED)) { + String indices = 
indicesString(message); + if (indices != null) { + logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]", getPrefix(), originAttributes(message, clusterService.localNode(), threadContext), token.principal(), - action, indices, message.getClass().getSimpleName()); + action, indices, message.getClass().getSimpleName()); } else { - logger.error("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), token.principal(), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), token.principal(), action, + logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), token.principal(), action, message.getClass().getSimpleName()); - } else { - logger.error("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), token.principal(), action); } + } } @Override public void authenticationFailed(RestRequest request) { - if (logger.isDebugEnabled()) { - logger.debug("{}[rest] [authentication_failed]\t{}, uri=[{}], request_body=[{}]", getPrefix(), hostAttributes(request), - request.uri(), restRequestContent(request)); - } else { - logger.error("{}[rest] [authentication_failed]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + if (events.contains(AUTHENTICATION_FAILED)) { + if (includeRequestBody) { + logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}], request_body=[{}]", getPrefix(), hostAttributes(request), + request.uri(), restRequestContent(request)); + } else { + logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + } } } @Override public void authenticationFailed(String action, TransportMessage message) { - String indices = indicesString(message); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices, + if (events.contains(AUTHENTICATION_FAILED)) { + String indices = indicesString(message); + if (indices != null) { + logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, indices, message.getClass().getSimpleName()); } else { - logger.error("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [authentication_failed]\t{}, action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, message.getClass().getSimpleName()); - } else { - logger.error("{}[transport] [authentication_failed]\t{}, action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action); + logger.info("{}[transport] 
[authentication_failed]\t{}, action=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, message.getClass().getSimpleName()); } } } @Override public void authenticationFailed(AuthenticationToken token, RestRequest request) { - if (logger.isDebugEnabled()) { - logger.debug("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}], request_body=[{}]", getPrefix(), - hostAttributes(request), token.principal(), request.uri(), restRequestContent(request)); - } else { - logger.error("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]", getPrefix(), hostAttributes(request), - token.principal(), request.uri()); + if (events.contains(AUTHENTICATION_FAILED)) { + if (includeRequestBody) { + logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}], request_body=[{}]", getPrefix(), + hostAttributes(request), token.principal(), request.uri(), restRequestContent(request)); + } else { + logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]", getPrefix(), hostAttributes(request), + token.principal(), request.uri()); + } } } @Override public void authenticationFailed(String realm, AuthenticationToken token, String action, TransportMessage message) { - if (logger.isTraceEnabled()) { + if (events.contains(REALM_AUTHENTICATION_FAILED)) { String indices = indicesString(message); if (indices != null) { - logger.trace("{}[transport] [authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], indices=[{}], " + - "request=[{}]", getPrefix(), realm, originAttributes(message, clusterService.localNode(), threadContext), - token.principal(), action, indices, message.getClass().getSimpleName()); + logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], indices=[{}], " + + "request=[{}]", getPrefix(), realm, originAttributes(message, clusterService.localNode(), threadContext), + token.principal(), action, indices, message.getClass().getSimpleName()); } else { - logger.trace("{}[transport] [authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]", - getPrefix(), realm, originAttributes(message, clusterService.localNode(), threadContext), token.principal(), - action, message.getClass().getSimpleName()); + logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]", + getPrefix(), realm, originAttributes(message, clusterService.localNode(), threadContext), token.principal(), + action, message.getClass().getSimpleName()); } } } @Override public void authenticationFailed(String realm, AuthenticationToken token, RestRequest request) { - if (logger.isTraceEnabled()) { - logger.trace("{}[rest] [authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}], request_body=[{}]", getPrefix(), - realm, hostAttributes(request), token.principal(), request.uri(), restRequestContent(request)); + if (events.contains(REALM_AUTHENTICATION_FAILED)) { + if (includeRequestBody) { + logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}], request_body=[{}]", + getPrefix(), realm, hostAttributes(request), token.principal(), request.uri(), restRequestContent(request)); + } else { + logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]", getPrefix(), + realm, hostAttributes(request), token.principal(), request.uri()); + } } } @Override public void accessGranted(User user, String action, TransportMessage message) 
{ - String indices = indicesString(message); - - // special treatment for internal system actions - only log on trace - if ((SystemUser.is(user) && SystemPrivilege.INSTANCE.predicate().test(action)) || XPackUser.is(user)) { - if (logger.isTraceEnabled()) { - if (indices != null) { - logger.trace("{}[transport] [access_granted]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + final boolean isSystem = (SystemUser.is(user) && SystemPrivilege.INSTANCE.predicate().test(action)) || XPackUser.is(user); + final boolean logSystemAccessGranted = isSystem && events.contains(SYSTEM_ACCESS_GRANTED); + final boolean shouldLog = logSystemAccessGranted || (isSystem == false && events.contains(ACCESS_GRANTED)); + if (shouldLog) { + String indices = indicesString(message); + if (indices != null) { + logger.info("{}[transport] [access_granted]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices, - message.getClass().getSimpleName()); - } else { - logger.trace("{}[transport] [access_granted]\t{}, {}, action=[{}], request=[{}]", getPrefix(), + message.getClass().getSimpleName()); + } else { + logger.info("{}[transport] [access_granted]\t{}, {}, action=[{}], request=[{}]", getPrefix(), originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, - message.getClass().getSimpleName()); - } - } - return; - } - - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [access_granted]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices, message.getClass().getSimpleName()); - } else { - logger.info("{}[transport] [access_granted]\t{}, {}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [access_granted]\t{}, {}, action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, - message.getClass().getSimpleName()); - } else { - logger.info("{}[transport] [access_granted]\t{}, {}, action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action); } } } @Override public void accessDenied(User user, String action, TransportMessage message) { - String indices = indicesString(message); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [access_denied]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices, + if (events.contains(ACCESS_DENIED)) { + String indices = indicesString(message); + if (indices != null) { + logger.info("{}[transport] [access_denied]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices, message.getClass().getSimpleName()); } else { - logger.error("{}[transport] [access_denied]\t{}, {}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [access_denied]\t{}, 
{}, action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, + logger.info("{}[transport] [access_denied]\t{}, {}, action=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), principal(user), action, message.getClass().getSimpleName()); - } else { - logger.error("{}[transport] [access_denied]\t{}, {}, action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), principal(user), action); } } } @Override public void tamperedRequest(RestRequest request) { - if (logger.isDebugEnabled()) { - logger.debug("{}[rest] [tampered_request]\t{}, uri=[{}], request_body=[{}]", getPrefix(), hostAttributes(request), - request.uri(), restRequestContent(request)); - } else { - logger.error("{}[rest] [tampered_request]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + if (events.contains(TAMPERED_REQUEST)) { + if (includeRequestBody) { + logger.info("{}[rest] [tampered_request]\t{}, uri=[{}], request_body=[{}]", getPrefix(), hostAttributes(request), + request.uri(), restRequestContent(request)); + } else { + logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]", getPrefix(), hostAttributes(request), request.uri()); + } } } @Override public void tamperedRequest(String action, TransportMessage message) { - String indices = indicesString(message); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices, + if (events.contains(TAMPERED_REQUEST)) { + String indices = indicesString(message); + if (indices != null) { + logger.info("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, indices, message.getClass().getSimpleName()); } else { - logger.error("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action, + logger.info("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]", getPrefix(), + originAttributes(message, clusterService.localNode(), threadContext), action, message.getClass().getSimpleName()); - } else { - logger.error("{}[transport] [tampered_request]\t{}, action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), action); } } } @Override public void tamperedRequest(User user, String action, TransportMessage request) { - String indices = indicesString(request); - if (indices != null) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), - originAttributes(request, clusterService.localNode(), threadContext), principal(user), action, indices, + if (events.contains(TAMPERED_REQUEST)) { + String indices = indicesString(request); + if (indices != null) { + logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]", getPrefix(), + originAttributes(request, clusterService.localNode(), 
threadContext), principal(user), action, indices, request.getClass().getSimpleName()); } else { - logger.error("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}]", getPrefix(), - originAttributes(request, clusterService.localNode(), threadContext), principal(user), action, indices); - } - } else { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]", getPrefix(), - originAttributes(request, clusterService.localNode(), threadContext), principal(user), action, + logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]", getPrefix(), + originAttributes(request, clusterService.localNode(), threadContext), principal(user), action, request.getClass().getSimpleName()); - } else { - logger.error("{}[transport] [tampered_request]\t{}, {}, action=[{}]", getPrefix(), - originAttributes(request, clusterService.localNode(), threadContext), principal(user), action); } } } @Override public void connectionGranted(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) { - if (logger.isTraceEnabled()) { - logger.trace("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", getPrefix(), + if (events.contains(CONNECTION_GRANTED)) { + logger.info("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", getPrefix(), NetworkAddress.format(inetAddress), profile, rule); } } @Override public void connectionDenied(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) { - logger.error("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", getPrefix(), - NetworkAddress.format(inetAddress), profile, rule); + if (events.contains(CONNECTION_DENIED)) { + logger.info("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", getPrefix(), + NetworkAddress.format(inetAddress), profile, rule); + } } @Override public void runAsGranted(User user, String action, TransportMessage message) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [run_as_granted]\t{}, principal=[{}], run_as_principal=[{}], action=[{}], request=[{}]", + if (events.contains(RUN_AS_GRANTED)) { + logger.info("{}[transport] [run_as_granted]\t{}, principal=[{}], run_as_principal=[{}], action=[{}], request=[{}]", getPrefix(), originAttributes(message, clusterService.localNode(), threadContext), user.principal(), user.runAs().principal(), action, message.getClass().getSimpleName()); - } else { - logger.info("{}[transport] [run_as_granted]\t{}, principal=[{}], run_as_principal=[{}], action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), threadContext), user.principal(), - user.runAs().principal(), action); } } @Override public void runAsDenied(User user, String action, TransportMessage message) { - if (logger.isDebugEnabled()) { - logger.debug("{}[transport] [run_as_denied]\t{}, principal=[{}], run_as_principal=[{}], action=[{}], request=[{}]", + if (events.contains(RUN_AS_DENIED)) { + logger.info("{}[transport] [run_as_denied]\t{}, principal=[{}], run_as_principal=[{}], action=[{}], request=[{}]", getPrefix(), originAttributes(message, clusterService.localNode(), threadContext), user.principal(), user.runAs().principal(), action, message.getClass().getSimpleName()); - } else { - logger.info("{}[transport] [run_as_denied]\t{}, principal=[{}], run_as_principal=[{}], action=[{}]", getPrefix(), - originAttributes(message, clusterService.localNode(), 
threadContext), user.principal(), - user.runAs().principal(), action); } } @Override public void runAsDenied(User user, RestRequest request) { - if (logger.isDebugEnabled()) { - logger.debug("{}[rest] [run_as_denied]\t{}, principal=[{}], uri=[{}], request_body=[{}]", getPrefix(), + if (events.contains(RUN_AS_DENIED)) { + if (includeRequestBody) { + logger.info("{}[rest] [run_as_denied]\t{}, principal=[{}], uri=[{}], request_body=[{}]", getPrefix(), hostAttributes(request), user.principal(), request.uri(), restRequestContent(request)); - } else { - logger.info("{}[transport] [run_as_denied]\t{}, principal=[{}], uri=[{}]", getPrefix(), - hostAttributes(request), user.principal(), request.uri()); + } else { + logger.info("{}[rest] [run_as_denied]\t{}, principal=[{}], uri=[{}]", getPrefix(), + hostAttributes(request), user.principal(), request.uri()); + } } } @@ -465,5 +442,8 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail { settings.add(HOST_ADDRESS_SETTING); settings.add(HOST_NAME_SETTING); settings.add(NODE_NAME_SETTING); + settings.add(INCLUDE_EVENT_SETTINGS); + settings.add(EXCLUDE_EVENT_SETTINGS); + settings.add(INCLUDE_REQUEST_BODY); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 08f86226729..b884b9f7699 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -50,11 +50,13 @@ public class AuthenticationService extends AbstractComponent { private final AuthenticationFailureHandler failureHandler; private final ThreadContext threadContext; private final String nodeName; + private final AnonymousUser anonymousUser; private final boolean signUserHeader; private final boolean runAsEnabled; + private final boolean isAnonymousUserEnabled; public AuthenticationService(Settings settings, Realms realms, AuditTrailService auditTrail, CryptoService cryptoService, - AuthenticationFailureHandler failureHandler, ThreadPool threadPool) { + AuthenticationFailureHandler failureHandler, ThreadPool threadPool, AnonymousUser anonymousUser) { super(settings); this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.realms = realms; @@ -62,8 +64,10 @@ public class AuthenticationService extends AbstractComponent { this.cryptoService = cryptoService; this.failureHandler = failureHandler; this.threadContext = threadPool.getThreadContext(); + this.anonymousUser = anonymousUser; this.signUserHeader = SIGN_USER_HEADER.get(settings); this.runAsEnabled = RUN_AS_ENABLED.get(settings); + this.isAnonymousUserEnabled = AnonymousUser.isAnonymousEnabled(settings); } /** @@ -157,6 +161,7 @@ public class AuthenticationService extends AbstractComponent { throw handleNullUser(token); } user = lookupRunAsUserIfNecessary(user, token); + checkIfUserIsDisabled(user, token); final Authentication authentication = new Authentication(user, authenticatedBy, lookedupBy); authentication.writeToContext(threadContext, cryptoService, signUserHeader); @@ -204,9 +209,9 @@ public class AuthenticationService extends AbstractComponent { if (fallbackUser != null) { RealmRef authenticatedBy = new RealmRef("__fallback", "__fallback", nodeName); authentication = new Authentication(fallbackUser, authenticatedBy, null); - } else if 
(AnonymousUser.enabled()) { + } else if (isAnonymousUserEnabled) { RealmRef authenticatedBy = new RealmRef("__anonymous", "__anonymous", nodeName); - authentication = new Authentication(AnonymousUser.INSTANCE, authenticatedBy, null); + authentication = new Authentication(anonymousUser, authenticatedBy, null); } if (authentication != null) { @@ -297,6 +302,13 @@ public class AuthenticationService extends AbstractComponent { return user; } + void checkIfUserIsDisabled(User user, AuthenticationToken token) { + if (user.enabled() == false || (user.runAs() != null && user.runAs().enabled() == false)) { + logger.debug("user [{}] is disabled. failing authentication", user); + throw request.authenticationFailed(token); + } + } + abstract class AuditableRequest { abstract void realmAuthenticationFailed(AuthenticationToken token, String realm); diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java index 0d896971b23..df502a83d94 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java @@ -229,7 +229,7 @@ public class ESNativeRealmMigrateTool extends MultiCommand { Path usersFile = FileUserPasswdStore.resolveFile(env); Path usersRolesFile = FileUserRolesStore.resolveFile(env); terminal.println("importing users from [" + usersFile + "]..."); - Map userToHashedPW = FileUserPasswdStore.parseFile(usersFile, null); + Map userToHashedPW = FileUserPasswdStore.parseFile(usersFile, null, settings); Map userToRoles = FileUserRolesStore.parseFile(usersRolesFile, null); Set existingUsers; try { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java index e018c5e3ac8..172e5c9a9a3 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.esnative; -import java.util.List; - import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; @@ -19,12 +17,11 @@ public class NativeRealm extends CachingUsernamePasswordRealm { public static final String TYPE = "native"; - final NativeUsersStore userStore; + private final NativeUsersStore userStore; public NativeRealm(RealmConfig config, NativeUsersStore usersStore) { super(TYPE, config); this.userStore = usersStore; - usersStore.addListener(new Listener()); } @Override @@ -41,14 +38,4 @@ public class NativeRealm extends CachingUsernamePasswordRealm { protected User doAuthenticate(UsernamePasswordToken token) { return userStore.verifyPassword(token.principal(), token.credentials()); } - - class Listener implements NativeUsersStore.ChangeListener { - - @Override - public void onUsersChanged(List usernames) { - for (String username : usernames) { - expire(username); - } - } - } } diff --git 
a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 1eabb7937fa..682b5d6d9ef 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -5,16 +5,13 @@ */ package org.elasticsearch.xpack.security.authc.esnative; -import com.carrotsearch.hppc.ObjectHashSet; -import com.carrotsearch.hppc.ObjectLongHashMap; -import com.carrotsearch.hppc.ObjectLongMap; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.DocWriteResponse.Result; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; @@ -28,7 +25,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -41,16 +37,12 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Cancellable; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest; @@ -64,14 +56,14 @@ import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.User.Fields; +import org.elasticsearch.xpack.security.user.XPackUser; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Iterator; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -81,25 +73,20 @@ import static org.elasticsearch.xpack.security.Security.setting; import static 
org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateUpToDate; /** - * ESNativeUsersStore is a {@code UserStore} that, instead of reading from a - * file, reads from an Elasticsearch index instead. This {@code UserStore} in - * particular implements both a User store and a UserRoles store, which means it - * is responsible for fetching not only {@code User} objects, but also - * retrieving the roles for a given username. + * NativeUsersStore is a store for users that reads from an Elasticsearch index. This store is responsible for fetching the full + * {@link User} object, which includes the names of the roles assigned to the user. *

- * No caching is done by this class, it is handled at a higher level + * No caching is done by this class, it is handled at a higher level and no polling for changes is done by this class. Modification + * operations make a best effort attempt to clear the cache on all nodes for the user that was modified. */ public class NativeUsersStore extends AbstractComponent implements ClusterStateListener { - public static final Setting SCROLL_SIZE_SETTING = + private static final Setting SCROLL_SIZE_SETTING = Setting.intSetting(setting("authc.native.scroll.size"), 1000, Property.NodeScope); - public static final Setting SCROLL_KEEP_ALIVE_SETTING = + private static final Setting SCROLL_KEEP_ALIVE_SETTING = Setting.timeSetting(setting("authc.native.scroll.keep_alive"), TimeValue.timeValueSeconds(10L), Property.NodeScope); - public static final Setting POLL_INTERVAL_SETTING = - Setting.timeSetting(setting("authc.native.reload.interval"), TimeValue.timeValueSeconds(30L), Property.NodeScope); - public enum State { INITIALIZED, STARTING, @@ -109,25 +96,20 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL FAILED } - public static final String USER_DOC_TYPE = "user"; - static final String RESERVED_USER_DOC_TYPE = "reserved-user"; + private static final String USER_DOC_TYPE = "user"; + private static final String RESERVED_USER_DOC_TYPE = "reserved-user"; private final Hasher hasher = Hasher.BCRYPT; - private final List listeners = new CopyOnWriteArrayList<>(); private final AtomicReference state = new AtomicReference<>(State.INITIALIZED); private final InternalClient client; - private final ThreadPool threadPool; - - private Cancellable pollerCancellable; private int scrollSize; private TimeValue scrollKeepAlive; private volatile boolean securityIndexExists = false; - public NativeUsersStore(Settings settings, InternalClient client, ThreadPool threadPool) { + public NativeUsersStore(Settings settings, InternalClient client) { super(settings); this.client = client; - this.threadPool = threadPool; } /** @@ -249,6 +231,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL } } + /** + * Blocking method to get the user and their password hash + */ private UserAndPassword getUserAndPassword(final String username) { final AtomicReference userRef = new AtomicReference<>(null); final CountDownLatch latch = new CountDownLatch(1); @@ -278,6 +263,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL return userRef.get(); } + /** + * Async method to retrieve a user and their password + */ private void getUserAndPassword(final String user, final ActionListener listener) { try { GetRequest request = client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, user).request(); @@ -310,17 +298,16 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL } } + /** + * Async method to change the password of a native or reserved user. If a reserved user does not exist, the document will be created + * with a hash of the provided password. 
+ */ public void changePassword(final ChangePasswordRequest request, final ActionListener listener) { final String username = request.username(); - if (SystemUser.NAME.equals(username)) { - ValidationException validationException = new ValidationException(); - validationException.addValidationError("changing the password for [" + username + "] is not allowed"); - listener.onFailure(validationException); - return; - } + assert SystemUser.NAME.equals(username) == false && XPackUser.NAME.equals(username) == false : username + "is internal!"; final String docType; - if (ReservedRealm.isReserved(username)) { + if (ReservedRealm.isReserved(username, settings)) { docType = RESERVED_USER_DOC_TYPE; } else { docType = USER_DOC_TYPE; @@ -338,33 +325,30 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL @Override public void onFailure(Exception e) { - Throwable cause = e; - if (e instanceof ElasticsearchException) { - cause = ExceptionsHelper.unwrapCause(e); - if ((cause instanceof IndexNotFoundException) == false - && (cause instanceof DocumentMissingException) == false) { - listener.onFailure(e); - return; + if (isIndexNotFoundOrDocumentMissing(e)) { + if (docType.equals(RESERVED_USER_DOC_TYPE)) { + createReservedUser(username, request.passwordHash(), request.getRefreshPolicy(), listener); + } else { + logger.debug((Supplier) () -> + new ParameterizedMessage("failed to change password for user [{}]", request.username()), e); + ValidationException validationException = new ValidationException(); + validationException.addValidationError("user must exist in order to change password"); + listener.onFailure(validationException); } - } - - if (docType.equals(RESERVED_USER_DOC_TYPE)) { - createReservedUser(username, request.passwordHash(), request.getRefreshPolicy(), listener); } else { - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "failed to change password for user [{}]", request.username()), cause); - ValidationException validationException = new ValidationException(); - validationException.addValidationError("user must exist in order to change password"); - listener.onFailure(validationException); + listener.onFailure(e); } } }); } + /** + * Asynchronous method to create a reserved user with the given password hash. The cache for the user will be cleared after the document + * has been indexed + */ private void createReservedUser(String username, char[] passwordHash, RefreshPolicy refresh, ActionListener listener) { client.prepareIndex(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username) - .setSource(Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash)) + .setSource(Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash), Fields.ENABLED.getPreferredName(), true) .setRefreshPolicy(refresh) .execute(new ActionListener() { @Override @@ -379,6 +363,12 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL }); } + /** + * Asynchronous method to put a user. A put user request without a password hash is treated as an update and will fail with a + * {@link ValidationException} if the user does not exist. If a password hash is provided, then we issue a update request with an + * upsert document as well; the upsert document sets the enabled flag of the user to true but if the document already exists, this + * method will not modify the enabled value. 
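Because the doc-versus-upsert split described above carries the semantics of the new putUser path, a minimal sketch of that single update call may help, trimmed to the password and enabled fields. It assumes it runs inside NativeUsersStore so the index/type constants and User.Fields names resolve; username, passwordHash and the synchronous get() are placeholders for the sketch only:

    UpdateResponse response = client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, username)
            // partial document: the fields a put request is allowed to change on an existing user
            .setDoc(User.Fields.USERNAME.getPreferredName(), username,
                    User.Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash))
            // upsert document: only used when no user document exists yet, which is why the enabled
            // flag is set here and nowhere else, so an existing user keeps its current value
            .setUpsert(User.Fields.USERNAME.getPreferredName(), username,
                    User.Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash),
                    User.Fields.ENABLED.getPreferredName(), true)
            .setRefreshPolicy(RefreshPolicy.IMMEDIATE)
            .get();
    boolean created = response.getResult() == DocWriteResponse.Result.CREATED;

Whether the document was created or merely updated is exactly the flag the surrounding code hands to clearRealmCache afterwards.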
+ */ public void putUser(final PutUserRequest request, final ActionListener listener) { if (state() != State.STARTED) { listener.onFailure(new IllegalStateException("user cannot be added as native user service has not been started")); @@ -389,7 +379,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL if (request.passwordHash() == null) { updateUserWithoutPassword(request, listener); } else { - indexUser(request, listener); + upsertUser(request, listener); } } catch (Exception e) { logger.error((Supplier) () -> new ParameterizedMessage("unable to put user [{}]", request.username()), e); @@ -397,6 +387,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL } } + /** + * Handles updating a user that should already exist where their password should not change + */ private void updateUserWithoutPassword(final PutUserRequest putUserRequest, final ActionListener listener) { assert putUserRequest.passwordHash() == null; // We must have an existing document @@ -416,52 +409,43 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL @Override public void onFailure(Exception e) { - Throwable cause = e; - if (e instanceof ElasticsearchException) { - cause = ExceptionsHelper.unwrapCause(e); - if ((cause instanceof IndexNotFoundException) == false - && (cause instanceof DocumentMissingException) == false) { - listener.onFailure(e); - return; - } + Exception failure = e; + if (isIndexNotFoundOrDocumentMissing(e)) { + // if the index doesn't exist we can never update a user + // if the document doesn't exist, then this update is not valid + logger.debug((Supplier) () -> new ParameterizedMessage("failed to update user document with username [{}]", + putUserRequest.username()), e); + ValidationException validationException = new ValidationException(); + validationException.addValidationError("password must be specified unless you are updating an existing user"); + failure = validationException; } - - // if the index doesn't exist we can never update a user - // if the document doesn't exist, then this update is not valid - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "failed to update user document with username [{}]", - putUserRequest.username()), - cause); - ValidationException validationException = new ValidationException(); - validationException.addValidationError("password must be specified unless you are updating an existing user"); - listener.onFailure(validationException); + listener.onFailure(failure); } }); } - private void indexUser(final PutUserRequest putUserRequest, final ActionListener listener) { + private void upsertUser(final PutUserRequest putUserRequest, final ActionListener listener) { assert putUserRequest.passwordHash() != null; - client.prepareIndex(SecurityTemplateService.SECURITY_INDEX_NAME, + client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, putUserRequest.username()) - .setSource(User.Fields.USERNAME.getPreferredName(), putUserRequest.username(), + .setDoc(User.Fields.USERNAME.getPreferredName(), putUserRequest.username(), User.Fields.PASSWORD.getPreferredName(), String.valueOf(putUserRequest.passwordHash()), User.Fields.ROLES.getPreferredName(), putUserRequest.roles(), User.Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), User.Fields.EMAIL.getPreferredName(), putUserRequest.email(), User.Fields.METADATA.getPreferredName(), putUserRequest.metadata()) + .setUpsert(User.Fields.USERNAME.getPreferredName(), 
putUserRequest.username(), + User.Fields.PASSWORD.getPreferredName(), String.valueOf(putUserRequest.passwordHash()), + User.Fields.ROLES.getPreferredName(), putUserRequest.roles(), + User.Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), + User.Fields.EMAIL.getPreferredName(), putUserRequest.email(), + User.Fields.METADATA.getPreferredName(), putUserRequest.metadata(), + User.Fields.ENABLED.getPreferredName(), true) .setRefreshPolicy(putUserRequest.getRefreshPolicy()) - .execute(new ActionListener() { + .execute(new ActionListener() { @Override - public void onResponse(IndexResponse indexResponse) { - // if the document was just created, then we don't need to clear cache - boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; - if (created) { - listener.onResponse(true); - return; - } - - clearRealmCache(putUserRequest.username(), listener, created); + public void onResponse(UpdateResponse updateResponse) { + clearRealmCache(putUserRequest.username(), listener, updateResponse.getResult() == DocWriteResponse.Result.CREATED); } @Override @@ -471,6 +455,82 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL }); } + /** + * Asynchronous method that will update the enabled flag of a user. If the user is reserved and the document does not exist, a document + * will be created. If the user is not reserved, the user must exist otherwise the operation will fail. + */ + public void setEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, + final ActionListener listener) { + if (state() != State.STARTED) { + listener.onFailure(new IllegalStateException("enabled status cannot be changed as native user service has not been started")); + return; + } + + if (ReservedRealm.isReserved(username, settings)) { + setReservedUserEnabled(username, enabled, refreshPolicy, listener); + } else { + setRegularUserEnabled(username, enabled, refreshPolicy, listener); + } + } + + private void setRegularUserEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, + final ActionListener listener) { + try { + client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, username) + .setDoc(User.Fields.ENABLED.getPreferredName(), enabled) + .setRefreshPolicy(refreshPolicy) + .execute(new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + assert updateResponse.getResult() == Result.UPDATED; + clearRealmCache(username, listener, null); + } + + @Override + public void onFailure(Exception e) { + Exception failure = e; + if (isIndexNotFoundOrDocumentMissing(e)) { + // if the index doesn't exist we can never update a user + // if the document doesn't exist, then this update is not valid + logger.debug((Supplier) () -> + new ParameterizedMessage("failed to {} user [{}]", enabled ? "enable" : "disable", username), e); + ValidationException validationException = new ValidationException(); + validationException.addValidationError("only existing users can be " + (enabled ? 
"enabled" : "disabled")); + failure = validationException; + } + listener.onFailure(failure); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + + private void setReservedUserEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, + final ActionListener listener) { + try { + client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username) + .setDoc(User.Fields.ENABLED.getPreferredName(), enabled) + .setUpsert(User.Fields.PASSWORD.getPreferredName(), String.valueOf(ReservedRealm.DEFAULT_PASSWORD_HASH), + User.Fields.ENABLED.getPreferredName(), enabled) + .setRefreshPolicy(refreshPolicy) + .execute(new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + assert updateResponse.getResult() == Result.UPDATED || updateResponse.getResult() == Result.CREATED; + clearRealmCache(username, listener, null); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + public void deleteUser(final DeleteUserRequest deleteUserRequest, final ActionListener listener) { if (state() != State.STARTED) { listener.onFailure(new IllegalStateException("user cannot be deleted as native user service has not been started")); @@ -481,7 +541,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL DeleteRequest request = client.prepareDelete(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, deleteUserRequest.username()).request(); request.indicesOptions().ignoreUnavailable(); - request.setRefreshPolicy(deleteUserRequest.refresh() ? RefreshPolicy.IMMEDIATE : RefreshPolicy.WAIT_UNTIL); + request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy()); client.delete(request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { @@ -537,15 +597,6 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL if (state.compareAndSet(State.INITIALIZED, State.STARTING)) { this.scrollSize = SCROLL_SIZE_SETTING.get(settings); this.scrollKeepAlive = SCROLL_KEEP_ALIVE_SETTING.get(settings); - - UserStorePoller poller = new UserStorePoller(); - try { - poller.doRun(); - } catch (Exception e) { - logger.warn("failed to do initial poll of users", e); - } - TimeValue interval = settings.getAsTime("shield.authc.native.reload.interval", TimeValue.timeValueSeconds(30L)); - pollerCancellable = threadPool.scheduleWithFixedDelay(poller, interval, Names.GENERIC); state.set(State.STARTED); } } catch (Exception e) { @@ -556,14 +607,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL public void stop() { if (state.compareAndSet(State.STARTED, State.STOPPING)) { - try { - pollerCancellable.cancel(); - } catch (Exception e) { - state.set(State.FAILED); - throw e; - } finally { - state.set(State.STOPPED); - } + state.set(State.STOPPED); } } @@ -574,7 +618,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL * @param password the plaintext password to verify * @return {@link} User object if successful or {@code null} if verification fails */ - public User verifyPassword(String username, final SecuredString password) { + User verifyPassword(String username, final SecuredString password) { if (state() != State.STARTED) { logger.trace("attempted to verify user credentials for [{}] but service was not started", username); return null; @@ -590,11 +634,7 @@ public class 
NativeUsersStore extends AbstractComponent implements ClusterStateL return null; } - public void addListener(ChangeListener listener) { - listeners.add(listener); - } - - boolean started() { + public boolean started() { return state() == State.STARTED; } @@ -602,9 +642,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL return securityIndexExists; } - char[] reservedUserPassword(String username) throws Exception { + ReservedUserInfo getReservedUserInfo(String username) throws Exception { assert started(); - final AtomicReference passwordHash = new AtomicReference<>(); + final AtomicReference userInfoRef = new AtomicReference<>(); final AtomicReference failure = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(1); client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username) @@ -614,26 +654,26 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL if (getResponse.isExists()) { Map sourceMap = getResponse.getSourceAsMap(); String password = (String) sourceMap.get(User.Fields.PASSWORD.getPreferredName()); + Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName()); if (password == null || password.isEmpty()) { failure.set(new IllegalStateException("password hash must not be empty!")); - return; + } else if (enabled == null) { + failure.set(new IllegalStateException("enabled must not be null!")); + } else { + userInfoRef.set(new ReservedUserInfo(password.toCharArray(), enabled)); } - passwordHash.set(password.toCharArray()); } } @Override public void onFailure(Exception e) { if (e instanceof IndexNotFoundException) { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "could not retrieve built in user [{}] password since security index does not exist", - username), - e); + logger.trace((Supplier) () -> new ParameterizedMessage( + "could not retrieve built in user [{}] info since security index does not exist", username), e); } else { logger.error( (Supplier) () -> new ParameterizedMessage( - "failed to retrieve built in user [{}] password", username), e); + "failed to retrieve built in user [{}] info", username), e); failure.set(e); } } @@ -653,7 +693,65 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL // if there is any sort of failure we need to throw an exception to prevent the fallback to the default password... 
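As an aside on the shape of getReservedUserInfo above (and of getAllReservedUserInfo below): an asynchronous client call is made blocking by pairing a LatchedActionListener with a CountDownLatch, waiting with a timeout, and then rethrowing any captured failure. A stripped-down sketch of that idiom, with placeholder index, type and id values and assuming the enclosing method declares throws Exception:

    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Exception> failure = new AtomicReference<>();
    client.prepareGet("some-index", "some-type", "some-id")
            .execute(new LatchedActionListener<>(new ActionListener<GetResponse>() {
                @Override
                public void onResponse(GetResponse response) {
                    // copy whatever is needed out of the response here
                }

                @Override
                public void onFailure(Exception e) {
                    failure.set(e);
                }
            }, latch));
    if (latch.await(30, TimeUnit.SECONDS) == false) {
        failure.set(new TimeoutException("timed out waiting for the get response"));
    }
    if (failure.get() != null) {
        throw failure.get();
    }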
throw failureCause; } - return passwordHash.get(); + return userInfoRef.get(); + } + + Map getAllReservedUserInfo() throws Exception { + assert started(); + final Map userInfos = new HashMap<>(); + final AtomicReference failure = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME) + .setTypes(RESERVED_USER_DOC_TYPE) + .setQuery(QueryBuilders.matchAllQuery()) + .setFetchSource(true) + .execute(new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(SearchResponse searchResponse) { + assert searchResponse.getHits().getTotalHits() <= 10 : "there are more than 10 reserved users we need to change " + + "this to retrieve them all!"; + for (SearchHit searchHit : searchResponse.getHits().getHits()) { + Map sourceMap = searchHit.getSource(); + String password = (String) sourceMap.get(User.Fields.PASSWORD.getPreferredName()); + Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName()); + if (password == null || password.isEmpty()) { + failure.set(new IllegalStateException("password hash must not be empty!")); + break; + } else if (enabled == null) { + failure.set(new IllegalStateException("enabled must not be null!")); + break; + } else { + userInfos.put(searchHit.getId(), new ReservedUserInfo(password.toCharArray(), enabled)); + } + } + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + logger.trace("could not retrieve built in users since security index does not exist", e); + } else { + logger.error("failed to retrieve built in users", e); + failure.set(e); + } + } + }, latch)); + + try { + final boolean responseReceived = latch.await(30, TimeUnit.SECONDS); + if (responseReceived == false) { + failure.set(new TimeoutException("timed out trying to get built in users")); + } + } catch (InterruptedException e) { + failure.set(e); + } + + Exception failureCause = failure.get(); + if (failureCause != null) { + // if there is any sort of failure we need to throw an exception to prevent the fallback to the default password... 
+ throw failureCause; + } + return userInfos; } private void clearScrollResponse(String scrollId) { @@ -716,7 +814,6 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL if (state != State.STOPPED && state != State.FAILED) { throw new IllegalStateException("can only reset if stopped!!!"); } - this.listeners.clear(); this.securityIndexExists = false; this.state.set(State.INITIALIZED); } @@ -731,158 +828,42 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL String[] roles = ((List) sourceMap.get(User.Fields.ROLES.getPreferredName())).toArray(Strings.EMPTY_ARRAY); String fullName = (String) sourceMap.get(User.Fields.FULL_NAME.getPreferredName()); String email = (String) sourceMap.get(User.Fields.EMAIL.getPreferredName()); + Boolean enabled = (Boolean) sourceMap.get(User.Fields.ENABLED.getPreferredName()); + if (enabled == null) { + // fallback mechanism as a user from 2.x may not have the enabled field + enabled = Boolean.TRUE; + } Map metadata = (Map) sourceMap.get(User.Fields.METADATA.getPreferredName()); - return new UserAndPassword(new User(username, roles, fullName, email, metadata), password.toCharArray()); + return new UserAndPassword(new User(username, roles, fullName, email, metadata, enabled), password.toCharArray()); } catch (Exception e) { logger.error((Supplier) () -> new ParameterizedMessage("error in the format of data for user [{}]", username), e); return null; } } - private class UserStorePoller extends AbstractRunnable { - - // this map contains the mapping for username -> version, which is used when polling the index to easily detect of - // any changes that may have been missed since the last update. - private final ObjectLongHashMap userVersionMap = new ObjectLongHashMap<>(); - private final ObjectLongHashMap reservedUserVersionMap = new ObjectLongHashMap<>(); - - @Override - public void doRun() { - // hold a reference to the client since the poller may run after the class is stopped (we don't interrupt it running) and - // we reset when we test which sets the client to null... 
- final Client client = NativeUsersStore.this.client; - if (isStopped()) { - return; + private static boolean isIndexNotFoundOrDocumentMissing(Exception e) { + if (e instanceof ElasticsearchException) { + Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof IndexNotFoundException || cause instanceof DocumentMissingException) { + return true; } - if (securityIndexExists == false) { - logger.trace("cannot poll for user changes since security index [{}] does not exist", SecurityTemplateService - .SECURITY_INDEX_NAME); - return; - } - - logger.trace("starting polling of user index to check for changes"); - List changedUsers = scrollForModifiedUsers(client, USER_DOC_TYPE, userVersionMap); - if (isStopped()) { - return; - } - - changedUsers.addAll(scrollForModifiedUsers(client, RESERVED_USER_DOC_TYPE, reservedUserVersionMap)); - if (isStopped()) { - return; - } - - notifyListeners(changedUsers); - logger.trace("finished polling of user index"); - } - - private List scrollForModifiedUsers(Client client, String docType, ObjectLongMap usersMap) { - // create a copy of all known users - ObjectHashSet knownUsers = new ObjectHashSet<>(usersMap.keys()); - List changedUsers = new ArrayList<>(); - - SearchResponse response = null; - try { - client.admin().indices().prepareRefresh(SecurityTemplateService.SECURITY_INDEX_NAME).get(); - response = client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME) - .setScroll(scrollKeepAlive) - .setQuery(QueryBuilders.typeQuery(docType)) - .setSize(scrollSize) - .setVersion(true) - .setFetchSource(false) // we only need id and version - .get(); - - boolean keepScrolling = response.getHits().getHits().length > 0; - while (keepScrolling) { - for (SearchHit hit : response.getHits().getHits()) { - final String username = hit.id(); - final long version = hit.version(); - if (knownUsers.contains(username)) { - final long lastKnownVersion = usersMap.get(username); - if (version != lastKnownVersion) { - // version is only changed by this method - assert version > lastKnownVersion; - usersMap.put(username, version); - // there is a chance that the user's cache has already been cleared and we'll clear it again but - // this should be ok in most cases as user changes should not be that frequent - changedUsers.add(username); - } - knownUsers.remove(username); - } else { - usersMap.put(username, version); - } - } - - if (isStopped()) { - // bail here - return Collections.emptyList(); - } - response = client.prepareSearchScroll(response.getScrollId()).setScroll(scrollKeepAlive).get(); - keepScrolling = response.getHits().getHits().length > 0; - } - } catch (IndexNotFoundException e) { - logger.trace("security index does not exist", e); - } finally { - if (response != null && response.getScrollId() != null) { - ClearScrollRequest clearScrollRequest = client.prepareClearScroll().addScrollId(response.getScrollId()).request(); - client.clearScroll(clearScrollRequest).actionGet(); - } - } - - // we now have a list of users that were in our version map and have been deleted - Iterator> userIter = knownUsers.iterator(); - while (userIter.hasNext()) { - String user = userIter.next().value; - usersMap.remove(user); - changedUsers.add(user); - } - - return changedUsers; - } - - private void notifyListeners(List changedUsers) { - if (changedUsers.isEmpty()) { - return; - } - - // make the list unmodifiable to prevent modifications by any listeners - changedUsers = Collections.unmodifiableList(changedUsers); - if (logger.isDebugEnabled()) { - logger.debug("changes 
detected for users [{}]", changedUsers); - } - - // call listeners - RuntimeException ex = null; - for (ChangeListener listener : listeners) { - try { - listener.onUsersChanged(changedUsers); - } catch (Exception e) { - if (ex == null) ex = new RuntimeException("exception while notifying listeners"); - ex.addSuppressed(e); - } - } - - if (ex != null) throw ex; - } - - @Override - public void onFailure(Exception e) { - logger.error("error occurred while checking the native users for changes", e); - } - - private boolean isStopped() { - State state = state(); - return state == State.STOPPED || state == State.STOPPING; } + return false; } - interface ChangeListener { + static class ReservedUserInfo { - void onUsersChanged(List username); + final char[] passwordHash; + final boolean enabled; + + ReservedUserInfo(char[] passwordHash, boolean enabled) { + this.passwordHash = passwordHash; + this.enabled = enabled; + } } public static void addSettings(List> settings) { settings.add(SCROLL_SIZE_SETTING); settings.add(SCROLL_KEEP_ALIVE_SETTING); - settings.add(POLL_INTERVAL_SETTING); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 1fbe87bdf8a..acb93901917 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ChangeListener; +import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.authc.support.SecuredString; @@ -21,9 +21,12 @@ import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.User; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.Map; /** * A realm for predefined users. These users can only be modified in terms of changing their passwords; no other modifications are allowed. 
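The NativeUsersStore changes above replace the raw password-hash lookup for reserved users with the small ReservedUserInfo holder (password hash plus enabled flag). As a minimal sketch of how a same-package caller such as the reserved realm, whose diff follows, can resolve the effective state; the helper method name and the local DEFAULTS constant are made up for this sketch:

    private static final ReservedUserInfo DEFAULTS =
            new ReservedUserInfo(Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray())), true);

    static ReservedUserInfo effectiveInfo(NativeUsersStore store, String username) throws Exception {
        if (store.securityIndexExists() == false) {
            return DEFAULTS;                        // no .security index yet, so the defaults apply
        }
        ReservedUserInfo info = store.getReservedUserInfo(username);
        return info != null ? info : DEFAULTS;      // no override document stored for this user
    }

The same default rule shows up again when the realm lists users: a missing ReservedUserInfo entry is treated as an enabled user with the default password.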
@@ -32,40 +35,35 @@ import java.util.List; public class ReservedRealm extends CachingUsernamePasswordRealm { public static final String TYPE = "reserved"; - private static final char[] DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray())); + static final char[] DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray())); + private static final ReservedUserInfo DEFAULT_USER_INFO = new ReservedUserInfo(DEFAULT_PASSWORD_HASH, true); private final NativeUsersStore nativeUsersStore; + private final AnonymousUser anonymousUser; + private final boolean anonymousEnabled; - public ReservedRealm(Environment env, Settings settings, NativeUsersStore nativeUsersStore) { + public ReservedRealm(Environment env, Settings settings, NativeUsersStore nativeUsersStore, AnonymousUser anonymousUser) { super(TYPE, new RealmConfig(TYPE, Settings.EMPTY, settings, env)); this.nativeUsersStore = nativeUsersStore; - nativeUsersStore.addListener(new ChangeListener() { - @Override - public void onUsersChanged(List changedUsers) { - changedUsers.stream() - .filter(ReservedRealm::isReserved) - .forEach(ReservedRealm.this::expire); - } - }); - + this.anonymousUser = anonymousUser; + this.anonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); } @Override protected User doAuthenticate(UsernamePasswordToken token) { - final User user = getUser(token.principal()); - if (user == null) { + if (isReserved(token.principal(), config.globalSettings()) == false) { return null; } - final char[] passwordHash = getPasswordHash(user.principal()); - if (passwordHash != null) { + final ReservedUserInfo userInfo = getUserInfo(token.principal()); + if (userInfo != null) { try { - if (Hasher.BCRYPT.verify(token.credentials(), passwordHash)) { - return user; + if (Hasher.BCRYPT.verify(token.credentials(), userInfo.passwordHash)) { + return getUser(token.principal(), userInfo); } } finally { - if (passwordHash != DEFAULT_PASSWORD_HASH) { - Arrays.fill(passwordHash, (char) 0); + if (userInfo.passwordHash != DEFAULT_PASSWORD_HASH) { + Arrays.fill(userInfo.passwordHash, (char) 0); } } } @@ -75,7 +73,20 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { @Override protected User doLookupUser(String username) { - return getUser(username); + if (isReserved(username, config.globalSettings()) == false) { + return null; + } + + if (AnonymousUser.isAnonymousUsername(username, config.globalSettings())) { + return anonymousEnabled ? anonymousUser : null; + } + + final ReservedUserInfo userInfo = getUserInfo(username); + if (userInfo != null) { + return getUser(username, userInfo); + } + // this was a reserved username - don't allow this to go to another realm... 
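One detail of doAuthenticate above is worth isolating: the verify-then-wipe handling of the password hash. A hedged restatement using the token and userInfo names from the surrounding code; the per-user hash is zeroed after the comparison so it does not linger in memory, while the shared DEFAULT_PASSWORD_HASH constant is deliberately left untouched because it is reused:

    boolean verified;
    try {
        verified = Hasher.BCRYPT.verify(token.credentials(), userInfo.passwordHash);
    } finally {
        if (userInfo.passwordHash != DEFAULT_PASSWORD_HASH) {
            Arrays.fill(userInfo.passwordHash, (char) 0);   // wipe the copied hash
        }
    }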
+ throw Exceptions.authenticationError("failed to lookup user [{}]", username); } @Override @@ -83,54 +94,71 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { return true; } - public static boolean isReserved(String username) { + public static boolean isReserved(String username, Settings settings) { assert username != null; switch (username) { case ElasticUser.NAME: case KibanaUser.NAME: return true; default: - return AnonymousUser.isAnonymousUsername(username); + return AnonymousUser.isAnonymousUsername(username, settings); } } - public static User getUser(String username) { + User getUser(String username, ReservedUserInfo userInfo) { assert username != null; switch (username) { case ElasticUser.NAME: - return ElasticUser.INSTANCE; + return new ElasticUser(userInfo.enabled); case KibanaUser.NAME: - return KibanaUser.INSTANCE; + return new KibanaUser(userInfo.enabled); default: - if (AnonymousUser.enabled() && AnonymousUser.isAnonymousUsername(username)) { - return AnonymousUser.INSTANCE; + if (anonymousEnabled && anonymousUser.principal().equals(username)) { + return anonymousUser; } return null; } } - public static Collection users() { - if (AnonymousUser.enabled()) { - return Arrays.asList(ElasticUser.INSTANCE, KibanaUser.INSTANCE, AnonymousUser.INSTANCE); + public Collection users() { + if (nativeUsersStore.started() == false) { + return anonymousEnabled ? Collections.singletonList(anonymousUser) : Collections.emptyList(); } - return Arrays.asList(ElasticUser.INSTANCE, KibanaUser.INSTANCE); + + List users = new ArrayList<>(3); + try { + Map reservedUserInfos = nativeUsersStore.getAllReservedUserInfo(); + ReservedUserInfo userInfo = reservedUserInfos.get(ElasticUser.NAME); + users.add(new ElasticUser(userInfo == null || userInfo.enabled)); + userInfo = reservedUserInfos.get(KibanaUser.NAME); + users.add(new KibanaUser(userInfo == null || userInfo.enabled)); + if (anonymousEnabled) { + users.add(anonymousUser); + } + } catch (Exception e) { + logger.error("failed to retrieve reserved users", e); + return anonymousEnabled ? Collections.singletonList(anonymousUser) : Collections.emptyList(); + } + + return users; } - private char[] getPasswordHash(final String username) { + private ReservedUserInfo getUserInfo(final String username) { if (nativeUsersStore.started() == false) { // we need to be able to check for the user store being started... 
return null; } if (nativeUsersStore.securityIndexExists() == false) { - return DEFAULT_PASSWORD_HASH; + return DEFAULT_USER_INFO; } + try { - char[] passwordHash = nativeUsersStore.reservedUserPassword(username); - if (passwordHash == null) { - return DEFAULT_PASSWORD_HASH; + ReservedUserInfo userInfo = nativeUsersStore.getReservedUserInfo(username); + if (userInfo == null) { + return DEFAULT_USER_INFO; } - return passwordHash; + return userInfo; } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage("failed to retrieve password hash for reserved user [{}]", username), e); diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java index d64111657d5..9cff9389a84 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; @@ -43,7 +44,8 @@ public class FileUserPasswdStore { private final Logger logger; private final Path file; - final Hasher hasher = Hasher.BCRYPT; + private final Hasher hasher = Hasher.BCRYPT; + private final Settings settings; private volatile Map users; @@ -56,7 +58,8 @@ public class FileUserPasswdStore { FileUserPasswdStore(RealmConfig config, ResourceWatcherService watcherService, RefreshListener listener) { logger = config.logger(FileUserPasswdStore.class); file = resolveFile(config.env()); - users = parseFileLenient(file, logger); + settings = config.globalSettings(); + users = parseFileLenient(file, logger, settings); FileWatcher watcher = new FileWatcher(file.getParent()); watcher.addListener(new FileListener()); try { @@ -80,9 +83,6 @@ public class FileUserPasswdStore { } public boolean verifyPassword(String username, SecuredString password) { - if (users == null) { - return false; - } char[] hash = users.get(username); return hash != null && hasher.verify(password, hash); } @@ -99,9 +99,9 @@ public class FileUserPasswdStore { * Internally in this class, we try to load the file, but if for some reason we can't, we're being more lenient by * logging the error and skipping all users. This is aligned with how we handle other auto-loaded files in security. */ - static Map parseFileLenient(Path path, Logger logger) { + static Map parseFileLenient(Path path, Logger logger, Settings settings) { try { - return parseFile(path, logger); + return parseFile(path, logger, settings); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -114,7 +114,7 @@ public class FileUserPasswdStore { * parses the users file. 
Should never return {@code null}, if the file doesn't exist an * empty map is returned */ - public static Map parseFile(Path path, @Nullable Logger logger) { + public static Map parseFile(Path path, @Nullable Logger logger, Settings settings) { if (logger == null) { logger = NoOpLogger.INSTANCE; } @@ -149,7 +149,7 @@ public class FileUserPasswdStore { continue; } String username = line.substring(0, i); - Validation.Error validationError = Users.validateUsername(username); + Validation.Error validationError = Users.validateUsername(username, false, settings); if (validationError != null) { logger.error("invalid username [{}] in users file [{}], skipping... ({})", username, path.toAbsolutePath(), validationError); @@ -194,7 +194,7 @@ public class FileUserPasswdStore { public void onFileChanged(Path file) { if (file.equals(FileUserPasswdStore.this.file)) { logger.info("users file [{}] changed. updating users... )", file.toAbsolutePath()); - users = parseFileLenient(file, logger); + users = parseFileLenient(file, logger, settings); notifyRefresh(); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java index 636dd93eb68..4848263577b 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java @@ -143,7 +143,7 @@ public class FileUserRolesStore { continue; } String role = line.substring(0, i).trim(); - Validation.Error validationError = Validation.Roles.validateRoleName(role); + Validation.Error validationError = Validation.Roles.validateRoleName(role, true); if (validationError != null) { logger.error("invalid role entry in users_roles file [{}], line [{}] - {}. skipping...", path.toAbsolutePath(), lineNr, validationError); diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java index a6775c5624b..496a2767149 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java @@ -84,21 +84,21 @@ public class UsersTool extends MultiCommand { @Override protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { - String username = parseUsername(arguments.values(options)); - Validation.Error validationError = Users.validateUsername(username); + Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + String username = parseUsername(arguments.values(options), env.settings()); + Validation.Error validationError = Users.validateUsername(username, false, Settings.EMPTY); if (validationError != null) { throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... 
" + validationError); } char[] password = parsePassword(terminal, passwordOption.value(options)); - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); String[] roles = parseRoles(terminal, env, rolesOption.value(options)); Path passwordFile = FileUserPasswdStore.resolveFile(env); Path rolesFile = FileUserRolesStore.resolveFile(env); FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile); - Map users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null)); + Map users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null, env.settings())); if (users.containsKey(username)) { throw new UserException(ExitCodes.CODE_ERROR, "User [" + username + "] already exists"); } @@ -138,13 +138,13 @@ public class UsersTool extends MultiCommand { @Override protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { - String username = parseUsername(arguments.values(options)); Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + String username = parseUsername(arguments.values(options), env.settings()); Path passwordFile = FileUserPasswdStore.resolveFile(env); Path rolesFile = FileUserRolesStore.resolveFile(env); FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile); - Map users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null)); + Map users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null, env.settings())); if (users.containsKey(username) == false) { throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist"); } @@ -193,13 +193,13 @@ public class UsersTool extends MultiCommand { @Override protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { - String username = parseUsername(arguments.values(options)); + Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + String username = parseUsername(arguments.values(options), env.settings()); char[] password = parsePassword(terminal, passwordOption.value(options)); - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); Path file = FileUserPasswdStore.resolveFile(env); FileAttributesChecker attributesChecker = new FileAttributesChecker(file); - Map users = new HashMap<>(FileUserPasswdStore.parseFile(file, null)); + Map users = new HashMap<>(FileUserPasswdStore.parseFile(file, null, env.settings())); if (users.containsKey(username) == false) { throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist"); } @@ -237,8 +237,8 @@ public class UsersTool extends MultiCommand { @Override protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { - String username = parseUsername(arguments.values(options)); Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + String username = parseUsername(arguments.values(options), env.settings()); String[] addRoles = parseRoles(terminal, env, addOption.value(options)); String[] removeRoles = parseRoles(terminal, env, removeOption.value(options)); @@ -254,7 +254,7 @@ public class UsersTool extends MultiCommand { Path rolesFile = FileUserRolesStore.resolveFile(env); FileAttributesChecker attributesChecker = new FileAttributesChecker(usersFile, rolesFile); - Map usersMap = FileUserPasswdStore.parseFile(usersFile, null); + Map 
usersMap = FileUserPasswdStore.parseFile(usersFile, null, env.settings()); if (!usersMap.containsKey(username)) { throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist"); } @@ -312,7 +312,7 @@ public class UsersTool extends MultiCommand { Map userRoles = FileUserRolesStore.parseFile(userRolesFilePath, null); Path userFilePath = FileUserPasswdStore.resolveFile(env); - Set users = FileUserPasswdStore.parseFile(userFilePath, null).keySet(); + Set users = FileUserPasswdStore.parseFile(userFilePath, null, env.settings()).keySet(); Path rolesFilePath = FileRolesStore.resolveFile(env); Set knownRoles = Sets.union(FileRolesStore.parseFileForRoleNames(rolesFilePath, null), ReservedRolesStore.names()); @@ -388,14 +388,14 @@ public class UsersTool extends MultiCommand { } // pkg private for testing - static String parseUsername(List args) throws UserException { + static String parseUsername(List args, Settings settings) throws UserException { if (args.isEmpty()) { throw new UserException(ExitCodes.USAGE, "Missing username argument"); } else if (args.size() > 1) { throw new UserException(ExitCodes.USAGE, "Expected a single username argument, found extra: " + args.toString()); } String username = args.get(0); - Validation.Error validationError = Users.validateUsername(username); + Validation.Error validationError = Users.validateUsername(username, false, settings); if (validationError != null) { throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError); } @@ -446,7 +446,7 @@ public class UsersTool extends MultiCommand { } String[] roles = rolesStr.split(","); for (String role : roles) { - Validation.Error validationError = Validation.Roles.validateRoleName(role); + Validation.Error validationError = Validation.Roles.validateRoleName(role, true); if (validationError != null) { throw new UserException(ExitCodes.DATA_ERROR, "Invalid role [" + role + "]... 
" + validationError); } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index 6926f7c2bbd..142276af267 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.cache.CacheLoader; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.support.Exceptions; import org.elasticsearch.xpack.security.user.User; import java.util.Map; @@ -149,11 +148,11 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm CacheLoader callback = key -> { if (logger.isDebugEnabled()) { - logger.debug("user not found in cache, proceeding with normal lookup"); + logger.debug("user [{}] not found in cache, proceeding with normal lookup", username); } User user = doLookupUser(username); if (user == null) { - throw Exceptions.authenticationError("could not lookup [{}]", username); + return null; } return new UserWithHash(user, null, null); }; @@ -162,10 +161,15 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm UserWithHash userWithHash = cache.computeIfAbsent(username, callback); return userWithHash.user; } catch (ExecutionException ee) { + if (ee.getCause() instanceof ElasticsearchSecurityException) { + // this should bubble out + throw (ElasticsearchSecurityException) ee.getCause(); + } + if (logger.isTraceEnabled()) { logger.trace((Supplier) () -> new ParameterizedMessage("realm [{}] could not lookup [{}]", name(), username), ee); } else if (logger.isDebugEnabled()) { - logger.debug("realm [{}] could not authenticate [{}]", name(), username); + logger.debug("realm [{}] could not lookup [{}]", name(), username); } return null; } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index ad2c9b1fc06..21c4ea1e1fd 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -77,11 +78,13 @@ public class AuthorizationService extends AbstractComponent { private final IndicesAndAliasesResolver[] indicesAndAliasesResolvers; private final AuthenticationFailureHandler authcFailureHandler; private final ThreadContext threadContext; + private final 
AnonymousUser anonymousUser; + private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; public AuthorizationService(Settings settings, CompositeRolesStore rolesStore, ClusterService clusterService, AuditTrailService auditTrail, AuthenticationFailureHandler authcFailureHandler, - ThreadPool threadPool) { + ThreadPool threadPool, AnonymousUser anonymousUser) { super(settings); this.rolesStore = rolesStore; this.clusterService = clusterService; @@ -91,6 +94,8 @@ public class AuthorizationService extends AbstractComponent { }; this.authcFailureHandler = authcFailureHandler; this.threadContext = threadPool.getThreadContext(); + this.anonymousUser = anonymousUser; + this.isAnonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); this.anonymousAuthzExceptionEnabled = ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.get(settings); } @@ -101,7 +106,7 @@ public class AuthorizationService extends AbstractComponent { * @param action The action */ public List authorizedIndicesAndAliases(User user, String action) { - final String[] anonymousRoles = AnonymousUser.enabled() ? AnonymousUser.getRoles() : Strings.EMPTY_ARRAY; + final String[] anonymousRoles = isAnonymousEnabled ? anonymousUser.roles() : Strings.EMPTY_ARRAY; String[] rolesNames = user.roles(); if (rolesNames.length == 0 && anonymousRoles.length == 0) { return Collections.emptyList(); @@ -114,7 +119,7 @@ public class AuthorizationService extends AbstractComponent { predicates.add(role.indices().allowedIndicesMatcher(action)); } } - if (AnonymousUser.is(user) == false) { + if (anonymousUser.equals(user) == false) { for (String roleName : anonymousRoles) { Role role = rolesStore.role(roleName); if (role != null) { @@ -155,6 +160,10 @@ public class AuthorizationService extends AbstractComponent { * @throws ElasticsearchSecurityException If the given user is no allowed to execute the given request */ public void authorize(Authentication authentication, String action, TransportRequest request) throws ElasticsearchSecurityException { + final TransportRequest originalRequest = request; + if (request instanceof ConcreteShardRequest) { + request = ((ConcreteShardRequest) request).getRequest(); + } // prior to doing any authorization lets set the originating action in the context only setOriginatingAction(action); @@ -280,7 +289,7 @@ public class AuthorizationService extends AbstractComponent { } } - grant(authentication, action, request); + grant(authentication, action, originalRequest); } private void setIndicesAccessControl(IndicesAccessControl accessControl) { @@ -360,7 +369,7 @@ public class AuthorizationService extends AbstractComponent { private ElasticsearchSecurityException denialException(Authentication authentication, String action) { final User user = authentication.getUser(); // Special case for anonymous user - if (AnonymousUser.enabled() && AnonymousUser.is(user)) { + if (isAnonymousEnabled && anonymousUser.equals(user)) { if (anonymousAuthzExceptionEnabled == false) { throw authcFailureHandler.authenticationRequired(action, threadContext); } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java index 06aa4edf4a2..a37ae85ee55 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java +++ 
b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java @@ -168,7 +168,7 @@ public class RoleDescriptor implements ToXContent { public static RoleDescriptor parse(String name, XContentParser parser) throws IOException { // validate name - Validation.Error validationError = Validation.Roles.validateRoleName(name); + Validation.Error validationError = Validation.Roles.validateRoleName(name, true); if (validationError != null) { ValidationException ve = new ValidationException(); ve.addValidationError(validationError.toString()); diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 3e11e946663..49e9c64e314 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -74,7 +73,7 @@ import static org.elasticsearch.xpack.security.Security.setting; import static org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateUpToDate; /** - * ESNativeRolesStore is a {@code RolesStore} that, instead of reading from a + * NativeRolesStore is a {@code RolesStore} that, instead of reading from a * file, reads from an Elasticsearch index instead. Unlike the file-based roles * store, ESNativeRolesStore can be used to add a role to the store by inserting * the document into the administrative index. @@ -264,7 +263,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C try { DeleteRequest request = client.prepareDelete(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, deleteRoleRequest.name()).request(); - request.setRefreshPolicy(deleteRoleRequest.refresh() ? 
RefreshPolicy.IMMEDIATE : RefreshPolicy.WAIT_UNTIL); + request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); client.delete(request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java index d6ed72968a7..d1a56cdbfc7 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java @@ -24,12 +24,14 @@ import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; import org.elasticsearch.xpack.security.authz.permission.TransportClientRole; import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; +import org.elasticsearch.xpack.security.user.User; /** * */ public class ReservedRolesStore implements RolesStore { + private static final User DEFAULT_ENABLED_KIBANA_USER = new KibanaUser(true); private final SecurityContext securityContext; public ReservedRolesStore(SecurityContext securityContext) { @@ -54,8 +56,9 @@ public class ReservedRolesStore implements RolesStore { case KibanaRole.NAME: // The only user that should know about this role is the kibana user itself (who has this role). The reason we want to hide // this role is that it was created specifically for kibana, with all the permissions that the kibana user needs. - // We don't want it to be assigned to other users. - if (KibanaUser.is(securityContext.getUser())) { + // We don't want it to be assigned to other users. The Kibana user here must always be enabled if it is in the + // security context + if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { return KibanaRole.INSTANCE; } return null; @@ -87,7 +90,7 @@ public class ReservedRolesStore implements RolesStore { // The only user that should know about this role is the kibana user itself (who has this role). The reason we want to hide // this role is that it was created specifically for kibana, with all the permissions that the kibana user needs. // We don't want it to be assigned to other users. 
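For illustration, a minimal sketch of the equality-based check that replaces KibanaUser.is(user) in ReservedRolesStore. It assumes User.equals() compares field values rather than object identity (which the instanceof-based equals() later in this patch implies), so a locally constructed, enabled KibanaUser matches the kibana user held in the security context:

import org.elasticsearch.xpack.security.user.KibanaUser;
import org.elasticsearch.xpack.security.user.User;

class KibanaUserCheckSketch {
    /** True if the given user is the built-in kibana user (value equality, not instance identity). */
    static boolean isKibanaUser(User userFromContext) {
        User reference = new KibanaUser(true); // locally constructed, always enabled
        return reference.equals(userFromContext);
    }
}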
- if (KibanaUser.is(securityContext.getUser())) { + if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { return KibanaRole.DESCRIPTOR; } return null; @@ -97,7 +100,7 @@ public class ReservedRolesStore implements RolesStore { } public Collection roleDescriptors() { - if (KibanaUser.is(securityContext.getUser())) { + if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { return Arrays.asList(SuperuserRole.DESCRIPTOR, TransportClientRole.DESCRIPTOR, KibanaUserRole.DESCRIPTOR, KibanaRole.DESCRIPTOR, MonitoringUserRole.DESCRIPTOR, RemoteMonitoringAgentRole.DESCRIPTOR, IngestAdminRole.DESCRIPTOR); diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/client/SecurityClient.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/client/SecurityClient.java index 9463e89f417..e20ff763f3f 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/client/SecurityClient.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/client/SecurityClient.java @@ -45,6 +45,10 @@ import org.elasticsearch.xpack.security.action.user.PutUserAction; import org.elasticsearch.xpack.security.action.user.PutUserRequest; import org.elasticsearch.xpack.security.action.user.PutUserRequestBuilder; import org.elasticsearch.xpack.security.action.user.PutUserResponse; +import org.elasticsearch.xpack.security.action.user.SetEnabledAction; +import org.elasticsearch.xpack.security.action.user.SetEnabledRequest; +import org.elasticsearch.xpack.security.action.user.SetEnabledRequestBuilder; +import org.elasticsearch.xpack.security.action.user.SetEnabledResponse; import java.io.IOException; @@ -163,6 +167,14 @@ public class SecurityClient { client.execute(ChangePasswordAction.INSTANCE, request, listener); } + public SetEnabledRequestBuilder prepareSetEnabled(String username, boolean enabled) { + return new SetEnabledRequestBuilder(client).username(username).enabled(enabled); + } + + public void setEnabled(SetEnabledRequest request, ActionListener listener) { + client.execute(SetEnabledAction.INSTANCE, request, listener); + } + /** Role Management */ public GetRolesRequestBuilder prepareGetRoles(String... 
names) { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java index fac5f081ff0..fa1a8108572 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.xpack.security.action.role.DeleteRoleRequestBuilder; import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.security.client.SecurityClient; @@ -42,18 +41,16 @@ public class RestDeleteRoleAction extends BaseRestHandler { @Override public void handleRequest(RestRequest request, final RestChannel channel, NodeClient client) throws Exception { - DeleteRoleRequestBuilder requestBuilder = new SecurityClient(client).prepareDeleteRole(request.param("name")); - if (request.hasParam("refresh")) { - requestBuilder.refresh(request.paramAsBoolean("refresh", true)); - } - requestBuilder.execute(new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(DeleteRoleResponse response, XContentBuilder builder) throws Exception { - return new BytesRestResponse(response.found() ? RestStatus.OK : RestStatus.NOT_FOUND, - builder.startObject() - .field("found", response.found()) - .endObject()); - } - }); + new SecurityClient(client).prepareDeleteRole(request.param("name")) + .setRefreshPolicy(request.param("refresh")) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(DeleteRoleResponse response, XContentBuilder builder) throws Exception { + return new BytesRestResponse(response.found() ? RestStatus.OK : RestStatus.NOT_FOUND, + builder.startObject() + .field("found", response.found()) + .endObject()); + } + }); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java index 844f98d8497..7b04fc6f011 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java @@ -46,7 +46,7 @@ public class RestChangePasswordAction extends BaseRestHandler { final User user = securityContext.getUser(); String username = request.param("username"); if (username == null) { - username = user.runAs() == null ? user.principal() : user.runAs().principal();; + username = user.runAs() == null ? 
user.principal() : user.runAs().principal(); } new SecurityClient(client).prepareChangePassword(username, request.content()) diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java index cf900911559..4198aecebfc 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestDeleteUserAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.xpack.security.action.user.DeleteUserRequestBuilder; import org.elasticsearch.xpack.security.action.user.DeleteUserResponse; import org.elasticsearch.xpack.security.client.SecurityClient; @@ -42,20 +41,16 @@ public class RestDeleteUserAction extends BaseRestHandler { @Override public void handleRequest(RestRequest request, final RestChannel channel, NodeClient client) throws Exception { - String username = request.param("username"); - - DeleteUserRequestBuilder requestBuilder = new SecurityClient(client).prepareDeleteUser(username); - if (request.hasParam("refresh")) { - requestBuilder.refresh(request.paramAsBoolean("refresh", true)); - } - requestBuilder.execute(new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(DeleteUserResponse response, XContentBuilder builder) throws Exception { - return new BytesRestResponse(response.found() ? RestStatus.OK : RestStatus.NOT_FOUND, - builder.startObject() - .field("found", response.found()) - .endObject()); - } - }); + new SecurityClient(client).prepareDeleteUser(request.param("username")) + .setRefreshPolicy(request.param("refresh")) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(DeleteUserResponse response, XContentBuilder builder) throws Exception { + return new BytesRestResponse(response.found() ? 
RestStatus.OK : RestStatus.NOT_FOUND, + builder.startObject() + .field("found", response.found()) + .endObject()); + } + }); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java index cb6bee30f65..6a9dc220a0e 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestPutUserAction.java @@ -49,9 +49,7 @@ public class RestPutUserAction extends BaseRestHandler { @Override public void handleRequest(RestRequest request, final RestChannel channel, NodeClient client) throws Exception { PutUserRequestBuilder requestBuilder = new SecurityClient(client).preparePutUser(request.param("username"), request.content()); - if (request.hasParam("refresh")) { - requestBuilder.setRefreshPolicy(request.param("refresh")); - } + requestBuilder.setRefreshPolicy(request.param("refresh")); requestBuilder.execute(new RestBuilderListener(channel) { @Override public RestResponse buildResponse(PutUserResponse putUserResponse, XContentBuilder builder) throws Exception { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java new file mode 100644 index 00000000000..e7e8cc96628 --- /dev/null +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.rest.action.user; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.security.action.user.SetEnabledResponse; +import org.elasticsearch.xpack.security.client.SecurityClient; + +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +/** + * REST handler for enabling and disabling users. The username is required and we use the path to determine if the user is being + * enabled or disabled. 
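For illustration, a minimal sketch of driving the new enable/disable API from code via the SecurityClient additions shown earlier in this patch; the .get() call assumes SetEnabledRequestBuilder follows the usual ActionRequestBuilder pattern. Over REST, the same operation is a POST or PUT to /_xpack/security/user/{username}/_enable or .../_disable, as registered in the handler below:

import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.xpack.security.client.SecurityClient;

class SetEnabledSketch {
    /** Disable (or re-enable) the given user. */
    static void setUserEnabled(ElasticsearchClient client, String username, boolean enabled) {
        // .get() is assumed here from the standard request-builder pattern; the REST handler uses execute(listener)
        new SecurityClient(client).prepareSetEnabled(username, enabled).get();
    }
}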
+ */ +public class RestSetEnabledAction extends BaseRestHandler { + + @Inject + public RestSetEnabledAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(POST, "/_xpack/security/user/{username}/_enable", this); + controller.registerHandler(PUT, "/_xpack/security/user/{username}/_enable", this); + controller.registerHandler(POST, "/_xpack/security/user/{username}/_disable", this); + controller.registerHandler(PUT, "/_xpack/security/user/{username}/_disable", this); + } + + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + final boolean enabled = request.path().endsWith("_enable"); + assert enabled || request.path().endsWith("_disable"); + new SecurityClient(client).prepareSetEnabled(request.param("username"), enabled) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(SetEnabledResponse setEnabledResponse, XContentBuilder builder) throws Exception { + return new BytesRestResponse(RestStatus.OK, channel.newBuilder().startObject().endObject()); + } + }); + } +} diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java index 03715aec196..b80fd0aa252 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java @@ -55,7 +55,7 @@ public class MetadataUtils { public static void verifyNoReservedMetadata(Map metadata) { for (String key : metadata.keySet()) { if (key.startsWith(RESERVED_PREFIX)) { - throw new IllegalArgumentException("invalid user metadata. [" + key + "] is a reserved for internal uses"); + throw new IllegalArgumentException("invalid user metadata. [" + key + "] is a reserved for internal use"); } } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/Validation.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/Validation.java index acb0057b6de..54ebbf9a119 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/Validation.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/support/Validation.java @@ -5,6 +5,10 @@ */ package org.elasticsearch.xpack.security.support; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; + import java.util.regex.Pattern; /** @@ -18,13 +22,24 @@ public final class Validation { private static final int MIN_PASSWD_LENGTH = 6; - public static Error validateUsername(String username) { - return COMMON_NAME_PATTERN.matcher(username).matches() ? - null : - new Error("A valid username must be at least 1 character and no longer than 30 characters. " + - "It must begin with a letter (`a-z` or `A-Z`) or an underscore (`_`). 
Subsequent " + - "characters can be letters, underscores (`_`), digits (`0-9`) or any of the following " + - "symbols `@`, `-`, `.` or `$`"); + /** + * Validate the username + * @param username the username to validate + * @param allowReserved whether or not to allow reserved user names + * @param settings the settings which may contain information about reserved users + * @return {@code null} if valid + */ + public static Error validateUsername(String username, boolean allowReserved, Settings settings) { + if (COMMON_NAME_PATTERN.matcher(username).matches() == false) { + return new Error("A valid username must be at least 1 character and no longer than 30 characters. " + + "It must begin with a letter (`a-z` or `A-Z`) or an underscore (`_`). Subsequent " + + "characters can be letters, underscores (`_`), digits (`0-9`) or any of the following " + + "symbols `@`, `-`, `.` or `$`"); + } + if (allowReserved == false && ReservedRealm.isReserved(username, settings)) { + return new Error("Username [" + username + "] is reserved and may not be used."); + } + return null; } public static Error validatePassword(char[] password) { @@ -38,12 +53,20 @@ public final class Validation { public static final class Roles { public static Error validateRoleName(String roleName) { - return COMMON_NAME_PATTERN.matcher(roleName).matches() ? - null : - new Error("A valid role name must be at least 1 character and no longer than 30 characters. " + - "It must begin with a letter (`a-z` or `A-Z`) or an underscore (`_`). Subsequent " + - "characters can be letters, underscores (`_`), digits (`0-9`) or any of the following " + - "symbols `@`, `-`, `.` or `$`"); + return validateRoleName(roleName, false); + } + + public static Error validateRoleName(String roleName, boolean allowReserved) { + if (COMMON_NAME_PATTERN.matcher(roleName).matches() == false) { + return new Error("A valid role name must be at least 1 character and no longer than 30 characters. " + + "It must begin with a letter (`a-z` or `A-Z`) or an underscore (`_`). Subsequent " + + "characters can be letters, underscores (`_`), digits (`0-9`) or any of the following " + + "symbols `@`, `-`, `.` or `$`"); + } + if (allowReserved == false && ReservedRolesStore.isReserved(roleName)) { + return new Error("Role [" + roleName + "] is reserved and may not be used."); + } + return null; } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java index 505f76161db..1b6545fcf4d 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java @@ -9,22 +9,17 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.xpack.security.user.User.ReservedUser; +import org.elasticsearch.xpack.security.support.MetadataUtils; -import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.elasticsearch.xpack.security.Security.setting; /** - * The user object for the anonymous user. This class needs to be instantiated with the initialize method since the values - * of the user depends on the settings. 
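For illustration, a minimal sketch of how callers such as UsersTool are expected to use the widened validation methods: a null return means the name is valid, and allowReserved=false additionally rejects names belonging to built-in users. It assumes validateUsername lives on Validation.Users, as the UsersTool call sites above suggest:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.support.Validation;

class ValidationUsageSketch {
    static void requireValidNewUsername(String username, Settings settings) {
        // allowReserved=false: a brand-new user must not collide with a reserved (built-in) username
        Validation.Error error = Validation.Users.validateUsername(username, false, settings);
        if (error != null) {
            throw new IllegalArgumentException("Invalid username [" + username + "]: " + error);
        }
    }
}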
However, this is still a singleton instance. Ideally we would assert that an instance of this class - * is only initialized once, but with the way our tests work the same class will be initialized multiple times (one for each node in a - * integration test). + * The user object for the anonymous user. */ -public class AnonymousUser extends ReservedUser { +public class AnonymousUser extends User { public static final String DEFAULT_ANONYMOUS_USERNAME = "_anonymous"; public static final Setting USERNAME_SETTING = @@ -32,57 +27,18 @@ public class AnonymousUser extends ReservedUser { public static final Setting> ROLES_SETTING = Setting.listSetting(setting("authc.anonymous.roles"), Collections.emptyList(), s -> s, Property.NodeScope); - private static String username = DEFAULT_ANONYMOUS_USERNAME; - private static String[] roles = null; - - public static final AnonymousUser INSTANCE = new AnonymousUser(); - - private AnonymousUser() { - super(DEFAULT_ANONYMOUS_USERNAME); + public AnonymousUser(Settings settings) { + super(USERNAME_SETTING.get(settings), ROLES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY), null, null, + MetadataUtils.DEFAULT_RESERVED_METADATA, isAnonymousEnabled(settings)); } - @Override - public String principal() { - return username; + public static boolean isAnonymousEnabled(Settings settings) { + return ROLES_SETTING.exists(settings) && ROLES_SETTING.get(settings).isEmpty() == false; } - @Override - public String[] roles() { - return roles; - } - - public static boolean enabled() { - return roles != null; - } - - public static boolean is(User user) { - return INSTANCE == user; - } - - public static boolean isAnonymousUsername(String username) { - return AnonymousUser.username.equals(username); - } - - /** - * This method should be used to initialize the AnonymousUser instance with the correct username and password - * @param settings the settings to initialize the anonymous user with - */ - public static synchronized void initialize(Settings settings) { - username = USERNAME_SETTING.get(settings); - List rolesList = ROLES_SETTING.get(settings); - if (rolesList.isEmpty()) { - roles = null; - } else { - roles = rolesList.toArray(Strings.EMPTY_ARRAY); - } - } - - public static String[] getRoles() { - return roles; - } - - public static List> getSettings() { - return Arrays.asList(); + public static boolean isAnonymousUsername(String username, Settings settings) { + // this is possibly the same check but we should not let anything use the default name either + return USERNAME_SETTING.get(settings).equals(username) || DEFAULT_ANONYMOUS_USERNAME.equals(username); } public static void addSettings(List> settingsList) { diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java index 6f99a7cb493..ff740d44ef8 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java @@ -6,37 +6,18 @@ package org.elasticsearch.xpack.security.user; import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.user.User.ReservedUser; +import org.elasticsearch.xpack.security.support.MetadataUtils; /** - * The reserved {@code elastic} superuser. As full permission/access to the cluster/indices and can + * The reserved {@code elastic} superuser. 
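For illustration, a minimal sketch of the new settings-driven AnonymousUser: anonymous access counts as enabled only when at least one anonymous role is configured, and the user object is now just a value built from the node settings. The putArray list setter is assumed from the 5.x-era Settings builder:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.user.AnonymousUser;

class AnonymousUserSketch {
    static AnonymousUser example() {
        Settings nodeSettings = Settings.builder()
                .putArray(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_role") // configuring a role enables anonymous access
                .build();
        AnonymousUser anonymous = new AnonymousUser(nodeSettings);
        assert AnonymousUser.isAnonymousEnabled(nodeSettings); // true: ROLES_SETTING is set and non-empty
        assert anonymous.enabled();                            // the constructor mirrors that flag
        return anonymous;
    }
}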
Has full permission/access to the cluster/indices and can * run as any other user. */ -public class ElasticUser extends ReservedUser { +public class ElasticUser extends User { public static final String NAME = "elastic"; public static final String ROLE_NAME = SuperuserRole.NAME; - public static final ElasticUser INSTANCE = new ElasticUser(); - private ElasticUser() { - super(NAME, ROLE_NAME); - } - - @Override - public boolean equals(Object o) { - return INSTANCE == o; - } - - @Override - public int hashCode() { - return System.identityHashCode(this); - } - - public static boolean is(User user) { - return INSTANCE.equals(user); - } - - public static boolean is(String principal) { - return NAME.equals(principal); + public ElasticUser(boolean enabled) { + super(NAME, new String[] { ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java index 4a462e7acee..614a0d0abd6 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java @@ -6,32 +6,17 @@ package org.elasticsearch.xpack.security.user; import org.elasticsearch.xpack.security.authz.permission.KibanaRole; -import org.elasticsearch.xpack.security.user.User.ReservedUser; +import org.elasticsearch.xpack.security.support.MetadataUtils; /** - * + * Built in user for the kibana server */ -public class KibanaUser extends ReservedUser { +public class KibanaUser extends User { public static final String NAME = "kibana"; public static final String ROLE_NAME = KibanaRole.NAME; - public static final KibanaUser INSTANCE = new KibanaUser(); - KibanaUser() { - super(NAME, ROLE_NAME); - } - - @Override - public boolean equals(Object o) { - return INSTANCE == o; - } - - @Override - public int hashCode() { - return System.identityHashCode(this); - } - - public static boolean is(User user) { - return INSTANCE.equals(user); + public KibanaUser(boolean enabled) { + super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/User.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/User.java index 861a84609ce..143997f12f9 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/User.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/User.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.support.MetadataUtils; @@ -31,40 +30,41 @@ public class User implements ToXContent { private final String[] roles; private final User runAs; private final Map metadata; + private final boolean enabled; @Nullable private final String fullName; @Nullable private final String email; public User(String username, String... 
roles) { - this(username, roles, null, null, null); + this(username, roles, null, null, null, true); } public User(String username, String[] roles, User runAs) { - this(username, roles, null, null, null, runAs); + this(username, roles, null, null, null, true, runAs); } - public User(String username, String[] roles, String fullName, String email, Map metadata) { + public User(String username, String[] roles, String fullName, String email, Map metadata, boolean enabled) { this.username = username; this.roles = roles == null ? Strings.EMPTY_ARRAY : roles; this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap(); this.fullName = fullName; this.email = email; + this.enabled = enabled; this.runAs = null; - verifyNoReservedMetadata(this.metadata); } - public User(String username, String[] roles, String fullName, String email, Map metadata, User runAs) { + public User(String username, String[] roles, String fullName, String email, Map metadata, boolean enabled, User runAs) { this.username = username; this.roles = roles == null ? Strings.EMPTY_ARRAY : roles; this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap(); this.fullName = fullName; this.email = email; + this.enabled = enabled; assert (runAs == null || runAs.runAs() == null) : "the run_as user should not be a user that can run as"; if (runAs == SystemUser.INSTANCE) { throw new ElasticsearchSecurityException("invalid run_as user"); } this.runAs = runAs; - verifyNoReservedMetadata(this.metadata); } /** @@ -105,6 +105,13 @@ public class User implements ToXContent { return email; } + /** + * @return whether the user is enabled or not + */ + public boolean enabled() { + return enabled; + } + /** * @return The user that will be used for run as functionality. 
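For illustration, a minimal sketch of the reworked User constructor above, which now carries an explicit enabled flag; the user and role names here are made up:

import java.util.Collections;
import org.elasticsearch.xpack.security.user.User;

class UserEnabledSketch {
    static User disabledUser() {
        // username, roles, full name, email, metadata, enabled
        User user = new User("jdoe", new String[] { "some_role" }, "John Doe", "jdoe@example.com",
                Collections.emptyMap(), false);
        assert user.enabled() == false;
        return user;
    }
}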
If run as * functionality is not being used, then null will be @@ -133,7 +140,7 @@ public class User implements ToXContent { @Override public boolean equals(Object o) { if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (o instanceof User == false) return false; User user = (User) o; @@ -166,46 +173,28 @@ public class User implements ToXContent { builder.field(Fields.FULL_NAME.getPreferredName(), fullName()); builder.field(Fields.EMAIL.getPreferredName(), email()); builder.field(Fields.METADATA.getPreferredName(), metadata()); + builder.field(Fields.ENABLED.getPreferredName(), enabled()); return builder.endObject(); } - void verifyNoReservedMetadata(Map metadata) { - if (this instanceof ReservedUser) { - return; - } - - MetadataUtils.verifyNoReservedMetadata(metadata); - } - public static User readFrom(StreamInput input) throws IOException { - if (input.readBoolean()) { - String name = input.readString(); - if (SystemUser.is(name)) { + final boolean isInternalUser = input.readBoolean(); + final String username = input.readString(); + if (isInternalUser) { + if (SystemUser.is(username)) { return SystemUser.INSTANCE; - } else if (XPackUser.is(name)) { + } else if (XPackUser.is(username)) { return XPackUser.INSTANCE; } - User user = ReservedRealm.getUser(name); - if (user == null) { - throw new IllegalStateException("invalid reserved user"); - } - return user; + throw new IllegalStateException("user [" + username + "] is not an internal user"); } - String username = input.readString(); String[] roles = input.readStringArray(); Map metadata = input.readMap(); String fullName = input.readOptionalString(); String email = input.readOptionalString(); - if (input.readBoolean()) { - String runAsUsername = input.readString(); - String[] runAsRoles = input.readStringArray(); - Map runAsMetadata = input.readMap(); - String runAsFullName = input.readOptionalString(); - String runAsEmail = input.readOptionalString(); - User runAs = new User(runAsUsername, runAsRoles, runAsFullName, runAsEmail, runAsMetadata); - return new User(username, roles, fullName, email, metadata, runAs); - } - return new User(username, roles, fullName, email, metadata); + boolean enabled = input.readBoolean(); + User runAs = input.readBoolean() ? readFrom(input) : null; + return new User(username, roles, fullName, email, metadata, enabled, runAs); } public static void writeTo(User user, StreamOutput output) throws IOException { @@ -215,9 +204,6 @@ public class User implements ToXContent { } else if (XPackUser.is(user)) { output.writeBoolean(true); output.writeString(XPackUser.NAME); - } else if (ReservedRealm.isReserved(user.principal())) { - output.writeBoolean(true); - output.writeString(user.principal()); } else { output.writeBoolean(false); output.writeString(user.username); @@ -225,26 +211,16 @@ public class User implements ToXContent { output.writeMap(user.metadata); output.writeOptionalString(user.fullName); output.writeOptionalString(user.email); + output.writeBoolean(user.enabled); if (user.runAs == null) { output.writeBoolean(false); } else { output.writeBoolean(true); - output.writeString(user.runAs.username); - output.writeStringArray(user.runAs.roles); - output.writeMap(user.runAs.metadata); - output.writeOptionalString(user.runAs.fullName); - output.writeOptionalString(user.runAs.email); + writeTo(user.runAs, output); } } } - abstract static class ReservedUser extends User { - - ReservedUser(String username, String... 
roles) { - super(username, roles, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA); - } - } - public interface Fields { ParseField USERNAME = new ParseField("username"); ParseField PASSWORD = new ParseField("password"); @@ -253,5 +229,6 @@ public class User implements ToXContent { ParseField FULL_NAME = new ParseField("full_name"); ParseField EMAIL = new ParseField("email"); ParseField METADATA = new ParseField("metadata"); + ParseField ENABLED = new ParseField("enabled"); } } diff --git a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java index 42006c30828..c96bd9b3c4d 100644 --- a/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java +++ b/elasticsearch/x-pack/security/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.user; import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.user.User.ReservedUser; /** * XPack internal user that manages xpack. Has all cluster/indices permissions for x-pack to operate. @@ -17,7 +16,7 @@ public class XPackUser extends User { public static final String ROLE_NAME = SuperuserRole.NAME; public static final XPackUser INSTANCE = new XPackUser(); - XPackUser() { + private XPackUser() { super(NAME, ROLE_NAME); } diff --git a/elasticsearch/x-pack/security/src/main/resources/security-index-template.json b/elasticsearch/x-pack/security/src/main/resources/security-index-template.json index 2a44a1b1aa6..a9e72dd3877 100644 --- a/elasticsearch/x-pack/security/src/main/resources/security-index-template.json +++ b/elasticsearch/x-pack/security/src/main/resources/security-index-template.json @@ -58,6 +58,9 @@ "metadata" : { "type" : "object", "dynamic" : true + }, + "enabled": { + "type": "boolean" } } }, @@ -109,6 +112,9 @@ "type" : "keyword", "index" : false, "doc_values" : false + }, + "enabled": { + "type": "boolean" } } } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/AbstractOldXPackIndicesBackwardsCompatibilityTestCase.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/AbstractOldXPackIndicesBackwardsCompatibilityTestCase.java new file mode 100644 index 00000000000..979d79cb82c --- /dev/null +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/AbstractOldXPackIndicesBackwardsCompatibilityTestCase.java @@ -0,0 +1,159 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch; + +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.TestUtil; +import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; +import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.VersionUtils; +import org.junit.Before; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; + +import static org.elasticsearch.test.OldIndexUtils.copyIndex; +import static org.elasticsearch.test.OldIndexUtils.loadDataFilesList; + +/** + * Base class for tests against clusters coming from old versions of xpack and Elasticsearch. + */ +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) // We'll start the nodes manually +public abstract class AbstractOldXPackIndicesBackwardsCompatibilityTestCase extends SecurityIntegTestCase { + protected List dataFiles; + + @Override + protected final boolean ignoreExternalCluster() { + return true; + } + + @Before + public final void initIndexesList() throws Exception { + dataFiles = loadDataFilesList("x-pack", getBwcIndicesPath()); + } + + @Override + public Settings nodeSettings(int ord) { + // speed up recoveries + return Settings.builder() + .put(super.nodeSettings(ord)) + .put(ThrottlingAllocationDecider + .CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) + .put(ThrottlingAllocationDecider + .CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30) + .build(); + } + + @Override + protected int maxNumberOfNodes() { + try { + return SecurityIntegTestCase.defaultMaxNumberOfNodes() + loadDataFilesList("x-pack", getBwcIndicesPath()).size(); + } catch (IOException e) { + throw new RuntimeException("couldn't enumerate bwc indices", e); + } + } + + public void testAllVersionsTested() throws Exception { + SortedSet expectedVersions = new TreeSet<>(); + for (Version v : VersionUtils.allVersions()) { + if (false == shouldTestVersion(v)) continue; + if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself + if (v.isBeta() == true || v.isAlpha() == true || v.isRC() == true) continue; // don't check alphas etc + expectedVersions.add("x-pack-" + v.toString() + ".zip"); + } + expectedVersions.removeAll(dataFiles); + if (expectedVersions.isEmpty() == false) { + StringBuilder msg = new StringBuilder("Old index tests are missing indexes:"); + for (String expected : expectedVersions) { + msg.append("\n" + expected); + } + fail(msg.toString()); + } + } + + public void testOldIndexes() throws Exception { + Collections.shuffle(dataFiles, random()); + for (String dataFile : dataFiles) { + Version version = Version.fromString(dataFile.replace("x-pack-", "").replace(".zip", "")); + if (false == shouldTestVersion(version)) continue; + setupCluster(dataFile); + ensureYellow(); + long startTime = System.nanoTime(); + try { + checkVersion(version); + } catch (Throwable t) { + throw new AssertionError("Failed while checking [" + version + "]", t); + } + logger.info("--> Done testing {}, took {} millis", 
version, TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime)); + } + } + + /** + * Should we test this version at all? Called before loading the data directory. Return false to skip it entirely. + */ + protected boolean shouldTestVersion(Version version) { + return true; + } + + /** + * Actually test this version. + */ + protected abstract void checkVersion(Version version) throws Exception; + + private void setupCluster(String pathToZipFile) throws Exception { + // shutdown any nodes from previous zip files + while (internalCluster().size() > 0) { + internalCluster().stopRandomNode(s -> true); + } + // first create the data directory and unzip the data there + // we put the whole cluster state and indexes because if we only copy indexes and import them as dangling then + // the native realm services will start because there is no security index and nothing is recovering + // but we want them to not start! + Path dataPath = createTempDir(); + Settings.Builder nodeSettings = Settings.builder() + .put("path.data", dataPath.toAbsolutePath()); + // unzip data + Path backwardsIndex = getBwcIndicesPath().resolve(pathToZipFile); + // decompress the index + try (InputStream stream = Files.newInputStream(backwardsIndex)) { + logger.info("unzipping {}", backwardsIndex.toString()); + TestUtil.unzip(stream, dataPath); + // now we need to copy the whole thing so that it looks like an actual data path + try (Stream unzippedFiles = Files.list(dataPath.resolve("data"))) { + Path dataDir = unzippedFiles.findFirst().get(); + // this is not actually an index but the copy does the job anyway + copyIndex(logger, dataDir.resolve("nodes"), "nodes", dataPath); + // remove the original unzipped directory + } + IOUtils.rm(dataPath.resolve("data")); + } + + // check it is unique + assertTrue(Files.exists(dataPath)); + Path[] list = FileSystemUtils.files(dataPath); + if (list.length != 1) { + throw new IllegalStateException("Backwards index must contain exactly one node"); + } + + // start the node + logger.info("--> Data path for importing node: {}", dataPath); + String importingNodeName = internalCluster().startNode(nodeSettings.build()); + Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, importingNodeName).nodeDataPaths(); + assertEquals(1, nodePaths.length); + } +} diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityIT.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityIT.java index 9384027d250..7c5ad14f9f9 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityIT.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityIT.java @@ -5,19 +5,9 @@ */ package org.elasticsearch; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.VersionUtils; import 
org.elasticsearch.xpack.security.action.role.GetRolesResponse; import org.elasticsearch.xpack.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.security.action.user.GetUsersResponse; @@ -28,26 +18,13 @@ import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.user.User; -import org.junit.AfterClass; -import org.junit.Before; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.Collections; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.stream.Stream; import static java.util.Collections.singletonMap; -import static org.elasticsearch.test.OldIndexUtils.copyIndex; -import static org.elasticsearch.test.OldIndexUtils.loadIndexesList; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordTokenTests.basicAuthHeaderValue; import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.equalTo; /** * Backwards compatibility test that loads some data from a pre-5.0 cluster and attempts to do some basic security stuff with it. It @@ -73,126 +50,13 @@ import static org.hamcrest.Matchers.equalTo; *

 * <li>This document in {@code index3}: {@code {"title": "bwc_test_user should not see this index"}}</li>
  • * **/ -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) // We'll start the nodes manually -public class OldSecurityIndexBackwardsCompatibilityIT extends SecurityIntegTestCase { - - List indexes; - static String importingNodeName; - static Path dataPath; - +public class OldSecurityIndexBackwardsCompatibilityIT extends AbstractOldXPackIndicesBackwardsCompatibilityTestCase { @Override - protected boolean ignoreExternalCluster() { - return true; + protected boolean shouldTestVersion(Version version) { + return version.onOrAfter(Version.V_2_3_0); // native realm only supported from 2.3.0 on } - @Before - public void initIndexesList() throws Exception { - indexes = loadIndexesList("x-pack", getBwcIndicesPath()); - } - - @AfterClass - public static void tearDownStatics() { - importingNodeName = null; - dataPath = null; - } - - @Override - public Settings nodeSettings(int ord) { - Settings settings = super.nodeSettings(ord); - // speed up recoveries - return Settings.builder() - .put(ThrottlingAllocationDecider - .CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) - .put(ThrottlingAllocationDecider - .CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30) - .put(settings).build(); - } - - @Override - protected int maxNumberOfNodes() { - try { - return SecurityIntegTestCase.defaultMaxNumberOfNodes() + loadIndexesList("x-pack", getBwcIndicesPath()).size(); - } catch (IOException e) { - throw new RuntimeException("couldn't enumerate bwc indices", e); - } - } - - void setupCluster(String pathToZipFile) throws Exception { - // shutdown any nodes from previous zip files - while (internalCluster().size() > 0) { - internalCluster().stopRandomNode(s -> true); - } - // first create the data directory and unzip the data there - // we put the whole cluster state and indexes because if we only copy indexes and import them as dangling then - // the native realm services will start because there is no security index and nothing is recovering - // but we want them to not start! 
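For illustration, a hypothetical additional subclass of the new base class: with the cluster setup factored out, a backwards-compatibility test reduces to the two hooks shown above. The class name and the .watches assertion are illustrative only, not part of this patch:

import org.elasticsearch.AbstractOldXPackIndicesBackwardsCompatibilityTestCase;
import org.elasticsearch.Version;

public class OldWatcherIndexBackwardsCompatibilityIT extends AbstractOldXPackIndicesBackwardsCompatibilityTestCase {
    @Override
    protected boolean shouldTestVersion(Version version) {
        return version.onOrAfter(Version.V_2_3_0); // skip data files from versions this check does not cover
    }

    @Override
    protected void checkVersion(Version version) throws Exception {
        // assertions against the imported cluster go here, e.g. that the old watch index was restored
        assertTrue(client().admin().indices().prepareExists(".watches").get().isExists());
    }
}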
- dataPath = createTempDir(); - Settings.Builder nodeSettings = Settings.builder() - .put("path.data", dataPath.toAbsolutePath()); - // unzip data - Path backwardsIndex = getBwcIndicesPath().resolve(pathToZipFile); - // decompress the index - try (InputStream stream = Files.newInputStream(backwardsIndex)) { - logger.info("unzipping {}", backwardsIndex.toString()); - TestUtil.unzip(stream, dataPath); - // now we need to copy the whole thing so that it looks like an actual data path - try (Stream unzippedFiles = Files.list(dataPath.resolve("data"))) { - Path dataDir = unzippedFiles.findFirst().get(); - // this is not actually an index but the copy does the job anyway - copyIndex(logger, dataDir.resolve("nodes"), "nodes", dataPath); - // remove the original unzipped directory - } - IOUtils.rm(dataPath.resolve("data")); - } - - // check it is unique - assertTrue(Files.exists(dataPath)); - Path[] list = FileSystemUtils.files(dataPath); - if (list.length != 1) { - throw new IllegalStateException("Backwards index must contain exactly one node"); - } - - // start the node - logger.info("--> Data path for importing node: {}", dataPath); - importingNodeName = internalCluster().startNode(nodeSettings.build()); - Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, importingNodeName).nodeDataPaths(); - assertEquals(1, nodePaths.length); - } - - public void testAllVersionsTested() throws Exception { - SortedSet expectedVersions = new TreeSet<>(); - for (Version v : VersionUtils.allVersions()) { - if (v.before(Version.V_2_3_0)) continue; // native realm only supported from 2.3.0 on - if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself - if (v.isBeta() == true || v.isAlpha() == true || v.isRC() == true) continue; // don't check alphas etc - expectedVersions.add("x-pack-" + v.toString() + ".zip"); - } - for (String index : indexes) { - if (expectedVersions.remove(index) == false) { - logger.warn("Old indexes tests contain extra index: {}", index); - } - } - if (expectedVersions.isEmpty() == false) { - StringBuilder msg = new StringBuilder("Old index tests are missing indexes:"); - for (String expected : expectedVersions) { - msg.append("\n" + expected); - } - fail(msg.toString()); - } - } - - public void testOldIndexes() throws Exception { - Collections.shuffle(indexes, random()); - for (String index : indexes) { - setupCluster(index); - ensureYellow(); - long startTime = System.nanoTime(); - assertBasicSecurityWorks(); - logger.info("--> Done testing {}, took {} millis", index, TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime)); - } - } - - void assertBasicSecurityWorks() throws Exception { + protected void checkVersion(Version version) throws Exception { // test that user and roles are there logger.info("Getting roles..."); SecurityClient securityClient = new SecurityClient(client()); @@ -225,7 +89,7 @@ public class OldSecurityIndexBackwardsCompatibilityIT extends SecurityIntegTestC assertEquals("bwc_test_user", user.principal()); // check that documents are there - assertThat(client().prepareSearch().get().getHits().getTotalHits(), equalTo(5L)); + assertHitCount(client().prepareSearch("index1", "index2", "index3").get(), 5); Client bwcTestUserClient = client().filterWithHeader( singletonMap(UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue("bwc_test_user", "9876543210"))); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java 
b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java index be3299b34d8..cea4385d3ad 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java @@ -43,11 +43,6 @@ public class PermissionPrecedenceTests extends SecurityIntegTestCase { " - names: '*'\n" + " privileges: [ all ]" + "\n" + - "transport_client:\n" + - " cluster:\n" + - " - cluster:monitor/nodes/info\n" + - " - cluster:monitor/state\n" + - "\n" + "user:\n" + " indices:\n" + " - names: 'test_*'\n" + diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 1f0610373d1..f5178e97396 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -57,7 +57,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas public static final String DEFAULT_PASSWORD_HASHED = new String(Hasher.BCRYPT.hash(new SecuredString(DEFAULT_PASSWORD.toCharArray()))); public static final String DEFAULT_ROLE = "user"; - public static final String DEFAULT_TRANSPORT_CLIENT_ROLE = "trans_client_user"; + public static final String DEFAULT_TRANSPORT_CLIENT_ROLE = "transport_client"; public static final String DEFAULT_TRANSPORT_CLIENT_USER_NAME = "test_trans_client_user"; public static final String CONFIG_STANDARD_USER = @@ -73,10 +73,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas " cluster: [ ALL ]\n" + " indices:\n" + " - names: '*'\n" + - " privileges: [ ALL ]\n" + - DEFAULT_TRANSPORT_CLIENT_ROLE + ":\n" + - " cluster:\n" + - " - transport_client"; + " privileges: [ ALL ]\n"; private final Path parentFolder; private final String subfolderPrefix; diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java index 38df2751180..cb7e0438a52 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.security.crypto.CryptoService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; -import org.junit.After; import org.junit.Before; import static org.hamcrest.CoreMatchers.nullValue; @@ -56,11 +55,6 @@ public class SecurityFeatureSetTests extends ESTestCase { cryptoService = mock(CryptoService.class); } - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testAvailable() throws Exception { SecurityFeatureSet featureSet = new SecurityFeatureSet(settings, licenseState, realms, rolesStore, ipFilter, auditTrail, cryptoService); @@ -150,7 +144,7 @@ public class SecurityFeatureSetTests extends ESTestCase { final boolean anonymousEnabled = randomBoolean(); if (anonymousEnabled) { - 
AnonymousUser.initialize(Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "foo").build()); + settings.put(AnonymousUser.ROLES_SETTING.getKey(), "foo"); } SecurityFeatureSet featureSet = new SecurityFeatureSet(settings.build(), licenseState, realms, rolesStore, diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 2205f0cbcf7..054a5037994 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.permission.KibanaRole; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -56,7 +57,9 @@ public class TransportGetRolesActionTests extends ESTestCase { final boolean isKibanaUser = randomBoolean(); if (isKibanaUser) { - when(context.getUser()).thenReturn(KibanaUser.INSTANCE); + when(context.getUser()).thenReturn(new KibanaUser(true)); + } else { + when(context.getUser()).thenReturn(new ElasticUser(true)); } final int size = randomIntBetween(1, ReservedRolesStore.names().size()); final List names = randomSubsetOf(size, ReservedRolesStore.names()); @@ -116,7 +119,9 @@ public class TransportGetRolesActionTests extends ESTestCase { final boolean isKibanaUser = randomBoolean(); if (isKibanaUser) { - when(context.getUser()).thenReturn(KibanaUser.INSTANCE); + when(context.getUser()).thenReturn(new KibanaUser(true)); + } else { + when(context.getUser()).thenReturn(new ElasticUser(true)); } GetRolesRequest request = new GetRolesRequest(); @@ -199,9 +204,10 @@ public class TransportGetRolesActionTests extends ESTestCase { } if (isKibanaUser) { - when(context.getUser()).thenReturn(KibanaUser.INSTANCE); + when(context.getUser()).thenReturn(new KibanaUser(true)); } else { expectedNames.remove(KibanaRole.NAME); + when(context.getUser()).thenReturn(new ElasticUser(true)); } GetRolesRequest request = new GetRolesRequest(); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 236dce76b97..ca83e386469 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.user.XPackUser; import java.util.concurrent.atomic.AtomicReference; @@ -31,9 +32,9 @@ import static org.mockito.Mockito.when; public class 
TransportAuthenticateActionTests extends ESTestCase { - public void testSystemUser() { + public void testInternalUser() { SecurityContext securityContext = mock(SecurityContext.class); - when(securityContext.getUser()).thenReturn(SystemUser.INSTANCE); + when(securityContext.getUser()).thenReturn(randomFrom(SystemUser.INSTANCE, XPackUser.INSTANCE)); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); @@ -83,7 +84,7 @@ public class TransportAuthenticateActionTests extends ESTestCase { } public void testValidUser() { - final User user = randomFrom(ElasticUser.INSTANCE, KibanaUser.INSTANCE, new User("joe")); + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); SecurityContext securityContext = mock(SecurityContext.class); when(securityContext.getUser()).thenReturn(user); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index 35344ba2394..d3a8f452e35 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.junit.After; +import org.elasticsearch.xpack.security.user.XPackUser; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -43,20 +43,15 @@ import static org.mockito.Mockito.verifyZeroInteractions; public class TransportChangePasswordActionTests extends ESTestCase { - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testAnonymousUser() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); - AnonymousUser.initialize(settings); + AnonymousUser anonymousUser = new AnonymousUser(settings); NativeUsersStore usersStore = mock(NativeUsersStore.class); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), + TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); - request.username(AnonymousUser.INSTANCE.principal()); + request.username(anonymousUser.principal()); request.passwordHash(Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray()))); final AtomicReference throwableRef = new AtomicReference<>(); @@ -79,13 +74,13 @@ public class TransportChangePasswordActionTests extends ESTestCase { verifyZeroInteractions(usersStore); } - public void testSystemUser() { + public void testInternalUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportChangePasswordAction action = new 
TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); - request.username(SystemUser.INSTANCE.principal()); + request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); request.passwordHash(Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray()))); final AtomicReference throwableRef = new AtomicReference<>(); @@ -109,7 +104,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { } public void testValidUser() { - final User user = randomFrom(ElasticUser.INSTANCE, KibanaUser.INSTANCE, new User("joe")); + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); NativeUsersStore usersStore = mock(NativeUsersStore.class); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(user.principal()); @@ -147,7 +142,7 @@ public class TransportChangePasswordActionTests extends ESTestCase { } public void testException() { - final User user = randomFrom(ElasticUser.INSTANCE, KibanaUser.INSTANCE, new User("joe")); + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); NativeUsersStore usersStore = mock(NativeUsersStore.class); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(user.principal()); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index 614681491df..6b647461fc9 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -11,14 +11,15 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.user.AnonymousUser; +import org.elasticsearch.xpack.security.user.ElasticUser; +import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.junit.After; +import org.elasticsearch.xpack.security.user.XPackUser; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -40,19 +41,13 @@ import static org.mockito.Mockito.verifyZeroInteractions; public class TransportDeleteUserActionTests extends ESTestCase { - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testAnonymousUser() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); - AnonymousUser.initialize(settings); NativeUsersStore usersStore = mock(NativeUsersStore.class); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), + 
TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); - DeleteUserRequest request = new DeleteUserRequest(AnonymousUser.INSTANCE.principal()); + DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -74,12 +69,12 @@ public class TransportDeleteUserActionTests extends ESTestCase { verifyZeroInteractions(usersStore); } - public void testSystemUser() { + public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); - DeleteUserRequest request = new DeleteUserRequest(SystemUser.INSTANCE.principal()); + DeleteUserRequest request = new DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -102,7 +97,7 @@ public class TransportDeleteUserActionTests extends ESTestCase { } public void testReservedUser() { - final User reserved = randomFrom(ReservedRealm.users().toArray(new User[0])); + final User reserved = randomFrom(new ElasticUser(true), new KibanaUser(true)); NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 603e54e7dc3..7f6090165ab 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.user.AnonymousUser; @@ -20,13 +21,14 @@ import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.junit.After; +import org.elasticsearch.xpack.security.user.XPackUser; import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicReference; @@ -48,32 +50,34 @@ import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; public class TransportGetUsersActionTests extends ESTestCase { private boolean anonymousEnabled; + private Settings settings; @Before public void maybeEnableAnonymous() { anonymousEnabled = randomBoolean(); if (anonymousEnabled) { - Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); - AnonymousUser.initialize(settings); + settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); + } else { + settings = Settings.EMPTY; } } - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); + when(usersStore.started()).thenReturn(true); + AnonymousUser anonymousUser = new AnonymousUser(settings); + ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class), + reservedRealm); GetUsersRequest request = new GetUsersRequest(); - request.usernames(AnonymousUser.INSTANCE.principal()); + request.usernames(anonymousUser.principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -93,20 +97,21 @@ public class TransportGetUsersActionTests extends ESTestCase { assertThat(responseRef.get(), is(notNullValue())); final User[] users = responseRef.get().users(); if (anonymousEnabled) { - assertThat("expected array with anonymous but got: " + Arrays.toString(users), users, arrayContaining(AnonymousUser.INSTANCE)); + assertThat("expected array with anonymous but got: " + Arrays.toString(users), users, arrayContaining(anonymousUser)); } else { assertThat("expected an empty array but got: " + Arrays.toString(users), users, emptyArray()); } verifyZeroInteractions(usersStore); } - public void testSystemUser() { + public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, + mock(TransportService.class), mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); - request.usernames(SystemUser.INSTANCE.principal()); + request.usernames(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -129,13 +134,18 @@ public class TransportGetUsersActionTests extends ESTestCase { } public void testReservedUsersOnly() { - final int size = randomIntBetween(1, ReservedRealm.users().size()); - final List reservedUsers = randomSubsetOf(size, ReservedRealm.users()); - final List names = reservedUsers.stream().map(User::principal).collect(Collectors.toList()); NativeUsersStore usersStore = 
mock(NativeUsersStore.class); + when(usersStore.started()).thenReturn(true); + ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings)); + final Collection allReservedUsers = reservedRealm.users(); + final int size = randomIntBetween(1, allReservedUsers.size()); + final List reservedUsers = randomSubsetOf(size, allReservedUsers); + final List names = reservedUsers.stream().map(User::principal).collect(Collectors.toList()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class), + reservedRealm); + logger.error("names {}", names); GetUsersRequest request = new GetUsersRequest(); request.usernames(names.toArray(new String[names.size()])); @@ -156,15 +166,17 @@ public class TransportGetUsersActionTests extends ESTestCase { assertThat(throwableRef.get(), is(nullValue())); assertThat(responseRef.get(), is(notNullValue())); assertThat(responseRef.get().users(), arrayContaining(reservedUsers.toArray(new User[reservedUsers.size()]))); - verifyZeroInteractions(usersStore); } public void testGetAllUsers() { final List storeUsers = randomFrom(Collections.emptyList(), Collections.singletonList(new User("joe")), Arrays.asList(new User("jane"), new User("fred")), randomUsers()); NativeUsersStore usersStore = mock(NativeUsersStore.class); + when(usersStore.started()).thenReturn(true); + ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings)); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class), + reservedRealm); GetUsersRequest request = new GetUsersRequest(); doAnswer(new Answer() { @@ -192,7 +204,7 @@ public class TransportGetUsersActionTests extends ESTestCase { }); final List expectedList = new ArrayList<>(); - expectedList.addAll(ReservedRealm.users()); + expectedList.addAll(reservedRealm.users()); expectedList.addAll(storeUsers); assertThat(throwableRef.get(), is(nullValue())); @@ -207,7 +219,8 @@ public class TransportGetUsersActionTests extends ESTestCase { final String[] storeUsernames = storeUsers.stream().map(User::principal).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY); NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class), + mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); @@ -268,7 +281,8 @@ public class TransportGetUsersActionTests extends ESTestCase { final String[] storeUsernames = storeUsers.stream().map(User::principal).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY); NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, 
mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); + mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class), + mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 52b7a2e247f..d4f386bfe6f 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -21,7 +22,7 @@ import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.junit.After; +import org.elasticsearch.xpack.security.user.XPackUser; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -40,23 +41,19 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; public class TransportPutUserActionTests extends ESTestCase { - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testAnonymousUser() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); - AnonymousUser.initialize(settings); + final AnonymousUser anonymousUser = new AnonymousUser(settings); NativeUsersStore usersStore = mock(NativeUsersStore.class); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), + TransportPutUserAction action = new TransportPutUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); PutUserRequest request = new PutUserRequest(); - request.username(AnonymousUser.INSTANCE.principal()); + request.username(anonymousUser.principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -84,7 +81,7 @@ public class TransportPutUserActionTests extends ESTestCase { mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class)); PutUserRequest request = new PutUserRequest(); - request.username(SystemUser.INSTANCE.principal()); + request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = 
new AtomicReference<>();
@@ -107,8 +104,11 @@ public class TransportPutUserActionTests extends ESTestCase {
     }
 
     public void testReservedUser() {
-        final User reserved = randomFrom(ReservedRealm.users().toArray(new User[0]));
         NativeUsersStore usersStore = mock(NativeUsersStore.class);
+        when(usersStore.started()).thenReturn(true);
+        Settings settings = Settings.builder().put("path.home", createTempDir()).build();
+        ReservedRealm reservedRealm = new ReservedRealm(new Environment(settings), settings, usersStore, new AnonymousUser(settings));
+        final User reserved = randomFrom(reservedRealm.users().toArray(new User[0]));
         TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class),
                 mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore, mock(TransportService.class));
 
@@ -132,7 +132,7 @@ public class TransportPutUserActionTests extends ESTestCase {
         assertThat(responseRef.get(), is(nullValue()));
         assertThat(throwableRef.get(), instanceOf(IllegalArgumentException.class));
         assertThat(throwableRef.get().getMessage(), containsString("is reserved and only the password"));
-        verifyZeroInteractions(usersStore);
+        verify(usersStore).started();
     }
 
     public void testValidUser() {
diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java
new file mode 100644
index 00000000000..bef25e4c3bc
--- /dev/null
+++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.action.user;
+
+import org.elasticsearch.ElasticsearchSecurityException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.security.authc.Authentication;
+import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
+import org.elasticsearch.xpack.security.user.AnonymousUser;
+import org.elasticsearch.xpack.security.user.ElasticUser;
+import org.elasticsearch.xpack.security.user.KibanaUser;
+import org.elasticsearch.xpack.security.user.SystemUser;
+import org.elasticsearch.xpack.security.user.User;
+import org.elasticsearch.xpack.security.user.XPackUser;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit tests for the {@link TransportSetEnabledAction}
+ */
+public class TransportSetEnabledActionTests extends ESTestCase {
+
+    public void testAnonymousUser() {
+        Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build();
+        final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe"));
+        ThreadPool threadPool = mock(ThreadPool.class);
+        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+        Authentication authentication = mock(Authentication.class);
+        when(threadPool.getThreadContext()).thenReturn(threadContext);
+        threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication);
+        when(authentication.getRunAsUser()).thenReturn(user);
+        NativeUsersStore usersStore = mock(NativeUsersStore.class);
+        TransportSetEnabledAction action = new TransportSetEnabledAction(settings, threadPool,
+                mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore);
+
+        SetEnabledRequest request = new SetEnabledRequest();
+        request.username(new AnonymousUser(settings).principal());
+        request.enabled(randomBoolean());
+
+        final AtomicReference throwableRef = new AtomicReference<>();
+        final AtomicReference responseRef = new AtomicReference<>();
+        action.doExecute(request, new ActionListener() {
+            @Override
+            public void onResponse(SetEnabledResponse setEnabledResponse) {
+                responseRef.set(setEnabledResponse);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throwableRef.set(e);
+            }
+        });
+
+        assertThat(responseRef.get(), is(nullValue()));
+        assertThat(throwableRef.get(),
instanceOf(IllegalArgumentException.class)); + assertThat(throwableRef.get().getMessage(), containsString("is anonymous")); + verifyZeroInteractions(usersStore); + } + + public void testInternalUser() { + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); + ThreadPool threadPool = mock(ThreadPool.class); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + Authentication authentication = mock(Authentication.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + when(authentication.getRunAsUser()).thenReturn(user); + NativeUsersStore usersStore = mock(NativeUsersStore.class); + TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, + mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + + SetEnabledRequest request = new SetEnabledRequest(); + request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); + request.enabled(randomBoolean()); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(request, new ActionListener() { + @Override + public void onResponse(SetEnabledResponse setEnabledResponse) { + responseRef.set(setEnabledResponse); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), instanceOf(IllegalArgumentException.class)); + assertThat(throwableRef.get().getMessage(), containsString("is internal")); + verifyZeroInteractions(usersStore); + } + + public void testValidUser() { + ThreadPool threadPool = mock(ThreadPool.class); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + Authentication authentication = mock(Authentication.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + when(authentication.getRunAsUser()).thenReturn(new User("the runner")); + + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); + NativeUsersStore usersStore = mock(NativeUsersStore.class); + SetEnabledRequest request = new SetEnabledRequest(); + request.username(user.principal()); + request.enabled(randomBoolean()); + request.setRefreshPolicy(randomFrom(RefreshPolicy.values())); + // mock the setEnabled call on the native users store so that it will invoke the action listener with a response + doAnswer(new Answer() { + public Void answer(InvocationOnMock invocation) { + Object[] args = invocation.getArguments(); + assert args.length == 4; + ActionListener listener = (ActionListener) args[3]; + listener.onResponse(null); + return null; + } + }).when(usersStore) + .setEnabled(eq(user.principal()), eq(request.enabled()), eq(request.getRefreshPolicy()), any(ActionListener.class)); + TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, + mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(request, new ActionListener() { + @Override + public void onResponse(SetEnabledResponse setEnabledResponse) { + 
responseRef.set(setEnabledResponse); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(notNullValue())); + assertThat(responseRef.get(), instanceOf(SetEnabledResponse.class)); + assertThat(throwableRef.get(), is(nullValue())); + verify(usersStore, times(1)) + .setEnabled(eq(user.principal()), eq(request.enabled()), eq(request.getRefreshPolicy()), any(ActionListener.class)); + } + + public void testException() { + ThreadPool threadPool = mock(ThreadPool.class); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + Authentication authentication = mock(Authentication.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + when(authentication.getRunAsUser()).thenReturn(new User("the runner")); + + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); + NativeUsersStore usersStore = mock(NativeUsersStore.class); + SetEnabledRequest request = new SetEnabledRequest(); + request.username(user.principal()); + request.enabled(randomBoolean()); + request.setRefreshPolicy(randomFrom(RefreshPolicy.values())); + final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new RuntimeException()); + // we're mocking the setEnabled call on the native users store so that it will invoke the action listener with an exception + doAnswer(new Answer() { + public Void answer(InvocationOnMock invocation) { + Object[] args = invocation.getArguments(); + assert args.length == 4; + ActionListener listener = (ActionListener) args[3]; + listener.onFailure(e); + return null; + } + }).when(usersStore) + .setEnabled(eq(user.principal()), eq(request.enabled()), eq(request.getRefreshPolicy()), any(ActionListener.class)); + TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, + mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(request, new ActionListener() { + @Override + public void onResponse(SetEnabledResponse setEnabledResponse) { + responseRef.set(setEnabledResponse); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + assertThat(throwableRef.get(), sameInstance(e)); + verify(usersStore, times(1)) + .setEnabled(eq(user.principal()), eq(request.enabled()), eq(request.getRefreshPolicy()), any(ActionListener.class)); + } + + public void testUserModifyingThemselves() { + final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); + ThreadPool threadPool = mock(ThreadPool.class); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + Authentication authentication = mock(Authentication.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + when(authentication.getRunAsUser()).thenReturn(user); + + NativeUsersStore usersStore = mock(NativeUsersStore.class); + SetEnabledRequest request = new SetEnabledRequest(); + request.username(user.principal()); + request.enabled(randomBoolean()); + 
request.setRefreshPolicy(randomFrom(RefreshPolicy.values()));
+        TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool,
+                mock(TransportService.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore);
+
+        final AtomicReference throwableRef = new AtomicReference<>();
+        final AtomicReference responseRef = new AtomicReference<>();
+        action.doExecute(request, new ActionListener() {
+            @Override
+            public void onResponse(SetEnabledResponse setEnabledResponse) {
+                responseRef.set(setEnabledResponse);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throwableRef.set(e);
+            }
+        });
+
+        assertThat(responseRef.get(), is(nullValue()));
+        assertThat(throwableRef.get(), instanceOf(IllegalArgumentException.class));
+        assertThat(throwableRef.get().getMessage(), containsString("own account"));
+        verifyZeroInteractions(usersStore);
+    }
+}
diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditLevelTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditLevelTests.java
new file mode 100644
index 00000000000..9655e1f09d8
--- /dev/null
+++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditLevelTests.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.audit;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.security.audit.AuditLevel;
+
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Locale;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class AuditLevelTests extends ESTestCase {
+    public void testAllIndexAuditLevel() {
+        EnumSet enumSet = AuditLevel.parse(Collections.singletonList("_all"));
+        AuditLevel[] levels = AuditLevel.values();
+        assertThat(enumSet.size(), is(levels.length));
+        for (AuditLevel level : levels) {
+            assertThat(enumSet.contains(level), is(true));
+        }
+    }
+
+    public void testExcludeHasPreference() {
+        EnumSet enumSet = AuditLevel.parse(Collections.singletonList("_all"), Collections.singletonList("_all"));
+        assertThat(enumSet.size(), is(0));
+    }
+
+    public void testExcludeHasPreferenceSingle() {
+        String excluded = randomFrom(AuditLevel.values()).toString().toLowerCase(Locale.ROOT);
+        EnumSet enumSet = AuditLevel.parse(Collections.singletonList("_all"), Collections.singletonList(excluded));
+        EnumSet expected = EnumSet.allOf(AuditLevel.class);
+        expected.remove(AuditLevel.valueOf(excluded.toUpperCase(Locale.ROOT)));
+        assertThat(enumSet, equalTo(expected));
+    }
+}
diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevelTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevelTests.java
deleted file mode 100644
index 96b38f2ae10..00000000000
--- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditLevelTests.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements.
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.audit.index; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Collections; -import java.util.EnumSet; -import java.util.Locale; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -public class IndexAuditLevelTests extends ESTestCase { - public void testAllIndexAuditLevel() { - EnumSet enumSet = IndexAuditLevel.parse(Collections.singletonList("_all")); - IndexAuditLevel[] levels = IndexAuditLevel.values(); - assertThat(enumSet.size(), is(levels.length)); - for (IndexAuditLevel level : levels) { - assertThat(enumSet.contains(level), is(true)); - } - } - - public void testExcludeHasPreference() { - EnumSet enumSet = IndexAuditLevel.parse(Collections.singletonList("_all"), Collections.singletonList("_all")); - assertThat(enumSet.size(), is(0)); - } - - public void testExcludeHasPreferenceSingle() { - String excluded = randomFrom(IndexAuditLevel.values()).toString().toLowerCase(Locale.ROOT); - EnumSet enumSet = IndexAuditLevel.parse(Collections.singletonList("_all"), Collections.singletonList(excluded)); - EnumSet expected = EnumSet.allOf(IndexAuditLevel.class); - expected.remove(IndexAuditLevel.valueOf(excluded.toUpperCase(Locale.ROOT))); - assertThat(enumSet, equalTo(expected)); - } -} diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index a228791c25a..9614a9f6019 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -113,11 +113,6 @@ public class IndexAuditTrailMutedTests extends ESTestCase { TransportMessage message = mock(TransportMessage.class); AuthenticationToken token = mock(AuthenticationToken.class); - // with realm - auditTrail.authenticationFailed(randomAsciiOfLengthBetween(2, 10), token, "_action", message); - assertThat(messageEnqueued.get(), is(false)); - assertThat(clientCalled.get(), is(false)); - // without realm auditTrail.authenticationFailed(token, "_action", message); assertThat(messageEnqueued.get(), is(false)); @@ -136,11 +131,6 @@ public class IndexAuditTrailMutedTests extends ESTestCase { RestRequest restRequest = mock(RestRequest.class); AuthenticationToken token = mock(AuthenticationToken.class); - // with realm - auditTrail.authenticationFailed(randomAsciiOfLengthBetween(2, 10), token, restRequest); - assertThat(messageEnqueued.get(), is(false)); - assertThat(clientCalled.get(), is(false)); - // without the realm auditTrail.authenticationFailed(token, restRequest); assertThat(messageEnqueued.get(), is(false)); @@ -150,7 +140,32 @@ public class IndexAuditTrailMutedTests extends ESTestCase { auditTrail.authenticationFailed(restRequest); assertThat(messageEnqueued.get(), is(false)); assertThat(clientCalled.get(), is(false)); - + + verifyZeroInteractions(token, restRequest); + } + + public void testAuthenticationFailedRealmMutedTransport() { + createAuditTrail(new String[] { "realm_authentication_failed" }); + TransportMessage message = mock(TransportMessage.class); + AuthenticationToken token = mock(AuthenticationToken.class); + + // with realm + 
auditTrail.authenticationFailed(randomAsciiOfLengthBetween(2, 10), token, "_action", message); + assertThat(messageEnqueued.get(), is(false)); + assertThat(clientCalled.get(), is(false)); + + verifyZeroInteractions(token, message); + } + + public void testAuthenticationFailedRealmMutedRest() { + createAuditTrail(new String[]{"realm_authentication_failed"}); + RestRequest restRequest = mock(RestRequest.class); + AuthenticationToken token = mock(AuthenticationToken.class); + + // with realm + auditTrail.authenticationFailed(randomAsciiOfLengthBetween(2, 10), token, restRequest); + assertThat(messageEnqueued.get(), is(false)); + assertThat(clientCalled.get(), is(false)); verifyZeroInteractions(token, restRequest); } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index b5baf410824..2cfff86159d 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -96,6 +96,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { private int numShards; private int numReplicas; private ThreadPool threadPool; + private boolean includeRequestBody; @BeforeClass public static void configureBeforeClass() { @@ -241,6 +242,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { Settings.Builder builder = Settings.builder(); builder.put(levelSettings(includes, excludes)); builder.put(commonSettings(rollover)); + builder.put("xpack.security.audit.index.events.emit_request_body", includeRequestBody); return builder.build(); } @@ -256,6 +258,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { rollover = randomFrom(HOURLY, DAILY, WEEKLY, MONTHLY); numReplicas = numberOfReplicas(); numShards = numberOfShards(); + includeRequestBody = randomBoolean(); Settings.Builder builder = Settings.builder(); if (remoteIndexing) { builder.put(remoteSettings); @@ -314,7 +317,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { assertThat(NetworkAddress.format(InetAddress.getLoopbackAddress()), equalTo(sourceMap.get("origin_address"))); assertThat("_uri", equalTo(sourceMap.get("uri"))); assertThat(sourceMap.get("origin_type"), is("rest")); - assertThat(sourceMap.get("request_body"), notNullValue()); + assertRequestBody(sourceMap); } public void testAuthenticationFailedTransport() throws Exception { @@ -373,7 +376,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { assertThat("127.0.0.1", equalTo(sourceMap.get("origin_address"))); assertThat("_uri", equalTo(sourceMap.get("uri"))); assertThat(sourceMap.get("origin_type"), is("rest")); - assertThat(sourceMap.get("request_body"), notNullValue()); + assertRequestBody(sourceMap); } public void testAuthenticationFailedRestNoToken() throws Exception { @@ -388,7 +391,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { assertThat("127.0.0.1", equalTo(sourceMap.get("origin_address"))); assertThat("_uri", equalTo(sourceMap.get("uri"))); assertThat(sourceMap.get("origin_type"), is("rest")); - assertThat(sourceMap.get("request_body"), notNullValue()); + assertRequestBody(sourceMap); } public void testAuthenticationFailedTransportRealm() throws Exception { @@ -429,7 +432,7 @@ public class IndexAuditTrailTests 
extends SecurityIntegTestCase { assertThat("_uri", equalTo(sourceMap.get("uri"))); assertEquals("_realm", sourceMap.get("realm")); assertThat(sourceMap.get("origin_type"), is("rest")); - assertThat(sourceMap.get("request_body"), notNullValue()); + assertRequestBody(sourceMap); } public void testAccessGranted() throws Exception { @@ -520,7 +523,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { assertThat("127.0.0.1", equalTo(sourceMap.get("origin_address"))); assertThat("_uri", equalTo(sourceMap.get("uri"))); assertThat(sourceMap.get("origin_type"), is("rest")); - assertThat(sourceMap.get("request_body"), notNullValue()); + assertRequestBody(sourceMap); } public void testTamperedRequest() throws Exception { @@ -638,6 +641,13 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { assertEquals(type, sourceMap.get("event_type")); } + private void assertRequestBody(Map sourceMap) { + if (includeRequestBody) { + assertThat(sourceMap.get("request_body"), notNullValue()); + } else { + assertThat(sourceMap.get("request_body"), nullValue()); + } + } private static class LocalHostMockMessage extends TransportMessage { LocalHostMockMessage() { remoteAddress(new LocalTransportAddress("local_host")); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/CapturingLogger.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/CapturingLogger.java index 9353e46f154..21f6bc7c77c 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/CapturingLogger.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/CapturingLogger.java @@ -14,7 +14,6 @@ import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.logging.log4j.core.filter.RegexFilter; -import org.apache.logging.log4j.core.impl.MutableLogEvent; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.TestLoggers; diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 0d0f0a159a1..8c277524588 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -105,53 +105,45 @@ public class LoggingAuditTrailTests extends ESTestCase { private DiscoveryNode localNode; private ClusterService clusterService; private ThreadContext threadContext; + private boolean includeRequestBody; @Before public void init() throws Exception { + includeRequestBody = randomBoolean(); settings = Settings.builder() .put("xpack.security.audit.logfile.prefix.emit_node_host_address", randomBoolean()) .put("xpack.security.audit.logfile.prefix.emit_node_host_name", randomBoolean()) .put("xpack.security.audit.logfile.prefix.emit_node_name", randomBoolean()) + .put("xpack.security.audit.logfile.events.emit_request_body", includeRequestBody) .build(); localNode = mock(DiscoveryNode.class); 
when(localNode.getHostAddress()).thenReturn(LocalTransportAddress.buildUnique().toString()); clusterService = mock(ClusterService.class); when(clusterService.localNode()).thenReturn(localNode); prefix = LoggingAuditTrail.resolvePrefix(settings, localNode); + threadContext = new ThreadContext(Settings.EMPTY); } public void testAnonymousAccessDeniedTransport() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, clusterService.localNode(), threadContext); - auditTrail.anonymousAccessDenied("_action", message); - switch (level.toString()) { - case "ERROR": - assertEmptyLog(logger); - break; - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.WARN, prefix + "[transport] [anonymous_access_denied]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.WARN, prefix + "[transport] [anonymous_access_denied]\t" + origins + ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [anonymous_access_denied]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [anonymous_access_denied]\t" + origins + - ", action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, clusterService.localNode(), threadContext); + auditTrail.anonymousAccessDenied("_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins + + ", action=[_action], request=[MockMessage]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "anonymous_access_denied").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.anonymousAccessDenied("_action", message); + assertEmptyLog(logger); } public void testAnonymousAccessDeniedRest() throws Exception { @@ -160,359 +152,283 @@ public class LoggingAuditTrailTests extends ESTestCase { when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); when(request.uri()).thenReturn("_uri"); String expectedMessage = prepareRestContent(request); - - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.anonymousAccessDenied(request); - switch (level.toString()) { - case "ERROR": - assertEmptyLog(logger); - break; - case "WARN": - case "INFO": - assertMsg(logger, Level.WARN, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); - break; - case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.anonymousAccessDenied(request); + if (includeRequestBody) { + assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "anonymous_access_denied").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.anonymousAccessDenied(request); + assertEmptyLog(logger); } public void testAuthenticationFailed() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; - auditTrail.authenticationFailed(new MockToken(), "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins + - ", principal=[_principal], action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins + - ", principal=[_principal], action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins + - ", principal=[_principal], action=[_action], indices=[" + indices(message) + - "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins + - ", principal=[_principal], action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; + auditTrail.authenticationFailed(new MockToken(), "_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + + ", principal=[_principal], action=[_action], indices=[" + indices(message) + + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + + ", principal=[_principal], action=[_action], request=[MockMessage]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed(new MockToken(), "_action", message); + assertEmptyLog(logger); } public void testAuthenticationFailedNoToken() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; - auditTrail.authenticationFailed("_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; + auditTrail.authenticationFailed("_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + + ", action=[_action], request=[MockMessage]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed("_action", message); + assertEmptyLog(logger); } public void testAuthenticationFailedRest() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - RestRequest request = mock(RestRequest.class); - InetAddress address = forge("_hostname", randomBoolean() ? 
"127.0.0.1" : "::1"); - when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); - when(request.uri()).thenReturn("_uri"); - String expectedMessage = prepareRestContent(request); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.authenticationFailed(new MockToken(), request); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - assertMsg(logger, Level.ERROR, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]"); - break; - case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + - expectedMessage + "]"); - } + RestRequest request = mock(RestRequest.class); + InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1"); + when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); + when(request.uri()).thenReturn("_uri"); + String expectedMessage = prepareRestContent(request); + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed(new MockToken(), request); + if (includeRequestBody) { + assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + + expectedMessage + "]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed(new MockToken(), request); + assertEmptyLog(logger); } public void testAuthenticationFailedRestNoToken() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - RestRequest request = mock(RestRequest.class); - InetAddress address = forge("_hostname", randomBoolean() ? 
"127.0.0.1" : "::1"); - when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); - when(request.uri()).thenReturn("_uri"); - String expectedMessage = prepareRestContent(request); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.authenticationFailed(request); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - assertMsg(logger, Level.ERROR, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); - break; - case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); - } + RestRequest request = mock(RestRequest.class); + InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1"); + when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); + when(request.uri()).thenReturn("_uri"); + String expectedMessage = prepareRestContent(request); + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed(request); + if (includeRequestBody) { + assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed(request); + assertEmptyLog(logger); } public void testAuthenticationFailedRealm() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; - auditTrail.authenticationFailed("_realm", new MockToken(), "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - case "DEBUG": - assertEmptyLog(logger); - break; - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.TRACE, prefix + "[transport] [authentication_failed]\trealm=[_realm], " + origins + - ", principal=[_principal], action=[_action], indices=[" + indices(message) + "], " + - "request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.TRACE, prefix + "[transport] [authentication_failed]\trealm=[_realm], " + origins + - ", principal=[_principal], action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + auditTrail.authenticationFailed("_realm", new MockToken(), "_action", message); + assertEmptyLog(logger); + + // test enabled + settings = + Settings.builder().put(settings).put("xpack.security.audit.logfile.events.include", "realm_authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed("_realm", new MockToken(), "_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins + + ", principal=[_principal], action=[_action], indices=[" + indices(message) + "], " + + "request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins + + ", principal=[_principal], action=[_action], request=[MockMessage]"); } } public void testAuthenticationFailedRealmRest() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - RestRequest request = mock(RestRequest.class); - InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1"); - when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); - when(request.uri()).thenReturn("_uri"); - String expectedMessage = prepareRestContent(request); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.authenticationFailed("_realm", new MockToken(), request); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - case "DEBUG": - assertEmptyLog(logger); - break; - case "TRACE": - assertMsg(logger, Level.TRACE, prefix + "[rest] [authentication_failed]\trealm=[_realm], origin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + - expectedMessage + "]"); - } + RestRequest request = mock(RestRequest.class); + InetAddress address = forge("_hostname", randomBoolean() ? 
"127.0.0.1" : "::1"); + when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); + when(request.uri()).thenReturn("_uri"); + String expectedMessage = prepareRestContent(request); + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed("_realm", new MockToken(), request); + assertEmptyLog(logger); + + // test enabled + settings = + Settings.builder().put(settings).put("xpack.security.audit.logfile.events.include", "realm_authentication_failed").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.authenticationFailed("_realm", new MockToken(), request); + if (includeRequestBody) { + assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + + expectedMessage + "]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]"); } } public void testAccessGranted() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - boolean runAs = randomBoolean(); - User user; - if (runAs) { - user = new User("_username", new String[]{"r1"}, - new User("running as", new String[] {"r2"})); - } else { - user = new User("_username", new String[]{"r1"}); - } - String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; - auditTrail.accessGranted(user, "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - assertEmptyLog(logger); - break; - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + boolean runAs = randomBoolean(); + User user; + if (runAs) { + user = new User("_username", new String[]{"r1"}, + new User("running as", new String[] {"r2"})); + } else { + user = new User("_username", new String[]{"r1"}); } + String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; + auditTrail.accessGranted(user, "_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + + ", action=[_action], request=[MockMessage]"); + } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "access_granted").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.accessGranted(user, "_action", message); + assertEmptyLog(logger); } public void testAccessGrantedInternalSystemAction() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - auditTrail.accessGranted(SystemUser.INSTANCE, "internal:_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - case "DEBUG": - assertEmptyLog(logger); - break; - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.TRACE, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + - SystemUser.INSTANCE.principal() - + "], action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.TRACE, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + - SystemUser.INSTANCE.principal() + "], action=[internal:_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + auditTrail.accessGranted(SystemUser.INSTANCE, "internal:_action", message); + assertEmptyLog(logger); + + // test enabled + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.include", "system_access_granted").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.accessGranted(SystemUser.INSTANCE, "internal:_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + + SystemUser.INSTANCE.principal() + + "], action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + + SystemUser.INSTANCE.principal() + "], action=[internal:_action], request=[MockMessage]"); } } public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - boolean runAs = randomBoolean(); - User user; - if (runAs) { - user = new User("_username", new String[]{"r1"}, - new User("running as", new String[] {"r2"})); - } else { - user = new User("_username", new String[]{"r1"}); - } - String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; - auditTrail.accessGranted(user, "internal:_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - assertEmptyLog(logger); - break; - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + boolean runAs = randomBoolean(); + User user; + if (runAs) { + user = new User("_username", new String[]{"r1"}, + new User("running as", new String[] {"r2"})); + } else { + user = new User("_username", new String[]{"r1"}); } + String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; + auditTrail.accessGranted(user, "internal:_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + + ", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + + ", action=[internal:_action], request=[MockMessage]"); + } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "access_granted").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.accessGranted(user, "internal:_action", message); + assertEmptyLog(logger); } public void testAccessDenied() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - boolean runAs = randomBoolean(); - User user; - if (runAs) { - user = new User("_username", new String[]{"r1"}, - new User("running as", new String[] {"r2"})); - } else { - user = new User("_username", new String[]{"r1"}); - } - String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; - auditTrail.accessDenied(user, "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.ERROR, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.ERROR, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + boolean runAs = randomBoolean(); + User user; + if (runAs) { + user = new User("_username", new String[]{"r1"}, + new User("running as", new String[] {"r2"})); + } else { + user = new User("_username", new String[]{"r1"}); } + String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; + auditTrail.accessDenied(user, "_action", message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + + ", action=[_action], request=[MockMessage]"); + } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "access_denied").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.accessDenied(user, "_action", message); + assertEmptyLog(logger); } public void testTamperedRequestRest() throws Exception { @@ -521,58 +437,42 @@ public class LoggingAuditTrailTests extends ESTestCase { when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200)); when(request.uri()).thenReturn("_uri"); String expectedMessage = prepareRestContent(request); - - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.tamperedRequest(request); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - assertMsg(logger, Level.ERROR, prefix + "[rest] [tampered_request]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); - break; - case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[rest] [tampered_request]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.tamperedRequest(request); + if (includeRequestBody) { + assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" + + NetworkAddress.format(address) + "], uri=[_uri]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "tampered_request").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.tamperedRequest(request); + assertEmptyLog(logger); } public void testTamperedRequest() throws Exception { String action = "_action"; - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.tamperedRequest(action, message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins + - ", action=[_action], request=[MockMessage]"); - } - } + TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.tamperedRequest(action, message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + + ", action=[_action], request=[MockMessage]"); } + + // test disabled + } public void testTamperedRequestWithUser() throws Exception { @@ -585,137 +485,100 @@ public class LoggingAuditTrailTests extends ESTestCase { user = new User("_username", new String[]{"r1"}); } String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]"; - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - TransportMessage message = randomBoolean() ? 
new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.tamperedRequest(user, action, message); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "]"); - } else { - assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action]"); - } - break; - case "DEBUG": - case "TRACE": - if (message instanceof IndicesRequest) { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); - } else { - assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); - } - } + TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.tamperedRequest(user, action, message); + if (message instanceof IndicesRequest) { + assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + } else { + assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + + ", action=[_action], request=[MockMessage]"); } + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "tampered_request").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.tamperedRequest(user, action, message); + assertEmptyLog(logger); } public void testConnectionDenied() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - InetAddress inetAddress = InetAddress.getLoopbackAddress(); - SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all"); - auditTrail.connectionDenied(inetAddress, "default", rule); - switch (level.toString()) { - case "ERROR": - assertMsg(logger, Level.ERROR, String.format(Locale.ROOT, prefix + - "[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]", - NetworkAddress.format(inetAddress), "default", "_all")); - break; - case "WARN": - case "INFO": - case "DEBUG": - case "TRACE": - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + InetAddress inetAddress = 
InetAddress.getLoopbackAddress(); + SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all"); + auditTrail.connectionDenied(inetAddress, "default", rule); + assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix + + "[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]", + NetworkAddress.format(inetAddress), "default", "_all")); + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "connection_denied").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.connectionDenied(inetAddress, "default", rule); + assertEmptyLog(logger); } public void testConnectionGranted() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - InetAddress inetAddress = InetAddress.getLoopbackAddress(); - SecurityIpFilterRule rule = IPFilter.DEFAULT_PROFILE_ACCEPT_ALL; - auditTrail.connectionGranted(inetAddress, "default", rule); - switch (level.toString()) { - case "ERROR": - case "WARN": - case "INFO": - case "DEBUG": - assertEmptyLog(logger); - break; - case "TRACE": - assertMsg(logger, Level.TRACE, String.format(Locale.ROOT, prefix + "[ip_filter] " + - "[connection_granted]\torigin_address=[%s], transport_profile=[default], rule=[allow default:accept_all]", - NetworkAddress.format(inetAddress))); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + InetAddress inetAddress = InetAddress.getLoopbackAddress(); + SecurityIpFilterRule rule = IPFilter.DEFAULT_PROFILE_ACCEPT_ALL; + auditTrail.connectionGranted(inetAddress, "default", rule); + assertEmptyLog(logger); + + // test enabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.include", "connection_granted").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.connectionGranted(inetAddress, "default", rule); + assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix + "[ip_filter] [connection_granted]\torigin_address=[%s], " + + "transport_profile=[default], rule=[allow default:accept_all]", NetworkAddress.format(inetAddress))); } public void testRunAsGranted() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = new MockMessage(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"})); - auditTrail.runAsGranted(user, "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - assertEmptyLog(logger); - break; - case "INFO": - assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_granted]\t" + origins + - ", principal=[_username], run_as_principal=[running as], action=[_action]"); - break; - 
case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[transport] [run_as_granted]\t" + origins + - ", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = new MockMessage(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"})); + auditTrail.runAsGranted(user, "_action", message); + assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_granted]\t" + origins + + ", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]"); + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "run_as_granted").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.runAsGranted(user, "_action", message); + assertEmptyLog(logger); } public void testRunAsDenied() throws Exception { - for (Level level : Level.values()) { - threadContext = new ThreadContext(Settings.EMPTY); - Logger logger = CapturingLogger.newCapturingLogger(level); - LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - TransportMessage message = new MockMessage(threadContext); - String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); - User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"})); - auditTrail.runAsDenied(user, "_action", message); - switch (level.toString()) { - case "ERROR": - case "WARN": - assertEmptyLog(logger); - break; - case "INFO": - assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_denied]\t" + origins + - ", principal=[_username], run_as_principal=[running as], action=[_action]"); - break; - case "DEBUG": - case "TRACE": - assertMsg(logger, Level.DEBUG, prefix + "[transport] [run_as_denied]\t" + origins + - ", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]"); - } - } + Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); + LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + TransportMessage message = new MockMessage(threadContext); + String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext); + User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"})); + auditTrail.runAsDenied(user, "_action", message); + assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_denied]\t" + origins + + ", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]"); + + // test disabled + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + settings = Settings.builder().put(settings).put("xpack.security.audit.logfile.events.exclude", "run_as_denied").build(); + auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); + auditTrail.runAsDenied(user, "_action", message); + assertEmptyLog(logger); } public void testOriginAttributes() throws Exception { - threadContext = new ThreadContext(Settings.EMPTY); + MockMessage 
message = new MockMessage(threadContext); String text = LoggingAuditTrail.originAttributes(message, localNode, threadContext);; InetSocketAddress restAddress = RemoteHostHeader.restRemoteAddress(threadContext); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 64161fc7607..c536ae36b2b 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.security.crypto.CryptoService; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; -import org.junit.After; import org.junit.Before; import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; @@ -56,21 +55,21 @@ import static org.mockito.Mockito.when; /** - * + * Unit tests for the {@link AuthenticationService} */ public class AuthenticationServiceTests extends ESTestCase { - AuthenticationService service; - TransportMessage message; - RestRequest restRequest; - Realms realms; - Realm firstRealm; - Realm secondRealm; - AuditTrailService auditTrail; - AuthenticationToken token; - CryptoService cryptoService; - ThreadPool threadPool; - ThreadContext threadContext; + private AuthenticationService service; + private TransportMessage message; + private RestRequest restRequest; + private Realms realms; + private Realm firstRealm; + private Realm secondRealm; + private AuditTrailService auditTrail; + private AuthenticationToken token; + private CryptoService cryptoService; + private ThreadPool threadPool; + private ThreadContext threadContext; @Before public void init() throws Exception { @@ -109,12 +108,7 @@ public class AuthenticationServiceTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(threadContext); when(cryptoService.sign(any(String.class))).thenReturn("_signed_auth"); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); - } - - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings)); } @SuppressWarnings("unchecked") @@ -268,6 +262,33 @@ public class AuthenticationServiceTests extends ESTestCase { assertThreadContextContainsAuthentication(result); } + public void testAuthenticateTransportDisabledUser() throws Exception { + User user = new User("username", new String[] { "r1", "r2" }, null, null, null, false); + User fallback = randomBoolean() ? 
SystemUser.INSTANCE : null; + when(firstRealm.token(threadContext)).thenReturn(token); + when(firstRealm.supports(token)).thenReturn(true); + when(firstRealm.authenticate(token)).thenReturn(user); + + ElasticsearchSecurityException e = + expectThrows(ElasticsearchSecurityException.class, () -> service.authenticate("_action", message, fallback)); + verify(auditTrail).authenticationFailed(token, "_action", message); + verifyNoMoreInteractions(auditTrail); + assertAuthenticationException(e); + } + + public void testAuthenticateRestDisabledUser() throws Exception { + User user = new User("username", new String[] { "r1", "r2" }, null, null, null, false); + when(firstRealm.token(threadContext)).thenReturn(token); + when(firstRealm.supports(token)).thenReturn(true); + when(firstRealm.authenticate(token)).thenReturn(user); + + ElasticsearchSecurityException e = + expectThrows(ElasticsearchSecurityException.class, () -> service.authenticate(restRequest)); + verify(auditTrail).authenticationFailed(token, restRequest); + verifyNoMoreInteractions(auditTrail); + assertAuthenticationException(e); + } + public void testAuthenticateTransportSuccess() throws Exception { User user = new User("username", "r1", "r2"); User fallback = randomBoolean() ? SystemUser.INSTANCE : null; @@ -308,7 +329,7 @@ public class AuthenticationServiceTests extends ESTestCase { ThreadContext threadContext1 = new ThreadContext(Settings.EMPTY); when(threadPool.getThreadContext()).thenReturn(threadContext1); service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); threadContext1.putTransient(Authentication.AUTHENTICATION_KEY, threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); threadContext1.putHeader(Authentication.AUTHENTICATION_KEY, threadContext.getHeader(Authentication.AUTHENTICATION_KEY)); @@ -317,12 +338,11 @@ public class AuthenticationServiceTests extends ESTestCase { verifyZeroInteractions(firstRealm); reset(firstRealm); - // checking authentication from the user header threadContext1 = new ThreadContext(Settings.EMPTY); when(threadPool.getThreadContext()).thenReturn(threadContext1); service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); threadContext1.putHeader(Authentication.AUTHENTICATION_KEY, threadContext.getHeader(Authentication.AUTHENTICATION_KEY)); when(cryptoService.unsignAndVerify("_signed_auth")).thenReturn(authentication.encode()); @@ -334,7 +354,7 @@ public class AuthenticationServiceTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(threadContext1); service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); Authentication result = service.authenticate("_action", new InternalMessage(), SystemUser.INSTANCE); assertThat(result, notNullValue()); assertThat(result.getUser(), equalTo(user1)); @@ -344,7 +364,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testAuthenticateTransportContextAndHeaderNoSigning() throws Exception { Settings settings = Settings.builder().put(AuthenticationService.SIGN_USER_HEADER.getKey(), 
false).build(); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); User user1 = new User("username", "r1", "r2"); when(firstRealm.supports(token)).thenReturn(true); @@ -361,7 +381,7 @@ public class AuthenticationServiceTests extends ESTestCase { ThreadContext threadContext1 = new ThreadContext(Settings.EMPTY); when(threadPool.getThreadContext()).thenReturn(threadContext1); service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); threadContext1.putTransient(Authentication.AUTHENTICATION_KEY, threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); threadContext1.putHeader(Authentication.AUTHENTICATION_KEY, threadContext.getHeader(Authentication.AUTHENTICATION_KEY)); Authentication ctxAuth = service.authenticate("_action", message1, SystemUser.INSTANCE); @@ -381,7 +401,7 @@ public class AuthenticationServiceTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(threadContext1); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); Authentication result = service.authenticate("_action", new InternalMessage(), SystemUser.INSTANCE); assertThat(result, notNullValue()); assertThat(result.getUser(), equalTo(user1)); @@ -442,15 +462,15 @@ public class AuthenticationServiceTests extends ESTestCase { builder.put(AnonymousUser.USERNAME_SETTING.getKey(), username); } Settings settings = builder.build(); - AnonymousUser.initialize(settings); + final AnonymousUser anonymousUser = new AnonymousUser(settings); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, new DefaultAuthenticationFailureHandler(), - threadPool); + threadPool, anonymousUser); RestRequest request = new FakeRestRequest(); Authentication result = service.authenticate(request); assertThat(result, notNullValue()); - assertThat(result.getUser(), sameInstance((Object) AnonymousUser.INSTANCE)); + assertThat(result.getUser(), sameInstance((Object) anonymousUser)); assertThreadContextContainsAuthentication(result); } @@ -458,14 +478,14 @@ public class AuthenticationServiceTests extends ESTestCase { Settings settings = Settings.builder() .putArray(AnonymousUser.ROLES_SETTING.getKey(), "r1", "r2", "r3") .build(); - AnonymousUser.initialize(settings); + final AnonymousUser anonymousUser = new AnonymousUser(settings); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); InternalMessage message = new InternalMessage(); Authentication result = service.authenticate("_action", message, null); assertThat(result, notNullValue()); - assertThat(result.getUser(), sameInstance(AnonymousUser.INSTANCE)); + assertThat(result.getUser(), sameInstance(anonymousUser)); assertThreadContextContainsAuthentication(result); } @@ -473,9 +493,9 @@ public class AuthenticationServiceTests extends ESTestCase { Settings settings = Settings.builder() .putArray(AnonymousUser.ROLES_SETTING.getKey(), "r1", "r2", "r3") .build(); - 
AnonymousUser.initialize(settings); + final AnonymousUser anonymousUser = new AnonymousUser(settings); service = new AuthenticationService(settings, realms, auditTrail, cryptoService, - new DefaultAuthenticationFailureHandler(), threadPool); + new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); InternalMessage message = new InternalMessage(); @@ -688,6 +708,40 @@ public class AuthenticationServiceTests extends ESTestCase { } } + public void testAuthenticateTransportDisabledRunAsUser() throws Exception { + AuthenticationToken token = mock(AuthenticationToken.class); + threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + when(secondRealm.token(threadContext)).thenReturn(token); + when(secondRealm.supports(token)).thenReturn(true); + when(secondRealm.authenticate(token)).thenReturn(new User("lookup user", new String[]{"user"})); + when(secondRealm.lookupUser("run_as")) + .thenReturn(new User("looked up user", new String[]{"some role"}, null, null, null, false)); + when(secondRealm.userLookupSupported()).thenReturn(true); + User fallback = randomBoolean() ? SystemUser.INSTANCE : null; + ElasticsearchSecurityException e = + expectThrows(ElasticsearchSecurityException.class, () -> service.authenticate("_action", message, fallback)); + verify(auditTrail).authenticationFailed(token, "_action", message); + verifyNoMoreInteractions(auditTrail); + assertAuthenticationException(e); + } + + public void testAuthenticateRestDisabledRunAsUser() throws Exception { + AuthenticationToken token = mock(AuthenticationToken.class); + threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + when(secondRealm.token(threadContext)).thenReturn(token); + when(secondRealm.supports(token)).thenReturn(true); + when(secondRealm.authenticate(token)).thenReturn(new User("lookup user", new String[]{"user"})); + when(secondRealm.lookupUser("run_as")) + .thenReturn(new User("looked up user", new String[]{"some role"}, null, null, null, false)); + when(secondRealm.userLookupSupported()).thenReturn(true); + + ElasticsearchSecurityException e = + expectThrows(ElasticsearchSecurityException.class, () -> service.authenticate(restRequest)); + verify(auditTrail).authenticationFailed(token, restRequest); + verifyNoMoreInteractions(auditTrail); + assertAuthenticationException(e); + } + private static class InternalMessage extends TransportMessage { } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java index 4081d056530..6ac4a3fe74d 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java @@ -39,8 +39,6 @@ public class RunAsIntegTests extends SecurityIntegTestCase { static final String RUN_AS_USER = "run_as_user"; static final String TRANSPORT_CLIENT_USER = "transport_user"; static final String ROLES = - "transport_client:\n" + - " cluster: [ 'cluster:monitor/nodes/liveness' ]\n" + "run_as_role:\n" + " run_as: [ '" + SecuritySettingsSource.DEFAULT_USER_NAME + "', 'idontexist' ]\n"; diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 
3d162918cfa..5f142a40165 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -599,4 +599,27 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { assertThat(usage.get("fls"), is(fls)); assertThat(usage.get("dls"), is(dls)); } + + public void testSetEnabled() throws Exception { + securityClient().preparePutUser("joe", "s3krit".toCharArray(), SecuritySettingsSource.DEFAULT_ROLE).get(); + final String token = basicAuthHeaderValue("joe", new SecuredString("s3krit".toCharArray())); + ClusterHealthResponse response = client().filterWithHeader(Collections.singletonMap("Authorization", token)) + .admin().cluster().prepareHealth().get(); + assertThat(response.isTimedOut(), is(false)); + + securityClient(client()).prepareSetEnabled("joe", false).get(); + + ElasticsearchSecurityException expected = expectThrows(ElasticsearchSecurityException.class, + () -> client().filterWithHeader(Collections.singletonMap("Authorization", token)).admin().cluster().prepareHealth().get()); + assertThat(expected.status(), is(RestStatus.UNAUTHORIZED)); + + securityClient(client()).prepareSetEnabled("joe", true).get(); + + response = client().filterWithHeader(Collections.singletonMap("Authorization", token)).admin().cluster().prepareHealth().get(); + assertThat(response.isTimedOut(), is(false)); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> securityClient(client()).prepareSetEnabled("not_a_real_user", false).get()); + assertThat(e.getMessage(), containsString("only existing users can be disabled")); + } } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 0c842875f34..14b91156f12 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -74,4 +74,35 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase { .get(); assertThat(healthResponse.getClusterName(), is(cluster().getClusterName())); } + + public void testDisablingUser() throws Exception { + // validate the user works + ClusterHealthResponse response = client() + .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, DEFAULT_PASSWORD))) + .admin() + .cluster() + .prepareHealth() + .get(); + assertThat(response.getClusterName(), is(cluster().getClusterName())); + + // disable user + securityClient().prepareSetEnabled(ElasticUser.NAME, false).get(); + ElasticsearchSecurityException elasticsearchSecurityException = expectThrows(ElasticsearchSecurityException.class, () -> client() + .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, DEFAULT_PASSWORD))) + .admin() + .cluster() + .prepareHealth() + .get()); + assertThat(elasticsearchSecurityException.getMessage(), containsString("authenticate")); + + //enable + securityClient().prepareSetEnabled(ElasticUser.NAME, true).get(); + response = client() + .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, DEFAULT_PASSWORD))) + .admin() 
+ .cluster() + .prepareHealth() + .get(); + assertThat(response.getClusterName(), is(cluster().getClusterName())); + } } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index b84b3cb444f..780aa8742af 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ChangeListener; +import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; @@ -23,8 +23,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -41,20 +39,19 @@ public class ReservedRealmTests extends ESTestCase { @Before public void setupMocks() { - AnonymousUser.initialize(Settings.EMPTY); usersStore = mock(NativeUsersStore.class); when(usersStore.started()).thenReturn(true); } public void testUserStoreNotStarted() { when(usersStore.started()).thenReturn(false); - final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore); + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); final String principal = randomFrom(ElasticUser.NAME, KibanaUser.NAME); ElasticsearchSecurityException expected = expectThrows(ElasticsearchSecurityException.class, () -> reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, DEFAULT_PASSWORD))); assertThat(expected.getMessage(), containsString("failed to authenticate user [" + principal)); - verify(usersStore).addListener(any(ChangeListener.class)); verify(usersStore).started(); verifyNoMoreInteractions(usersStore); } @@ -64,28 +61,29 @@ public class ReservedRealmTests extends ESTestCase { if (securityIndexExists) { when(usersStore.securityIndexExists()).thenReturn(true); } - final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore); - final User expected = randomFrom((User) ElasticUser.INSTANCE, KibanaUser.INSTANCE); + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true)); final String principal = expected.principal(); final User authenticated = reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, DEFAULT_PASSWORD)); - assertThat(authenticated, sameInstance(expected)); 
- verify(usersStore).addListener(any(ChangeListener.class)); + assertEquals(expected, authenticated); verify(usersStore).started(); verify(usersStore).securityIndexExists(); if (securityIndexExists) { - verify(usersStore).reservedUserPassword(principal); + verify(usersStore).getReservedUserInfo(principal); } verifyNoMoreInteractions(usersStore); } public void testAuthenticationWithStoredPassword() throws Throwable { - final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore); - final User expectedUser = randomFrom((User) ElasticUser.INSTANCE, KibanaUser.INSTANCE); + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); final String principal = expectedUser.principal(); final SecuredString newPassword = new SecuredString("foobar".toCharArray()); when(usersStore.securityIndexExists()).thenReturn(true); - when(usersStore.reservedUserPassword(principal)).thenReturn(Hasher.BCRYPT.hash(newPassword)); + when(usersStore.getReservedUserInfo(principal)).thenReturn(new ReservedUserInfo(Hasher.BCRYPT.hash(newPassword), true)); // test default password ElasticsearchSecurityException expected = expectThrows(ElasticsearchSecurityException.class, @@ -93,52 +91,75 @@ public class ReservedRealmTests extends ESTestCase { assertThat(expected.getMessage(), containsString("failed to authenticate user [" + principal)); // the realm assumes it owns the hashed password so it fills it with 0's - when(usersStore.reservedUserPassword(principal)).thenReturn(Hasher.BCRYPT.hash(newPassword)); + when(usersStore.getReservedUserInfo(principal)).thenReturn(new ReservedUserInfo(Hasher.BCRYPT.hash(newPassword), true)); // test new password final User authenticated = reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, newPassword)); - assertThat(authenticated, sameInstance(expectedUser)); - verify(usersStore).addListener(any(ChangeListener.class)); + assertEquals(expectedUser, authenticated); verify(usersStore, times(2)).started(); verify(usersStore, times(2)).securityIndexExists(); - verify(usersStore, times(2)).reservedUserPassword(principal); + verify(usersStore, times(2)).getReservedUserInfo(principal); verifyNoMoreInteractions(usersStore); } - public void testLookup() { - final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore); - final User expectedUser = randomFrom((User) ElasticUser.INSTANCE, KibanaUser.INSTANCE); + public void testLookup() throws Exception { + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); final String principal = expectedUser.principal(); final User user = reservedRealm.doLookupUser(principal); - assertThat(user, sameInstance(expectedUser)); - verify(usersStore).addListener(any(ChangeListener.class)); - verifyNoMoreInteractions(usersStore); + assertEquals(expectedUser, user); + verify(usersStore).started(); + verify(usersStore).securityIndexExists(); final User doesntExist = reservedRealm.doLookupUser("foobar"); assertThat(doesntExist, nullValue()); + verifyNoMoreInteractions(usersStore); } - public void testHelperMethods() { - final User expectedUser = randomFrom((User) ElasticUser.INSTANCE, KibanaUser.INSTANCE); + public void 
testLookupThrows() throws Exception { + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); final String principal = expectedUser.principal(); - assertThat(ReservedRealm.isReserved(principal), is(true)); - assertThat(ReservedRealm.getUser(principal), sameInstance(expectedUser)); + when(usersStore.securityIndexExists()).thenReturn(true); + final RuntimeException e = new RuntimeException("store threw"); + when(usersStore.getReservedUserInfo(principal)).thenThrow(e); + + ElasticsearchSecurityException securityException = + expectThrows(ElasticsearchSecurityException.class, () -> reservedRealm.lookupUser(principal)); + assertThat(securityException.getMessage(), containsString("failed to lookup")); + + verify(usersStore).started(); + verify(usersStore).securityIndexExists(); + verify(usersStore).getReservedUserInfo(principal); + verifyNoMoreInteractions(usersStore); + } + + public void testIsReserved() { + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final String principal = expectedUser.principal(); + assertThat(ReservedRealm.isReserved(principal, Settings.EMPTY), is(true)); final String notExpected = randomFrom("foobar", "", randomAsciiOfLengthBetween(1, 30)); - assertThat(ReservedRealm.isReserved(notExpected), is(false)); - assertThat(ReservedRealm.getUser(notExpected), nullValue()); + assertThat(ReservedRealm.isReserved(notExpected, Settings.EMPTY), is(false)); + } - assertThat(ReservedRealm.users(), containsInAnyOrder((User) ElasticUser.INSTANCE, KibanaUser.INSTANCE)); + public void testGetUsers() { + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + assertThat(reservedRealm.users(), containsInAnyOrder(new ElasticUser(true), new KibanaUser(true))); } public void testFailedAuthentication() { - final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore); + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); // maybe cache a successful auth if (randomBoolean()) { User user = reservedRealm.authenticate( new UsernamePasswordToken(ElasticUser.NAME, new SecuredString("changeme".toCharArray()))); - assertThat(user, sameInstance(ElasticUser.INSTANCE)); + assertEquals(new ElasticUser(true), user); } try { diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java index fbd7ed13548..8885588405f 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.security.authc.file; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LogEvent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -154,7 +153,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void 
testParseFile() throws Exception { Path path = getDataPath("users"); - Map users = FileUserPasswdStore.parseFile(path, null); + Map users = FileUserPasswdStore.parseFile(path, null, Settings.EMPTY); assertThat(users, notNullValue()); assertThat(users.size(), is(6)); assertThat(users.get("bcrypt"), notNullValue()); @@ -174,7 +173,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testParseFile_Empty() throws Exception { Path empty = createTempFile(); Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG); - Map users = FileUserPasswdStore.parseFile(empty, logger); + Map users = FileUserPasswdStore.parseFile(empty, logger, Settings.EMPTY); assertThat(users.isEmpty(), is(true)); List events = CapturingLogger.output(logger.getName(), Level.DEBUG); assertThat(events.size(), is(1)); @@ -184,7 +183,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testParseFile_WhenFileDoesNotExist() throws Exception { Path file = createTempDir().resolve(randomAsciiOfLength(10)); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); - Map users = FileUserPasswdStore.parseFile(file, logger); + Map users = FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY); assertThat(users, notNullValue()); assertThat(users.isEmpty(), is(true)); } @@ -195,7 +194,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); try { - FileUserPasswdStore.parseFile(file, logger); + FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY); fail("expected a parse failure"); } catch (IllegalStateException se) { this.logger.info("expected", se); @@ -205,7 +204,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testParseFile_InvalidLineDoesNotResultInLoggerNPE() throws Exception { Path file = createTempFile(); Files.write(file, Arrays.asList("NotValidUsername=Password", "user:pass"), StandardCharsets.UTF_8); - Map users = FileUserPasswdStore.parseFile(file, null); + Map users = FileUserPasswdStore.parseFile(file, null, Settings.EMPTY); assertThat(users, notNullValue()); assertThat(users.keySet(), hasSize(1)); } @@ -215,7 +214,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { // writing in utf_16 should cause a parsing error as we try to read the file in utf_8 Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); - Map users = FileUserPasswdStore.parseFileLenient(file, logger); + Map users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY); assertThat(users, notNullValue()); assertThat(users.isEmpty(), is(true)); List events = CapturingLogger.output(logger.getName(), Level.ERROR); @@ -226,7 +225,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testParseFileWithLineWithEmptyPasswordAndWhitespace() throws Exception { Path file = createTempFile(); Files.write(file, Collections.singletonList("user: "), StandardCharsets.UTF_8); - Map users = FileUserPasswdStore.parseFile(file, null); + Map users = FileUserPasswdStore.parseFile(file, null, Settings.EMPTY); assertThat(users, notNullValue()); assertThat(users.keySet(), is(empty())); } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java 
b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java index 1ba3dd7c220..7859030831d 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java @@ -175,7 +175,7 @@ public class UsersToolTests extends CommandTestCase { public void testParseInvalidUsername() throws Exception { UserException e = expectThrows(UserException.class, () -> { - UsersTool.parseUsername(Collections.singletonList("$34dkl")); + UsersTool.parseUsername(Collections.singletonList("$34dkl"), Settings.EMPTY); }); assertEquals(ExitCodes.DATA_ERROR, e.exitCode); assertTrue(e.getMessage(), e.getMessage().contains("Invalid username")); @@ -183,7 +183,7 @@ public class UsersToolTests extends CommandTestCase { public void testParseUsernameMissing() throws Exception { UserException e = expectThrows(UserException.class, () -> { - UsersTool.parseUsername(Collections.emptyList()); + UsersTool.parseUsername(Collections.emptyList(), Settings.EMPTY); }); assertEquals(ExitCodes.USAGE, e.exitCode); assertTrue(e.getMessage(), e.getMessage().contains("Missing username argument")); @@ -191,7 +191,7 @@ public class UsersToolTests extends CommandTestCase { public void testParseUsernameExtraArgs() throws Exception { UserException e = expectThrows(UserException.class, () -> { - UsersTool.parseUsername(Arrays.asList("username", "extra")); + UsersTool.parseUsername(Arrays.asList("username", "extra"), Settings.EMPTY); }); assertEquals(ExitCodes.USAGE, e.exitCode); assertTrue(e.getMessage(), e.getMessage().contains("Expected a single username argument")); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index a4c4d57b842..1f0cc5d145c 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -73,7 +73,6 @@ import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.XPackUser; -import org.junit.After; import org.junit.Before; import java.util.ArrayList; @@ -109,12 +108,7 @@ public class AuthorizationServiceTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(threadContext); authorizationService = new AuthorizationService(Settings.EMPTY, rolesStore, clusterService, - auditTrail, new DefaultAuthenticationFailureHandler(), threadPool); - } - - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); + auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); } public void testActionsSystemUserIsAuthorized() { @@ -352,21 +346,22 @@ public class AuthorizationServiceTests extends ESTestCase { public void testDenialForAnonymousUser() { TransportRequest request = new IndicesExistsRequest("b"); ClusterState state = mock(ClusterState.class); - AnonymousUser.initialize(Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build()); - authorizationService = new 
AuthorizationService(Settings.EMPTY, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(), threadPool); + Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build(); + final AnonymousUser anonymousUser = new AnonymousUser(settings); + authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, + new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); when(rolesStore.role("a_all")).thenReturn(Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.EMPTY_META_DATA); try { - authorizationService.authorize(createAuthentication(AnonymousUser.INSTANCE), "indices:a", request); + authorizationService.authorize(createAuthentication(anonymousUser), "indices:a", request); fail("indices request for b should be denied since there is no such index"); } catch (ElasticsearchSecurityException e) { assertAuthorizationException(e, - containsString("action [indices:a] is unauthorized for user [" + AnonymousUser.INSTANCE.principal() + "]")); - verify(auditTrail).accessDenied(AnonymousUser.INSTANCE, "indices:a", request); + containsString("action [indices:a] is unauthorized for user [" + anonymousUser.principal() + "]")); + verify(auditTrail).accessDenied(anonymousUser, "indices:a", request); verifyNoMoreInteractions(auditTrail); verify(clusterService, times(2)).state(); verify(state, times(3)).metaData(); @@ -376,14 +371,13 @@ public class AuthorizationServiceTests extends ESTestCase { public void testDenialForAnonymousUserAuthorizationExceptionDisabled() { TransportRequest request = new IndicesExistsRequest("b"); ClusterState state = mock(ClusterState.class); - AnonymousUser.initialize(Settings.builder() + Settings settings = Settings.builder() .put(AnonymousUser.ROLES_SETTING.getKey(), "a_all") .put(AuthorizationService.ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.getKey(), false) - .build()); - User anonymousUser = AnonymousUser.INSTANCE; - authorizationService = new AuthorizationService( - Settings.builder().put(AuthorizationService.ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.getKey(), false).build(), - rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool); + .build(); + final AnonymousUser anonymousUser = new AnonymousUser(settings); + authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, + new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings)); when(rolesStore.role("a_all")).thenReturn(Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); when(clusterService.state()).thenReturn(state); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java index edfce32992a..29c355e2a02 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java @@ -440,7 +440,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testTemplating() throws Exception { User user = new User("_username", new 
String[]{"role1", "role2"}, "_full_name", "_email", - Collections.singletonMap("key", "value")); + Collections.singletonMap("key", "value"), true); securityIndexSearcherWrapper = new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService) { diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java index f56c4fbe35f..eb821597b0d 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java @@ -25,7 +25,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { public void testProcessor() throws Exception { User user = new User("_username", new String[]{"role1", "role2"}, "firstname lastname", "_email", - Collections.singletonMap("key", "value")); + Collections.singletonMap("key", "value"), true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); @@ -100,7 +100,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { public void testFullNameProperties() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - User user = new User(null, null, "_full_name", null, null); + User user = new User(null, null, "_full_name", null, null, true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); @@ -116,7 +116,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { public void testEmailProperties() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - User user = new User(null, null, null, "_email", null); + User user = new User(null, null, null, "_email", null, true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); @@ -132,7 +132,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { public void testMetadataProperties() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - User user = new User(null, null, null, null, Collections.singletonMap("key", "value")); + User user = new User(null, null, null, null, Collections.singletonMap("key", "value"), true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/indicesresolver/DefaultIndicesResolverTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/indicesresolver/DefaultIndicesResolverTests.java index ae66de55286..87ce3146a0d 100644 --- 
a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/indicesresolver/DefaultIndicesResolverTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/indicesresolver/DefaultIndicesResolverTests.java @@ -32,6 +32,10 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.SecurityTemplateService; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.user.AnonymousUser; +import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.xpack.security.user.XPackUser; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler; import org.elasticsearch.xpack.security.authz.AuthorizationService; @@ -39,9 +43,6 @@ import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; -import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.xpack.security.user.XPackUser; import org.junit.Before; import java.util.Set; @@ -102,7 +103,8 @@ public class DefaultIndicesResolverTests extends ESTestCase { when(state.metaData()).thenReturn(metaData); AuthorizationService authzService = new AuthorizationService(settings, rolesStore, clusterService, - mock(AuditTrailService.class), new DefaultAuthenticationFailureHandler(), mock(ThreadPool.class)); + mock(AuditTrailService.class), new DefaultAuthenticationFailureHandler(), mock(ThreadPool.class), + new AnonymousUser(settings)); defaultIndicesResolver = new DefaultIndicesAndAliasesResolver(authzService, indexNameExpressionResolver); } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index b5c754151dd..45f56ed63c5 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LogEvent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.XPackPlugin; @@ -361,14 +360,14 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(roles, hasKey("admin")); - List events = CapturingLogger.output(logger.getName(), Level.WARN); + List events = CapturingLogger.output(logger.getName(), Level.ERROR); assertThat(events, notNullValue()); assertThat(events, hasSize(4)); // the system role will always be checked first - assertThat(events.get(0), containsString("role [_system] is reserved")); - assertThat(events.get(1), containsString("role [superuser] is reserved")); - assertThat(events.get(2), containsString("role [kibana] is reserved")); - assertThat(events.get(3), 
containsString("role [transport_client] is reserved")); + assertThat(events.get(0), containsString("Role [_system] is reserved")); + assertThat(events.get(1), containsString("Role [superuser] is reserved")); + assertThat(events.get(2), containsString("Role [kibana] is reserved")); + assertThat(events.get(3), containsString("Role [transport_client] is reserved")); } public void testUsageStats() throws Exception { diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java index bffce016af9..0036f141872 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java @@ -45,7 +45,7 @@ public class ReservedRolesStoreTests extends ESTestCase { public void testRetrievingReservedRolesNonKibanaUser() { if (randomBoolean()) { - when(securityContext.getUser()).thenReturn(ElasticUser.INSTANCE); + when(securityContext.getUser()).thenReturn(new ElasticUser(true)); } assertThat(reservedRolesStore.role(SuperuserRole.NAME), sameInstance(SuperuserRole.INSTANCE)); @@ -77,7 +77,7 @@ public class ReservedRolesStoreTests extends ESTestCase { } public void testRetrievingReservedRoleKibanaUser() { - when(securityContext.getUser()).thenReturn(KibanaUser.INSTANCE); + when(securityContext.getUser()).thenReturn(new KibanaUser(true)); assertThat(reservedRolesStore.role(SuperuserRole.NAME), sameInstance(SuperuserRole.INSTANCE)); assertThat(reservedRolesStore.roleDescriptor(SuperuserRole.NAME), sameInstance(SuperuserRole.DESCRIPTOR)); diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/support/ValidationTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/support/ValidationTests.java index f82b779bfbd..05a1128b7c6 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/support/ValidationTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/support/ValidationTests.java @@ -5,11 +5,17 @@ */ package org.elasticsearch.xpack.security.support; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.support.Validation.Error; import org.elasticsearch.xpack.security.support.Validation.Users; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.user.ElasticUser; +import org.elasticsearch.xpack.security.user.KibanaUser; import java.util.Arrays; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -50,7 +56,14 @@ public class ValidationTests extends ESTestCase { public void testUsersValidateUsername() throws Exception { int length = randomIntBetween(1, 30); String name = new String(generateValidName(length)); - assertThat(Users.validateUsername(name), nullValue()); + assertThat(Users.validateUsername(name, false, Settings.EMPTY), nullValue()); + } + + public void testReservedUsernames() { + final String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME); + final Error error = Users.validateUsername(username, false, Settings.EMPTY); + assertNotNull(error); + 
assertThat(error.toString(), containsString("is reserved")); } public void testUsersValidateUsernameInvalidLength() throws Exception { @@ -59,13 +72,13 @@ public class ValidationTests extends ESTestCase { if (length > 0) { name = generateValidName(length); } - assertThat(Users.validateUsername(new String(name)), notNullValue()); + assertThat(Users.validateUsername(new String(name), false, Settings.EMPTY), notNullValue()); } public void testUsersValidateUsernameInvalidCharacters() throws Exception { int length = randomIntBetween(1, 30); // valid length String name = new String(generateInvalidName(length)); - assertThat(Users.validateUsername(name), notNullValue()); + assertThat(Users.validateUsername(name, false, Settings.EMPTY), notNullValue()); } public void testUsersValidatePassword() throws Exception { @@ -84,19 +97,29 @@ public class ValidationTests extends ESTestCase { assertThat(Validation.Roles.validateRoleName(name), nullValue()); } + public void testReservedRoleName() { + final String rolename = randomFrom(ReservedRolesStore.names()); + final Error error = Validation.Roles.validateRoleName(rolename); + assertNotNull(error); + assertThat(error.toString(), containsString("is reserved")); + + final Error allowed = Validation.Roles.validateRoleName(rolename, true); + assertNull(allowed); + } + public void testRolesValidateRoleNameInvalidLength() throws Exception { int length = frequently() ? randomIntBetween(31, 200) : 0; // invalid length char[] name = new char[length]; if (length > 0) { name = generateValidName(length); } - assertThat(Users.validateUsername(new String(name)), notNullValue()); + assertThat(Users.validateUsername(new String(name), false, Settings.EMPTY), notNullValue()); } public void testRolesValidateRoleNameInvalidCharacters() throws Exception { int length = randomIntBetween(1, 30); // valid length String name = new String(generateInvalidName(length)); - assertThat(Users.validateUsername(name), notNullValue()); + assertThat(Users.validateUsername(name, false, Settings.EMPTY), notNullValue()); } private static char[] generateValidName(int length) { diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserTests.java index f4b85ad83da..f2dd7298d2d 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.user; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.junit.After; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -16,26 +15,19 @@ import static org.hamcrest.Matchers.is; public class AnonymousUserTests extends ESTestCase { - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testResolveAnonymousUser() throws Exception { Settings settings = Settings.builder() .put(AnonymousUser.USERNAME_SETTING.getKey(), "anonym1") .putArray(AnonymousUser.ROLES_SETTING.getKey(), "r1", "r2", "r3") .build(); - AnonymousUser.initialize(settings); - User user = AnonymousUser.INSTANCE; + AnonymousUser user = new AnonymousUser(settings); assertThat(user.principal(), 
equalTo("anonym1")); assertThat(user.roles(), arrayContainingInAnyOrder("r1", "r2", "r3")); settings = Settings.builder() .putArray(AnonymousUser.ROLES_SETTING.getKey(), "r1", "r2", "r3") .build(); - AnonymousUser.initialize(settings); - user = AnonymousUser.INSTANCE; + user = new AnonymousUser(settings); assertThat(user.principal(), equalTo(AnonymousUser.DEFAULT_ANONYMOUS_USERNAME)); assertThat(user.roles(), arrayContainingInAnyOrder("r1", "r2", "r3")); } @@ -44,8 +36,7 @@ public class AnonymousUserTests extends ESTestCase { Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put(AnonymousUser.USERNAME_SETTING.getKey(), "user1").build(); - AnonymousUser.initialize(settings); - assertThat(AnonymousUser.enabled(), is(false)); + assertThat(AnonymousUser.isAnonymousEnabled(settings), is(false)); } public void testAnonymous() throws Exception { @@ -54,24 +45,21 @@ public class AnonymousUserTests extends ESTestCase { settings = Settings.builder().put(settings).put(AnonymousUser.USERNAME_SETTING.getKey(), "anon").build(); } - AnonymousUser.initialize(settings); - User user = AnonymousUser.INSTANCE; - assertThat(AnonymousUser.is(user), is(true)); - assertThat(AnonymousUser.isAnonymousUsername(user.principal()), is(true)); + AnonymousUser user = new AnonymousUser(settings); + assertEquals(user, new AnonymousUser(settings)); + assertThat(AnonymousUser.isAnonymousUsername(user.principal(), settings), is(true)); // make sure check works with serialization BytesStreamOutput output = new BytesStreamOutput(); User.writeTo(user, output); User anonymousSerialized = User.readFrom(output.bytes().streamInput()); - assertThat(AnonymousUser.is(anonymousSerialized), is(true)); + assertEquals(user, anonymousSerialized); - // test with null anonymous - AnonymousUser.initialize(Settings.EMPTY); - assertThat(AnonymousUser.is(null), is(false)); + // test with anonymous disabled if (user.principal().equals(AnonymousUser.DEFAULT_ANONYMOUS_USERNAME)) { - assertThat(AnonymousUser.isAnonymousUsername(user.principal()), is(true)); + assertThat(AnonymousUser.isAnonymousUsername(user.principal(), Settings.EMPTY), is(true)); } else { - assertThat(AnonymousUser.isAnonymousUsername(user.principal()), is(false)); + assertThat(AnonymousUser.isAnonymousUsername(user.principal(), Settings.EMPTY), is(false)); } } } diff --git a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java index 08f42022328..dbfbef84490 100644 --- a/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java +++ b/elasticsearch/x-pack/security/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java @@ -6,14 +6,11 @@ package org.elasticsearch.xpack.security.user; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.xpack.security.support.MetadataUtils; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.Collections; -import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -103,7 +100,7 @@ public class UserTests extends ESTestCase { public void testUserToString() throws Exception { User user = new User("u1", "r1"); assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={}]")); - user = new 
User("u1", new String[] { "r1", "r2" }, "user1", "user1@domain.com", Collections.singletonMap("key", "val")); + user = new User("u1", new String[] { "r1", "r2" }, "user1", "user1@domain.com", Collections.singletonMap("key", "val"), true); assertThat(user.toString(), is("User[username=u1,roles=[r1,r2],fullName=user1,email=user1@domain.com,metadata={key=val}]")); user = new User("u1", new String[] {"r1", "r2"}, new User("u2", "r3")); assertThat(user.toString(), is("User[username=u1,roles=[r1,r2],fullName=null,email=null,metadata={},runAs=[User[username=u2," + @@ -112,27 +109,17 @@ public class UserTests extends ESTestCase { public void testReservedUserSerialization() throws Exception { BytesStreamOutput output = new BytesStreamOutput(); - User.writeTo(ElasticUser.INSTANCE, output); + final ElasticUser elasticUser = new ElasticUser(true); + User.writeTo(elasticUser, output); User readFrom = User.readFrom(output.bytes().streamInput()); - assertThat(readFrom, is(sameInstance(ElasticUser.INSTANCE))); + assertEquals(elasticUser, readFrom); + final KibanaUser kibanaUser = new KibanaUser(true); output = new BytesStreamOutput(); - User.writeTo(KibanaUser.INSTANCE, output); + User.writeTo(kibanaUser, output); readFrom = User.readFrom(output.bytes().streamInput()); - assertThat(readFrom, is(sameInstance(KibanaUser.INSTANCE))); - } - - public void testReservedMetadata() throws Exception { - Map validMetadata = Collections.singletonMap("foo", "bar"); - Map invalidMetadata = Collections.singletonMap(MetadataUtils.RESERVED_PREFIX + "foo", "bar"); - - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> - new User("john", Strings.EMPTY_ARRAY, "John Doe", "john@doe.com", invalidMetadata)); - assertThat(exception.getMessage(), containsString("reserved")); - - User user = new User("john", Strings.EMPTY_ARRAY, "John Doe", "john@doe.com", validMetadata); - assertNotNull(user); + assertEquals(kibanaUser, readFrom); } } diff --git a/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/actions b/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/actions index f680b281f94..73ffb3b4dfa 100644 --- a/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/actions +++ b/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/actions @@ -87,6 +87,7 @@ cluster:admin/xpack/security/user/change_password cluster:admin/xpack/security/user/put cluster:admin/xpack/security/user/delete cluster:admin/xpack/security/user/get +cluster:admin/xpack/security/user/set_enabled cluster:admin/xpack/security/role/put cluster:admin/xpack/security/role/delete cluster:admin/xpack/security/role/get diff --git a/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/handlers b/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/handlers index 0de48418d6b..bb3374ee88c 100644 --- a/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/handlers +++ b/elasticsearch/x-pack/security/src/test/resources/org/elasticsearch/transport/handlers @@ -21,6 +21,7 @@ cluster:admin/xpack/security/user/change_password cluster:admin/xpack/security/user/put cluster:admin/xpack/security/user/delete cluster:admin/xpack/security/user/get +cluster:admin/xpack/security/user/set_enabled indices:admin/analyze[s] indices:admin/cache/clear[n] indices:admin/forcemerge[n] diff --git 
a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.change_password.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.change_password.json index 9b8b4100663..b193284c1e3 100644 --- a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.change_password.json +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.change_password.json @@ -14,8 +14,9 @@ }, "params": { "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." } } }, diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json index 3a04be73dc2..365d3ba4a5c 100644 --- a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json @@ -14,8 +14,9 @@ }, "params": { "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." } } }, diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json index 70d3cad0759..4e6c1cc5370 100644 --- a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json @@ -14,8 +14,9 @@ }, "params": { "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." 
} } }, diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json new file mode 100644 index 00000000000..75c1d26cd8a --- /dev/null +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json @@ -0,0 +1,25 @@ +{ + "xpack.security.disable_user": { + "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-disable-user.html", + "methods": [ "PUT", "POST" ], + "url": { + "path": "/_xpack/security/user/{username}/_disable", + "paths": [ "/_xpack/security/user/{username}/_disable" ], + "parts": { + "username": { + "type" : "string", + "description" : "The username of the user to disable", + "required" : false + } + }, + "params": { + "refresh": { + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." + } + } + }, + "body": null + } +} diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json new file mode 100644 index 00000000000..eaf40c09275 --- /dev/null +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json @@ -0,0 +1,25 @@ +{ + "xpack.security.enable_user": { + "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-enable-user.html", + "methods": [ "PUT", "POST" ], + "url": { + "path": "/_xpack/security/user/{username}/_enable", + "paths": [ "/_xpack/security/user/{username}/_enable" ], + "parts": { + "username": { + "type" : "string", + "description" : "The username of the user to enable", + "required" : false + } + }, + "params": { + "refresh": { + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." + } + } + }, + "body": null + } +} diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_role.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_role.json index 93af66619a4..c2d51dc016a 100644 --- a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_role.json +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_role.json @@ -14,8 +14,9 @@ }, "params": { "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." 
} } }, diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_user.json b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_user.json index c6aa13727f2..a589dd1e61d 100644 --- a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_user.json +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/api/xpack.security.put_user.json @@ -14,8 +14,9 @@ }, "params": { "refresh": { - "type" : "boolean", - "description" : "Refresh the index after performing the operation" + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." } } }, diff --git a/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/test/users/30_enable_disable.yaml b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/test/users/30_enable_disable.yaml new file mode 100644 index 00000000000..1d906f5d003 --- /dev/null +++ b/elasticsearch/x-pack/security/src/test/resources/rest-api-spec/test/users/30_enable_disable.yaml @@ -0,0 +1,124 @@ +--- +setup: + - skip: + features: [headers, catch_unauthorized] + - do: + cluster.health: + wait_for_status: yellow + + - do: + xpack.security.put_user: + username: "joe" + body: > + { + "password": "s3krit", + "roles" : [ "superuser" ] + } + +--- +teardown: + - do: + xpack.security.delete_user: + username: "joe" + ignore: 404 + +--- +"Test disable then enable user": +# check that the user works + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +# disable the user + - do: + xpack.security.disable_user: + username: "joe" + +# validate user cannot login + - do: + catch: unauthorized + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + +# enable the user + - do: + xpack.security.enable_user: + username: "joe" + +# validate that the user can login again + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +--- +"Test enabling already enabled user": +# check that the user works + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +# enable the user + - do: + xpack.security.enable_user: + username: "joe" + +# validate that the user still works + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +--- +"Test disabling already disabled user": +# check that the user works + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +# disable the user + - do: + xpack.security.disable_user: + username: "joe" + +# validate user cannot login + - do: + catch: unauthorized + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + +# disable again + - do: + xpack.security.disable_user: + username: "joe" + + - do: + xpack.security.enable_user: + username: "joe" + +--- +"Test disabling yourself": +# check that the user works + - do: + headers: + Authorization: "Basic am9lOnMza3JpdA==" + cluster.health: {} + - match: { timed_out: false } + +# try to disable yourself + - do: + catch: '/users may not update the enabled status of their own account/' + headers: + Authorization: "Basic 
am9lOnMza3JpdA==" + xpack.security.disable_user: + username: "joe" diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/XPackPlugin.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/XPackPlugin.java index 9de4e85f5c0..5fff3a3d0c3 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/XPackPlugin.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/XPackPlugin.java @@ -170,7 +170,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I this.licensing = new Licensing(settings); this.security = new Security(settings, env, licenseState, sslService); - this.monitoring = new Monitoring(settings, env, licenseState); + this.monitoring = new Monitoring(settings, licenseState); this.watcher = new Watcher(settings); this.graph = new Graph(settings); // Check if the node is a transport client. diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java index 34c76e46272..ceb46a813f8 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -18,10 +17,10 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.xpack.common.http.auth.HttpAuth; import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.watcher.support.WatcherUtils; -import org.elasticsearch.xpack.common.http.auth.HttpAuth; import java.io.IOException; import java.io.UnsupportedEncodingException; @@ -31,6 +30,7 @@ import java.net.URLDecoder; import java.net.URLEncoder; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -159,13 +159,15 @@ public class HttpRequest implements ToXContent { builder.field(Field.BODY.getPreferredName(), body); } if (connectionTimeout != null) { - builder.field(Field.CONNECTION_TIMEOUT.getPreferredName(), connectionTimeout); + builder.timeValueField(HttpRequest.Field.CONNECTION_TIMEOUT.getPreferredName(), + HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.getPreferredName(), connectionTimeout); } if (readTimeout != null) { - builder.field(Field.READ_TIMEOUT.getPreferredName(), readTimeout); + builder.timeValueField(HttpRequest.Field.READ_TIMEOUT.getPreferredName(), + HttpRequest.Field.READ_TIMEOUT_HUMAN.getPreferredName(), readTimeout); } if (proxy != null) { - builder.field(Field.PROXY.getPreferredName(), proxy); + proxy.toXContent(builder, params); } return builder.endObject(); } @@ -194,19 +196,7 @@ public class HttpRequest implements ToXContent { @Override public int hashCode() { - int result = host.hashCode(); - result = 31 * result + port; - result = 
31 * result + scheme.hashCode(); - result = 31 * result + method.hashCode(); - result = 31 * result + (path != null ? path.hashCode() : 0); - result = 31 * result + params.hashCode(); - result = 31 * result + headers.hashCode(); - result = 31 * result + (auth != null ? auth.hashCode() : 0); - result = 31 * result + (connectionTimeout != null ? connectionTimeout.hashCode() : 0); - result = 31 * result + (readTimeout != null ? readTimeout.hashCode() : 0); - result = 31 * result + (body != null ? body.hashCode() : 0); - result = 31 * result + (proxy != null ? proxy.hashCode() : 0); - return result; + return Objects.hash(host, port, scheme, method, path, params, headers, auth, connectionTimeout, readTimeout, body, proxy); } @Override @@ -269,19 +259,26 @@ public class HttpRequest implements ToXContent { } } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.AUTH)) { builder.auth(httpAuthRegistry.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.CONNECTION_TIMEOUT)) { + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT)) { + builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue())); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN)) { + // Users and 2.x specify the timeout this way try { - builder.connectionTimeout(WatcherDateTimeUtils.parseTimeValue(parser, Field.CONNECTION_TIMEOUT.toString())); + builder.connectionTimeout(WatcherDateTimeUtils.parseTimeValue(parser, + HttpRequest.Field.CONNECTION_TIMEOUT.toString())); } catch (ElasticsearchParseException pe) { - throw new ElasticsearchParseException("could not parse http request. invalid time value for [{}] field", pe, - currentFieldName); + throw new ElasticsearchParseException("could not parse http request template. invalid time value for [{}] field", + pe, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.READ_TIMEOUT)) { + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT)) { + builder.readTimeout(TimeValue.timeValueMillis(parser.longValue())); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT_HUMAN)) { + // Users and 2.x specify the timeout this way try { - builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, Field.READ_TIMEOUT.toString())); + builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString())); } catch (ElasticsearchParseException pe) { - throw new ElasticsearchParseException("could not parse http request. invalid time value for [{}] field", pe, - currentFieldName); + throw new ElasticsearchParseException("could not parse http request template. 
invalid time value for [{}] field", + pe, currentFieldName); } } else if (token == XContentParser.Token.START_OBJECT) { if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.HEADERS)) { @@ -482,8 +479,10 @@ public class HttpRequest implements ToXContent { ParseField HEADERS = new ParseField("headers"); ParseField AUTH = new ParseField("auth"); ParseField BODY = new ParseField("body"); - ParseField CONNECTION_TIMEOUT = new ParseField("connection_timeout"); - ParseField READ_TIMEOUT = new ParseField("read_timeout"); + ParseField CONNECTION_TIMEOUT = new ParseField("connection_timeout_in_millis"); + ParseField CONNECTION_TIMEOUT_HUMAN = new ParseField("connection_timeout"); + ParseField READ_TIMEOUT = new ParseField("read_timeout_millis"); + ParseField READ_TIMEOUT_HUMAN = new ParseField("read_timeout"); ParseField PROXY = new ParseField("proxy"); ParseField URL = new ParseField("url"); } diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java index 240bfc644c2..4a1c6d58950 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java @@ -15,11 +15,12 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.common.http.auth.HttpAuth; import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry; +import org.elasticsearch.xpack.common.text.TextTemplate; import org.elasticsearch.xpack.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; -import org.elasticsearch.xpack.common.text.TextTemplate; import org.jboss.netty.handler.codec.http.HttpHeaders; import java.io.IOException; @@ -32,8 +33,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; -/** - */ public class HttpRequestTemplate implements ToXContent { private final Scheme scheme; @@ -193,10 +192,12 @@ public class HttpRequestTemplate implements ToXContent { builder.field(HttpRequest.Field.BODY.getPreferredName(), body, params); } if (connectionTimeout != null) { - builder.field(HttpRequest.Field.CONNECTION_TIMEOUT.getPreferredName(), connectionTimeout); + builder.timeValueField(HttpRequest.Field.CONNECTION_TIMEOUT.getPreferredName(), + HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.getPreferredName(), connectionTimeout); } if (readTimeout != null) { - builder.field(HttpRequest.Field.READ_TIMEOUT.getPreferredName(), readTimeout); + builder.timeValueField(HttpRequest.Field.READ_TIMEOUT.getPreferredName(), + HttpRequest.Field.READ_TIMEOUT_HUMAN.getPreferredName(), readTimeout); } if (proxy != null) { proxy.toXContent(builder, params); @@ -242,6 +243,11 @@ public class HttpRequestTemplate implements ToXContent { return result; } + @Override + public String toString() { + return Strings.toString(this); + } + public static Builder builder(String host, int port) { return new Builder(host, port); } @@ -280,6 +286,9 @@ public class HttpRequestTemplate implements ToXContent { } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.URL)) { 
builder.fromUrl(parser.text()); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT)) { + builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue())); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN)) { + // Users and 2.x specify the timeout this way try { builder.connectionTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.CONNECTION_TIMEOUT.toString())); @@ -288,6 +297,9 @@ public class HttpRequestTemplate implements ToXContent { pe, currentFieldName); } } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT)) { + builder.readTimeout(TimeValue.timeValueMillis(parser.longValue())); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT_HUMAN)) { + // Users and 2.x specify the timeout this way try { builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString())); } catch (ElasticsearchParseException pe) { @@ -396,11 +408,7 @@ public class HttpRequestTemplate implements ToXContent { } public Builder path(String path) { - return path(TextTemplate.inline(path)); - } - - public Builder path(TextTemplate.Builder path) { - return path(path.build()); + return path(new TextTemplate(path)); } public Builder path(TextTemplate path) { @@ -413,10 +421,6 @@ public class HttpRequestTemplate implements ToXContent { return this; } - public Builder putParam(String key, TextTemplate.Builder value) { - return putParam(key, value.build()); - } - public Builder putParam(String key, TextTemplate value) { this.params.put(key, value); return this; @@ -427,10 +431,6 @@ public class HttpRequestTemplate implements ToXContent { return this; } - public Builder putHeader(String key, TextTemplate.Builder value) { - return putHeader(key, value.build()); - } - public Builder putHeader(String key, TextTemplate value) { this.headers.put(key, value); return this; @@ -442,11 +442,7 @@ public class HttpRequestTemplate implements ToXContent { } public Builder body(String body) { - return body(TextTemplate.inline(body)); - } - - public Builder body(TextTemplate.Builder body) { - return body(body.build()); + return body(new TextTemplate(body)); } public Builder body(TextTemplate body) { @@ -454,8 +450,8 @@ public class HttpRequestTemplate implements ToXContent { return this; } - public Builder body(XContentBuilder content) { - return body(TextTemplate.inline(content)); + public Builder body(XContentBuilder content) throws IOException { + return body(new TextTemplate(content.string(), content.contentType(), ScriptService.ScriptType.INLINE, null)); } public Builder connectionTimeout(TimeValue timeout) { @@ -492,7 +488,7 @@ public class HttpRequestTemplate implements ToXContent { port = uri.getPort() > 0 ? 
uri.getPort() : scheme.defaultPort(); host = uri.getHost(); if (Strings.hasLength(uri.getPath())) { - path = TextTemplate.inline(uri.getPath()).build(); + path = new TextTemplate(uri.getPath()); } String rawQuery = uri.getRawQuery(); @@ -500,7 +496,7 @@ public class HttpRequestTemplate implements ToXContent { Map stringParams = new HashMap<>(); RestUtils.decodeQueryString(rawQuery, 0, stringParams); for (Map.Entry entry : stringParams.entrySet()) { - params.put(entry.getKey(), TextTemplate.inline(entry.getValue()).build()); + params.put(entry.getKey(), new TextTemplate(entry.getValue())); } } } catch (URISyntaxException e) { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java index 7dabf71449b..a3c31fac856 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java @@ -5,58 +5,65 @@ */ package org.elasticsearch.xpack.common.text; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; -import java.util.Collections; import java.util.Map; +import java.util.Objects; /** + * Holds a template to be used in many places in a watch as configuration. * + * One liner templates are kept around as just strings and {@link Script} is used for + * parsing/serialization logic for any non inlined templates and/or when templates + * have custom params, lang or content type. */ public class TextTemplate implements ToXContent { - private final String template; - @Nullable private final XContentType contentType; - @Nullable private final ScriptType type; - @Nullable private final Map params; + private final Script script; + private final String inlineTemplate; public TextTemplate(String template) { - this(template, null, null, null); + this.script = null; + this.inlineTemplate = template; } - public TextTemplate(String template, @Nullable XContentType contentType, @Nullable ScriptType type, + public TextTemplate(String template, @Nullable XContentType contentType, ScriptType type, @Nullable Map params) { - this.template = template; - this.contentType = contentType; - this.type = type; - this.params = params; + this.script = new Script(template, type, "mustache", params, contentType); + this.inlineTemplate = null; + } + + public TextTemplate(Script script) { + this.script = script; + this.inlineTemplate = null; + } + + public Script getScript() { + return script; } public String getTemplate() { - return template; + return script != null ? script.getScript() : inlineTemplate; } public XContentType getContentType() { - return contentType; + return script != null ? script.getContentType() : null; } public ScriptType getType() { - return type != null ? type : ScriptType.INLINE; + return script != null ? 
script.getType(): ScriptType.INLINE; } public Map getParams() { - return params != null ? params : Collections.emptyMap(); + return script != null ? script.getParams(): null; } @Override @@ -65,210 +72,31 @@ public class TextTemplate implements ToXContent { if (o == null || getClass() != o.getClass()) return false; TextTemplate template1 = (TextTemplate) o; - - if (!template.equals(template1.template)) return false; - if (contentType != template1.contentType) return false; - if (type != template1.type) return false; - return !(params != null ? !params.equals(template1.params) : template1.params != null); - + return Objects.equals(script, template1.script) && + Objects.equals(inlineTemplate, template1.inlineTemplate); } @Override public int hashCode() { - int result = template.hashCode(); - result = 31 * result + (contentType != null ? contentType.hashCode() : 0); - result = 31 * result + (type != null ? type.hashCode() : 0); - result = 31 * result + (params != null ? params.hashCode() : 0); - return result; + return Objects.hash(script, inlineTemplate); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (type == null) { - return builder.value(template); + if (script != null) { + script.toXContent(builder, params); + } else { + builder.value(inlineTemplate); } - builder.startObject(); - switch (type) { - case INLINE: - if (contentType != null && builder.contentType() == contentType) { - builder.rawField(Field.INLINE.getPreferredName(), new BytesArray(template)); - } else { - builder.field(Field.INLINE.getPreferredName(), template); - } - break; - case FILE: - builder.field(Field.FILE.getPreferredName(), template); - break; - default: // STORED - assert type == ScriptType.STORED : "template type [" + type + "] is not supported"; - builder.field(Field.ID.getPreferredName(), template); - } - if (this.params != null) { - builder.field(Field.PARAMS.getPreferredName(), this.params); - } - return builder.endObject(); + return builder; } public static TextTemplate parse(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - if (token.isValue()) { - return new TextTemplate(String.valueOf(parser.objectText())); + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new TextTemplate(parser.text()); + } else { + return new TextTemplate(Script.parse(parser, ParseFieldMatcher.STRICT, "mustache")); } - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException("expected a string value or an object, but found [{}] instead", token); - } - - String template = null; - XContentType contentType = null; - ScriptType type = null; - Map params = null; - - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.INLINE)) { - type = ScriptType.INLINE; - if (token.isValue()) { - template = String.valueOf(parser.objectText()); - } else { - contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType); - template = builder.copyCurrentStructure(parser).bytes().utf8ToString(); - } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.FILE)) { - type = ScriptType.FILE; - if (token == XContentParser.Token.VALUE_STRING) { - template = parser.text(); - } else { - throw new 
ElasticsearchParseException("expected a string value for field [{}], but found [{}]", - currentFieldName, token); - } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ID)) { - type = ScriptType.STORED; - if (token == XContentParser.Token.VALUE_STRING) { - template = parser.text(); - } else { - throw new ElasticsearchParseException("expected a string value for field [{}], but found [{}]", - currentFieldName, token); - } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PARAMS)) { - if (token == XContentParser.Token.START_OBJECT) { - params = parser.map(); - } else { - throw new ElasticsearchParseException("expected an object for field [{}], but found [{}]", currentFieldName, token); - } - } else { - throw new ElasticsearchParseException("unexpected field [{}]", currentFieldName); - } - } - if (template == null) { - throw new ElasticsearchParseException("expected one of [{}], [{}] or [{}] fields, but found none", - Field.INLINE.getPreferredName(), Field.FILE.getPreferredName(), Field.ID.getPreferredName()); - } - assert type != null : "if template is not null, type should definitely not be null"; - return new TextTemplate(template, contentType, type, params); - } - - public static Builder inline(XContentBuilder template) { - return new Builder.Inline(template.bytes().utf8ToString()).contentType(template.contentType()); - } - - public static Builder inline(String text) { - return new Builder.Inline(text); - } - - public static Builder file(String file) { - return new Builder.File(file); - } - - public static Builder indexed(String id) { - return new Builder.Indexed(id); - } - - public static Builder.DefaultType defaultType(String text) { - return new Builder.DefaultType(text); - } - - public abstract static class Builder { - - protected final ScriptType type; - protected final String template; - protected Map params; - - protected Builder(String template, ScriptType type) { - this.template = template; - this.type = type; - } - - public B params(Map params) { - this.params = params; - return (B) this; - } - - public abstract TextTemplate build(); - - public static class Inline extends Builder { - - private XContentType contentType; - - public Inline(String script) { - super(script, ScriptType.INLINE); - } - - public Inline contentType(XContentType contentType) { - this.contentType = contentType; - return this; - } - - @Override - public TextTemplate build() { - return new TextTemplate(template, contentType, type, params); - } - } - - public static class File extends Builder { - - public File(String file) { - super(file, ScriptType.FILE); - } - - @Override - public TextTemplate build() { - return new TextTemplate(template, null, type, params); - } - } - - public static class Indexed extends Builder { - - public Indexed(String id) { - super(id, ScriptType.STORED); - } - - @Override - public TextTemplate build() { - return new TextTemplate(template, null, type, params); - } - } - - public static class DefaultType extends Builder { - - public DefaultType(String text) { - super(text, null); - } - - @Override - public TextTemplate build() { - return new TextTemplate(template, null, type, params); - } - } - } - - public interface Field { - ParseField INLINE = new ParseField("inline"); - ParseField FILE = new ParseField("file"); - ParseField ID = new ParseField("id"); - ParseField PARAMS = new ParseField("params"); } } diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java 
b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java index ba2b94230df..fa60aa00759 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java @@ -28,18 +28,24 @@ public class TextTemplateEngine extends AbstractComponent { this.service = service; } - // TODO: move over to use o.e.script.Script instead - public String render(TextTemplate template, Map model) { - if (template == null) { + public String render(TextTemplate textTemplate, Map model) { + if (textTemplate == null) { return null; } + String template = textTemplate.getTemplate(); XContentType contentType = detectContentType(template); Map compileParams = compileParams(contentType); - template = trimContentType(template); + template = trimContentType(textTemplate); - CompiledScript compiledScript = service.compile(convert(template, model), Watcher.SCRIPT_CONTEXT, - compileParams); + Map mergedModel = new HashMap<>(); + if (textTemplate.getParams() != null) { + mergedModel.putAll(textTemplate.getParams()); + } + mergedModel.putAll(model); + + Script script = new Script(template, textTemplate.getType(), "mustache", mergedModel, textTemplate.getContentType()); + CompiledScript compiledScript = service.compile(script, Watcher.SCRIPT_CONTEXT, compileParams); ExecutableScript executable = service.executable(compiledScript, model); Object result = executable.run(); if (result instanceof BytesReference) { @@ -48,10 +54,10 @@ public class TextTemplateEngine extends AbstractComponent { return result.toString(); } - private TextTemplate trimContentType(TextTemplate textTemplate) { + private String trimContentType(TextTemplate textTemplate) { String template = textTemplate.getTemplate(); if (!template.startsWith("__")){ - return textTemplate; //Doesn't even start with __ so can't have a content type + return template; //Doesn't even start with __ so can't have a content type } // There must be a __ model) { - Map mergedModel = new HashMap<>(); - mergedModel.putAll(textTemplate.getParams()); - mergedModel.putAll(model); - return new Script(textTemplate.getTemplate(), textTemplate.getType(), "mustache", mergedModel, textTemplate.getContentType()); - } - private Map compileParams(XContentType contentType) { if (contentType == XContentType.JSON) { return Collections.singletonMap("content_type", "application/json"); diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/email/EmailTemplate.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/email/EmailTemplate.java index ff371c7a79b..bdca441cebc 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/email/EmailTemplate.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/email/EmailTemplate.java @@ -21,9 +21,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -/** - * - */ public class EmailTemplate implements ToXContent { final TextTemplate from; @@ -238,11 +235,7 @@ public class EmailTemplate implements ToXContent { } public Builder from(String from) { - return from(TextTemplate.inline(from)); - } - - public Builder from(TextTemplate.Builder from) { - return from(from.build()); + return from(new TextTemplate(from)); } public Builder from(TextTemplate from) { @@ -253,15 +246,7 @@ public class EmailTemplate implements ToXContent { public Builder replyTo(String... 
replyTo) { TextTemplate[] templates = new TextTemplate[replyTo.length]; for (int i = 0; i < templates.length; i++) { - templates[i] = TextTemplate.defaultType(replyTo[i]).build(); - } - return replyTo(templates); - } - - public Builder replyTo(TextTemplate.Builder... replyTo) { - TextTemplate[] templates = new TextTemplate[replyTo.length]; - for (int i = 0; i < templates.length; i++) { - templates[i] = replyTo[i].build(); + templates[i] = new TextTemplate(replyTo[i]); } return replyTo(templates); } @@ -272,11 +257,7 @@ public class EmailTemplate implements ToXContent { } public Builder priority(Email.Priority priority) { - return priority(TextTemplate.inline(priority.name())); - } - - public Builder priority(TextTemplate.Builder priority) { - return priority(priority.build()); + return priority(new TextTemplate(priority.name())); } public Builder priority(TextTemplate priority) { @@ -287,15 +268,7 @@ public class EmailTemplate implements ToXContent { public Builder to(String... to) { TextTemplate[] templates = new TextTemplate[to.length]; for (int i = 0; i < templates.length; i++) { - templates[i] = TextTemplate.defaultType(to[i]).build(); - } - return to(templates); - } - - public Builder to(TextTemplate.Builder... to) { - TextTemplate[] templates = new TextTemplate[to.length]; - for (int i = 0; i < templates.length; i++) { - templates[i] = to[i].build(); + templates[i] = new TextTemplate(to[i]); } return to(templates); } @@ -308,15 +281,7 @@ public class EmailTemplate implements ToXContent { public Builder cc(String... cc) { TextTemplate[] templates = new TextTemplate[cc.length]; for (int i = 0; i < templates.length; i++) { - templates[i] = TextTemplate.defaultType(cc[i]).build(); - } - return cc(templates); - } - - public Builder cc(TextTemplate.Builder... cc) { - TextTemplate[] templates = new TextTemplate[cc.length]; - for (int i = 0; i < templates.length; i++) { - templates[i] = cc[i].build(); + templates[i] = new TextTemplate(cc[i]); } return cc(templates); } @@ -329,15 +294,7 @@ public class EmailTemplate implements ToXContent { public Builder bcc(String... bcc) { TextTemplate[] templates = new TextTemplate[bcc.length]; for (int i = 0; i < templates.length; i++) { - templates[i] = TextTemplate.defaultType(bcc[i]).build(); - } - return bcc(templates); - } - - public Builder bcc(TextTemplate.Builder... 
bcc) { - TextTemplate[] templates = new TextTemplate[bcc.length]; - for (int i = 0; i < templates.length; i++) { - templates[i] = bcc[i].build(); + templates[i] = new TextTemplate(bcc[i]); } return bcc(templates); } @@ -348,11 +305,7 @@ public class EmailTemplate implements ToXContent { } public Builder subject(String subject) { - return subject(TextTemplate.defaultType(subject)); - } - - public Builder subject(TextTemplate.Builder subject) { - return subject(subject.build()); + return subject(new TextTemplate(subject)); } public Builder subject(TextTemplate subject) { @@ -361,11 +314,7 @@ public class EmailTemplate implements ToXContent { } public Builder textBody(String text) { - return textBody(TextTemplate.defaultType(text)); - } - - public Builder textBody(TextTemplate.Builder text) { - return textBody(text.build()); + return textBody(new TextTemplate(text)); } public Builder textBody(TextTemplate text) { @@ -374,11 +323,7 @@ public class EmailTemplate implements ToXContent { } public Builder htmlBody(String html) { - return htmlBody(TextTemplate.defaultType(html)); - } - - public Builder htmlBody(TextTemplate.Builder html) { - return htmlBody(html.build()); + return htmlBody(new TextTemplate(html)); } public Builder htmlBody(TextTemplate html) { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessage.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessage.java index 0c4e8f08537..61f6feaa204 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessage.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessage.java @@ -23,9 +23,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -/** - * - */ public class HipChatMessage implements ToXContent { final String body; @@ -405,7 +402,7 @@ public class HipChatMessage implements ToXContent { public enum Color implements ToXContent { YELLOW, GREEN, RED, PURPLE, GRAY, RANDOM; - private final TextTemplate template = TextTemplate.inline(name()).build(); + private final TextTemplate template = new TextTemplate(name()); public TextTemplate asTemplate() { return template; @@ -453,7 +450,7 @@ public class HipChatMessage implements ToXContent { TEXT, HTML; - private final TextTemplate template = TextTemplate.inline(name()).build(); + private final TextTemplate template = new TextTemplate(name()); public TextTemplate asTemplate() { return template; diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java index 631c75d1aa4..0f1d093e8c4 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java @@ -155,7 +155,7 @@ public class IncidentEvent implements ToXContent { return builder.endObject(); } public static Template.Builder templateBuilder(String description) { - return templateBuilder(TextTemplate.inline(description).build()); + return templateBuilder(new TextTemplate(description)); } public static Template.Builder templateBuilder(TextTemplate description) { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java 
b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java index 36f1d6db891..472e3216690 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java @@ -21,9 +21,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -/** - * - */ public class IncidentEventContext implements ToXContent { enum Type { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java index 4641324ba4a..a5b0eedeac6 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java @@ -20,9 +20,6 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -/** - * - */ public class Attachment implements MessageElement { final String fallback; @@ -460,12 +457,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setFallback(TextTemplate.Builder fallback) { - return setFallback(fallback.build()); - } - public Builder setFallback(String fallback) { - return setFallback(TextTemplate.indexed(fallback)); + return setFallback(new TextTemplate(fallback)); } public Builder setColor(TextTemplate color) { @@ -473,12 +466,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setColor(TextTemplate.Builder color) { - return setColor(color.build()); - } - public Builder setColor(String color) { - return setColor(TextTemplate.inline(color)); + return setColor(new TextTemplate(color)); } public Builder setPretext(TextTemplate pretext) { @@ -486,12 +475,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setPretext(TextTemplate.Builder pretext) { - return setPretext(pretext.build()); - } - public Builder setPretext(String pretext) { - return setPretext(TextTemplate.inline(pretext)); + return setPretext(new TextTemplate(pretext)); } public Builder setAuthorName(TextTemplate authorName) { @@ -499,12 +484,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setAuthorName(TextTemplate.Builder authorName) { - return setAuthorName(authorName.build()); - } - public Builder setAuthorName(String authorName) { - return setAuthorName(TextTemplate.inline(authorName)); + return setAuthorName(new TextTemplate(authorName)); } public Builder setAuthorLink(TextTemplate authorLink) { @@ -512,12 +493,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setAuthorLink(TextTemplate.Builder authorLink) { - return setAuthorLink(authorLink.build()); - } - public Builder setAuthorLink(String authorLink) { - return setAuthorLink(TextTemplate.inline(authorLink)); + return setAuthorLink(new TextTemplate(authorLink)); } public Builder setAuthorIcon(TextTemplate authorIcon) { @@ -525,12 +502,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setAuthorIcon(TextTemplate.Builder authorIcon) { - return setAuthorIcon(authorIcon.build()); - } - public Builder setAuthorIcon(String authorIcon) { - return setAuthorIcon(TextTemplate.inline(authorIcon)); + return setAuthorIcon(new 
TextTemplate(authorIcon)); } public Builder setTitle(TextTemplate title) { @@ -538,12 +511,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setTitle(TextTemplate.Builder title) { - return setTitle(title.build()); - } - public Builder setTitle(String title) { - return setTitle(TextTemplate.inline(title)); + return setTitle(new TextTemplate(title)); } public Builder setTitleLink(TextTemplate titleLink) { @@ -551,12 +520,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setTitleLink(TextTemplate.Builder titleLink) { - return setTitleLink(titleLink.build()); - } - public Builder setTitleLink(String titleLink) { - return setTitleLink(TextTemplate.inline(titleLink)); + return setTitleLink(new TextTemplate(titleLink)); } public Builder setText(TextTemplate text) { @@ -564,12 +529,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setText(TextTemplate.Builder text) { - return setText(text.build()); - } - public Builder setText(String text) { - return setText(TextTemplate.inline(text)); + return setText(new TextTemplate(text)); } public Builder addField(TextTemplate title, TextTemplate value, boolean isShort) { @@ -577,12 +538,8 @@ public class Attachment implements MessageElement { return this; } - public Builder addField(TextTemplate.Builder title, TextTemplate.Builder value, boolean isShort) { - return addField(title.build(), value.build(), isShort); - } - public Builder addField(String title, String value, boolean isShort) { - return addField(TextTemplate.inline(title), TextTemplate.inline(value), isShort); + return addField(new TextTemplate(title), new TextTemplate(value), isShort); } public Builder setImageUrl(TextTemplate imageUrl) { @@ -590,12 +547,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setImageUrl(TextTemplate.Builder imageUrl) { - return setImageUrl(imageUrl.build()); - } - public Builder setImageUrl(String imageUrl) { - return setImageUrl(TextTemplate.inline(imageUrl)); + return setImageUrl(new TextTemplate(imageUrl)); } public Builder setThumbUrl(TextTemplate thumbUrl) { @@ -603,12 +556,8 @@ public class Attachment implements MessageElement { return this; } - public Builder setThumbUrl(TextTemplate.Builder thumbUrl) { - return setThumbUrl(thumbUrl.build()); - } - public Builder setThumbUrl(String thumbUrl) { - return setThumbUrl(TextTemplate.inline(thumbUrl)); + return setThumbUrl(new TextTemplate(thumbUrl)); } public Template build() { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java index a5697cc68ed..58e473e7971 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java @@ -17,9 +17,6 @@ import org.elasticsearch.xpack.common.text.TextTemplateEngine; import java.io.IOException; import java.util.Map; -/** - * - */ class Field implements MessageElement { final String title; diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java index 4d98df413d8..4515d528e35 100644 --- 
a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java @@ -22,9 +22,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -/** - * - */ public class SlackMessage implements MessageElement { final String from; @@ -350,12 +347,8 @@ public class SlackMessage implements MessageElement { return this; } - public Builder setFrom(TextTemplate.Builder from) { - return setFrom(from.build()); - } - public Builder setFrom(String from) { - return setFrom(TextTemplate.inline(from)); + return setFrom(new TextTemplate(from)); } public Builder addTo(TextTemplate... to) { @@ -363,16 +356,9 @@ public class SlackMessage implements MessageElement { return this; } - public Builder addTo(TextTemplate.Builder... to) { - for (TextTemplate.Builder name : to) { - this.to.add(name.build()); - } - return this; - } - public Builder addTo(String... to) { for (String name : to) { - this.to.add(TextTemplate.inline(name).build()); + this.to.add(new TextTemplate(name)); } return this; } @@ -382,12 +368,8 @@ public class SlackMessage implements MessageElement { return this; } - public Builder setText(TextTemplate.Builder text) { - return setText(text.build()); - } - public Builder setText(String text) { - return setText(TextTemplate.inline(text)); + return setText(new TextTemplate(text)); } public Builder setIcon(TextTemplate icon) { @@ -395,12 +377,8 @@ public class SlackMessage implements MessageElement { return this; } - public Builder setIcon(TextTemplate.Builder icon) { - return setIcon(icon.build()); - } - public Builder setIcon(String icon) { - return setIcon(TextTemplate.inline(icon)); + return setIcon(new TextTemplate(icon)); } public Builder addAttachments(Attachment.Template... 
attachments) { diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java index f2afc01f25d..1ec1121bbb6 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java @@ -264,16 +264,16 @@ public class CertUtils { /** * Generates a CA certificate */ - static X509Certificate generateCACertificate(X500Principal x500Principal, KeyPair keyPair) throws Exception { - return generateSignedCertificate(x500Principal, null, keyPair, null, null, true); + static X509Certificate generateCACertificate(X500Principal x500Principal, KeyPair keyPair, int days) throws Exception { + return generateSignedCertificate(x500Principal, null, keyPair, null, null, true, days); } /** * Generates a signed certificate using the provided CA private key and information from the CA certificate */ static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, - X509Certificate caCert, PrivateKey caPrivKey) throws Exception { - return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false); + X509Certificate caCert, PrivateKey caPrivKey, int days) throws Exception { + return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days); } /** @@ -289,9 +289,15 @@ public class CertUtils { * @throws Exception if an error occurs during the certificate creation */ private static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, - X509Certificate caCert, PrivateKey caPrivKey, boolean isCa) throws Exception { + X509Certificate caCert, PrivateKey caPrivKey, boolean isCa + + , int days) throws + Exception { final DateTime notBefore = new DateTime(DateTimeZone.UTC); - final DateTime notAfter = notBefore.plusYears(1); + if (days < 1) { + throw new IllegalArgumentException("the certificate must be valid for at least one day"); + } + final DateTime notAfter = notBefore.plusDays(days); final BigInteger serial = CertUtils.getSerial(); JcaX509ExtensionUtils extUtils = new JcaX509ExtensionUtils(); diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java index f8f20d32f01..1ce14b5f154 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java @@ -67,6 +67,7 @@ public class CertificateTool extends SettingCommand { private static final String DESCRIPTION = "Simplifies certificate creation for use with the Elastic Stack"; private static final String DEFAULT_CSR_FILE = "csr-bundle.zip"; private static final String DEFAULT_CERT_FILE = "certificate-bundle.zip"; + private static final int DEFAULT_DAYS = 3 * 365; private static final int FILE_EXTENSION_LENGTH = 4; static final int MAX_FILENAME_LENGTH = 255 - FILE_EXTENSION_LENGTH; private static final Pattern ALLOWED_FILENAME_CHAR_PATTERN = @@ -95,6 +96,7 @@ public class CertificateTool extends SettingCommand { private final OptionSpec caDnSpec; private final OptionSpec keysizeSpec; private final OptionSpec inputFileSpec; + private final OptionSpec daysSpec; CertificateTool() { super(DESCRIPTION); @@ -114,6 +116,8 @@ public class CertificateTool 
extends SettingCommand { .withRequiredArg(); keysizeSpec = parser.accepts("keysize", "size in bits of RSA keys").withRequiredArg().ofType(Integer.class); inputFileSpec = parser.accepts("in", "file containing details of the instances in yaml format").withRequiredArg(); + daysSpec = + parser.accepts("days", "number of days that the generated certificates are valid").withRequiredArg().ofType(Integer.class); } public static void main(String[] args) throws Exception { @@ -135,10 +139,11 @@ public class CertificateTool extends SettingCommand { final String dn = options.has(caDnSpec) ? caDnSpec.value(options) : AUTO_GEN_CA_DN; final boolean prompt = options.has(caPasswordSpec); final char[] keyPass = options.hasArgument(caPasswordSpec) ? caPasswordSpec.value(options).toCharArray() : null; - CAInfo caInfo = - getCAInfo(terminal, dn, caCertPathSpec.value(options), caKeyPathSpec.value(options), keyPass, prompt, env, keysize); + final int days = options.hasArgument(daysSpec) ? daysSpec.value(options) : DEFAULT_DAYS; + CAInfo caInfo = getCAInfo(terminal, dn, caCertPathSpec.value(options), caKeyPathSpec.value(options), keyPass, prompt, env, + keysize, days); Collection certificateInformations = getCertificateInformationList(terminal, inputFile, env); - generateAndWriteSignedCertificates(outputFile, certificateInformations, caInfo, keysize); + generateAndWriteSignedCertificates(outputFile, certificateInformations, caInfo, keysize, days); } printConclusion(terminal, csrOnly, outputFile); } @@ -281,12 +286,15 @@ public class CertificateTool extends SettingCommand { * @param dn the distinguished name to use for the CA * @param caCertPath the path to the CA certificate or {@code null} if not provided * @param caKeyPath the path to the CA private key or {@code null} if not provided + * @param prompt whether we should prompt the user for a password * @param keyPass the password to the private key. If not present and the key is encrypted the user will be prompted * @param env the environment for this tool to resolve files with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for * @return CA cert and private key */ static CAInfo getCAInfo(Terminal terminal, String dn, String caCertPath, String caKeyPath, char[] keyPass, boolean prompt, - Environment env, int keysize) throws Exception { + Environment env, int keysize, int days) throws Exception { if (caCertPath != null) { assert caKeyPath != null; Certificate[] certificates = CertUtils.readCertificates(Collections.singletonList(caCertPath), env); @@ -302,7 +310,7 @@ public class CertificateTool extends SettingCommand { // generate the CA keys and cert X500Principal x500Principal = new X500Principal(dn); KeyPair keyPair = CertUtils.generateKeyPair(keysize); - Certificate caCert = CertUtils.generateCACertificate(x500Principal, keyPair); + Certificate caCert = CertUtils.generateCACertificate(x500Principal, keyPair, days); final char[] password; if (prompt) { password = terminal.readSecret("Enter password for CA private key: "); @@ -317,9 +325,11 @@ public class CertificateTool extends SettingCommand { * @param outputFile the file that the certificates will be written to. 
This file must not exist * @param certificateInformations details for creation of the certificates * @param caInfo the CA information to sign the certificates with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for */ static void generateAndWriteSignedCertificates(Path outputFile, Collection certificateInformations, - CAInfo caInfo, int keysize) throws Exception { + CAInfo caInfo, int keysize, int days) throws Exception { fullyWriteFile(outputFile, (outputStream, pemWriter) -> { // write out the CA info first if it was generated writeCAInfoIfGenerated(outputStream, pemWriter, caInfo); @@ -328,7 +338,7 @@ public class CertificateTool extends SettingCommand { KeyPair keyPair = CertUtils.generateKeyPair(keysize); Certificate certificate = CertUtils.generateSignedCertificate(certificateInformation.name.x500Principal, getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames), - keyPair, caInfo.caCert, caInfo.privateKey); + keyPair, caInfo.caCert, caInfo.privateKey, days); final String dirName = certificateInformation.name.filename + "/"; ZipEntry zipEntry = new ZipEntry(dirName); diff --git a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java index 4890636af61..0740b15e73e 100644 --- a/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java +++ b/elasticsearch/x-pack/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.ssl; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; @@ -13,6 +15,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.XPackSettings; +import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; @@ -92,13 +95,63 @@ public class SSLService extends AbstractComponent { }; } + /** + * Create a new {@link SSLIOSessionStrategy} based on the provided settings. The settings are used to identify the SSL configuration + * that should be used to create the context. + * + * @param settings the settings used to identify the ssl configuration, typically under a *.ssl. prefix. An empty settings will return + * a context created from the default configuration + * @return Never {@code null}. + */ + public SSLIOSessionStrategy sslIOSessionStrategy(Settings settings) { + SSLConfiguration config = sslConfiguration(settings); + SSLContext sslContext = sslContext(config); + String[] ciphers = supportedCiphers(sslParameters(sslContext).getCipherSuites(), config.cipherSuites(), false); + String[] supportedProtocols = config.supportedProtocols().toArray(Strings.EMPTY_ARRAY); + HostnameVerifier verifier; + + if (config.verificationMode().isHostnameVerificationEnabled()) { + verifier = SSLIOSessionStrategy.getDefaultHostnameVerifier(); + } else { + verifier = NoopHostnameVerifier.INSTANCE; + } + + return sslIOSessionStrategy(sslContext, supportedProtocols, ciphers, verifier); + } + + /** + * The {@link SSLParameters} that are associated with the {@code sslContext}. + *
* <p>
    + * This method exists to simplify testing since {@link SSLContext#getSupportedSSLParameters()} is {@code final}. + * + * @param sslContext The SSL context for the current SSL settings + * @return Never {@code null}. + */ + SSLParameters sslParameters(SSLContext sslContext) { + return sslContext.getSupportedSSLParameters(); + } + + /** + * This method only exists to simplify testing of {@link #sslIOSessionStrategy(Settings)} because {@link SSLIOSessionStrategy} does + * not expose any of the parameters that you give it. + * + * @param sslContext SSL Context used to handle SSL / TCP requests + * @param protocols Supported protocols + * @param ciphers Supported ciphers + * @param verifier Hostname verifier + * @return Never {@code null}. + */ + SSLIOSessionStrategy sslIOSessionStrategy(SSLContext sslContext, String[] protocols, String[] ciphers, HostnameVerifier verifier) { + return new SSLIOSessionStrategy(sslContext, protocols, ciphers, verifier); + } + /** * Create a new {@link SSLSocketFactory} based on the provided settings. The settings are used to identify the ssl configuration that * should be used to create the socket factory. The socket factory will also properly configure the ciphers and protocols on each * socket that is created * @param settings the settings used to identify the ssl configuration, typically under a *.ssl. prefix. An empty settings will return * a factory created from the default configuration - * @return {@link SSLSocketFactory} + * @return Never {@code null}. */ public SSLSocketFactory sslSocketFactory(Settings settings) { SSLConfiguration sslConfiguration = sslConfiguration(settings); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/action/TransportXPackInfoActionTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/action/TransportXPackInfoActionTests.java index 7efd1695ee7..40d01a4d0c6 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/action/TransportXPackInfoActionTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/action/TransportXPackInfoActionTests.java @@ -12,14 +12,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; -import org.junit.After; -import org.junit.Before; import java.util.EnumSet; import java.util.HashSet; @@ -40,22 +37,6 @@ import static org.mockito.Mockito.when; public class TransportXPackInfoActionTests extends ESTestCase { - private boolean anonymousEnabled; - - @Before - public void maybeEnableAnonymous() { - anonymousEnabled = randomBoolean(); - if (anonymousEnabled) { - Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build(); - AnonymousUser.initialize(settings); - } - } - - @After - public void resetAnonymous() { - AnonymousUser.initialize(Settings.EMPTY); - } - public void testDoExecute() throws Exception { LicenseService licenseService = mock(LicenseService.class); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTemplateTests.java 
b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTemplateTests.java index aca7e83630f..a9677270b8c 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTemplateTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTemplateTests.java @@ -31,9 +31,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -/** - * - */ public class HttpRequestTemplateTests extends ESTestCase { public void testBodyWithXContent() throws Exception { @@ -64,10 +61,10 @@ public class HttpRequestTemplateTests extends ESTestCase { public void testRender() { HttpRequestTemplate template = HttpRequestTemplate.builder("_host", 1234) - .body(TextTemplate.inline("_body")) - .path(TextTemplate.inline("_path")) - .putParam("_key1", TextTemplate.inline("_value1")) - .putHeader("_key2", TextTemplate.inline("_value2")) + .body(new TextTemplate("_body")) + .path(new TextTemplate("_path")) + .putParam("_key1", new TextTemplate("_value1")) + .putHeader("_key2", new TextTemplate("_value2")) .build(); HttpRequest result = template.render(new MockTextTemplateEngine(), Collections.emptyMap()); @@ -117,16 +114,16 @@ public class HttpRequestTemplateTests extends ESTestCase { builder.auth(new BasicAuth("_username", "_password".toCharArray())); } if (randomBoolean()) { - builder.putParam("_key", TextTemplate.inline("_value")); + builder.putParam("_key", new TextTemplate("_value")); } if (randomBoolean()) { - builder.putHeader("_key", TextTemplate.inline("_value")); + builder.putHeader("_key", new TextTemplate("_value")); } - long connectionTimeout = randomBoolean() ? 0 : randomIntBetween(5, 10); + long connectionTimeout = randomBoolean() ? 0 : randomIntBetween(5, 100000); if (connectionTimeout > 0) { builder.connectionTimeout(TimeValue.timeValueSeconds(connectionTimeout)); } - long readTimeout = randomBoolean() ? 0 : randomIntBetween(5, 10); + long readTimeout = randomBoolean() ? 
0 : randomIntBetween(5, 100000); if (readTimeout > 0) { builder.readTimeout(TimeValue.timeValueSeconds(readTimeout)); } @@ -146,13 +143,13 @@ public class HttpRequestTemplateTests extends ESTestCase { xContentParser.nextToken(); HttpRequestTemplate parsed = parser.parse(xContentParser); - assertThat(parsed, equalTo(template)); + assertEquals(template, parsed); } public void testParsingFromUrl() throws Exception { HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("www.example.org", 1234); builder.path("/foo/bar/org"); - builder.putParam("param", TextTemplate.inline("test")); + builder.putParam("param", new TextTemplate("test")); builder.scheme(Scheme.HTTPS); assertThatManualBuilderEqualsParsingFromUrl("https://www.example.org:1234/foo/bar/org?param=test", builder); @@ -165,7 +162,7 @@ public class HttpRequestTemplateTests extends ESTestCase { assertThatManualBuilderEqualsParsingFromUrl("http://www.example.org", builder); // encoded values - builder = HttpRequestTemplate.builder("www.example.org", 80).putParam("foo", TextTemplate.inline(" white space")); + builder = HttpRequestTemplate.builder("www.example.org", 80).putParam("foo", new TextTemplate(" white space")); assertThatManualBuilderEqualsParsingFromUrl("http://www.example.org?foo=%20white%20space", builder); } diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTests.java index 9b2e7802a4a..3be8dd14c1e 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/http/HttpRequestTests.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.common.http; +import com.carrotsearch.randomizedtesting.annotations.Repeat; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -14,6 +16,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.common.http.HttpRequest; import org.elasticsearch.xpack.common.http.Scheme; import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry; +import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth; + +import java.util.HashMap; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -65,6 +71,54 @@ public class HttpRequestTests extends ESTestCase { } } + public void testXContentSerialization() throws Exception { + final HttpRequest.Builder builder; + if (randomBoolean()) { + builder = HttpRequest.builder(); + builder.fromUrl("http://localhost:9200/generic/createevent"); + } else { + builder = HttpRequest.builder("localhost", 9200); + if (randomBoolean()) { + builder.scheme(randomFrom(Scheme.values())); + if (usually()) { + builder.path(randomAsciiOfLength(50)); + } + } + } + if (usually()) { + builder.method(randomFrom(HttpMethod.values())); + } + if (randomBoolean()) { + builder.setParam(randomAsciiOfLength(10), randomAsciiOfLength(10)); + if (randomBoolean()) { + builder.setParam(randomAsciiOfLength(10), randomAsciiOfLength(10)); + } + } + if (randomBoolean()) { + builder.setHeader(randomAsciiOfLength(10), randomAsciiOfLength(10)); + if (randomBoolean()) { + 
builder.setHeader(randomAsciiOfLength(10), randomAsciiOfLength(10)); + } + } + if (randomBoolean()) { + builder.auth(new BasicAuth(randomAsciiOfLength(10), randomAsciiOfLength(20).toCharArray())); + } + if (randomBoolean()) { + builder.body(randomAsciiOfLength(200)); + } + if (randomBoolean()) { + builder.connectionTimeout(TimeValue.parseTimeValue(randomTimeValue(), "my.setting")); + } + if (randomBoolean()) { + builder.readTimeout(TimeValue.parseTimeValue(randomTimeValue(), "my.setting")); + } + if (randomBoolean()) { + builder.proxy(new HttpProxy(randomAsciiOfLength(10), randomIntBetween(1024, 65000))); + } + + builder.build().toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS); + } + private void assertThatManualBuilderEqualsParsingFromUrl(String url, HttpRequest.Builder builder) throws Exception { XContentBuilder urlContentBuilder = jsonBuilder().startObject().field("url", url).endObject(); XContentParser urlContentParser = JsonXContent.jsonXContent.createParser(urlContentBuilder.bytes()); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java index 54e796025fa..b204fece9f6 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java @@ -27,7 +27,6 @@ import java.util.Map; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -64,7 +63,7 @@ public class TextTemplateTests extends ESTestCase { when(service.executable(compiledScript, model)).thenReturn(script); when(script.run()).thenReturn("rendered_text"); - TextTemplate template = templateBuilder(type, templateText).params(params).build(); + TextTemplate template = templateBuilder(type, templateText, params); assertThat(engine.render(template, model), is("rendered_text")); } @@ -80,7 +79,7 @@ public class TextTemplateTests extends ESTestCase { when(service.executable(compiledScript, model)).thenReturn(script); when(script.run()).thenReturn("rendered_text"); - TextTemplate template = templateBuilder(scriptType, templateText).params(params).build(); + TextTemplate template = templateBuilder(scriptType, templateText, params); assertThat(engine.render(template, model), is("rendered_text")); } @@ -100,7 +99,7 @@ public class TextTemplateTests extends ESTestCase { public void testParser() throws Exception { ScriptType type = randomScriptType(); - TextTemplate template = templateBuilder(type, "_template").params(singletonMap("param_key", "param_val")).build(); + TextTemplate template = templateBuilder(type, "_template", singletonMap("param_key", "param_val")); XContentBuilder builder = jsonBuilder().startObject(); switch (type) { case INLINE: @@ -123,7 +122,7 @@ public class TextTemplateTests extends ESTestCase { } public void testParserParserSelfGenerated() throws Exception { - TextTemplate template = templateBuilder(randomScriptType(), "_template").params(singletonMap("param_key", "param_val")).build(); + TextTemplate template = templateBuilder(randomScriptType(), "_template", singletonMap("param_key", 
"param_val")); XContentBuilder builder = jsonBuilder().value(template); BytesReference bytes = builder.bytes(); @@ -186,14 +185,8 @@ public class TextTemplateTests extends ESTestCase { assertThat(engine.render(null ,new HashMap<>()), is(nullValue())); } - private TextTemplate.Builder templateBuilder(ScriptType type, String text) { - switch (type) { - case INLINE: return TextTemplate.inline(text); - case FILE: return TextTemplate.file(text); - case STORED: return TextTemplate.indexed(text); - default: - throw illegalArgument("unsupported script type [{}]", type); - } + private TextTemplate templateBuilder(ScriptType type, String text, Map params) { + return new TextTemplate(text, null, type, params); } private static ScriptType randomScriptType() { diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/email/EmailTemplateTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/email/EmailTemplateTests.java index 2d8d600f2cc..0dac349e296 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/email/EmailTemplateTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/email/EmailTemplateTests.java @@ -24,29 +24,27 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - */ public class EmailTemplateTests extends ESTestCase { public void testEmailTemplateParserSelfGenerated() throws Exception { - TextTemplate from = randomFrom(TextTemplate.inline("from@from.com").build(), null); + TextTemplate from = randomFrom(new TextTemplate("from@from.com"), null); List addresses = new ArrayList<>(); for( int i = 0; i < randomIntBetween(1, 5); ++i){ - addresses.add(TextTemplate.inline("address" + i + "@test.com").build()); + addresses.add(new TextTemplate("address" + i + "@test.com")); } TextTemplate[] possibleList = addresses.toArray(new TextTemplate[addresses.size()]); TextTemplate[] replyTo = randomFrom(possibleList, null); TextTemplate[] to = randomFrom(possibleList, null); TextTemplate[] cc = randomFrom(possibleList, null); TextTemplate[] bcc = randomFrom(possibleList, null); - TextTemplate priority = TextTemplate.inline(randomFrom(Email.Priority.values()).name()).build(); + TextTemplate priority = new TextTemplate(randomFrom(Email.Priority.values()).name()); - TextTemplate subjectTemplate = TextTemplate.inline("Templated Subject {{foo}}").build(); + TextTemplate subjectTemplate = new TextTemplate("Templated Subject {{foo}}"); String subject = "Templated Subject bar"; - TextTemplate textBodyTemplate = TextTemplate.inline("Templated Body {{foo}}").build(); + TextTemplate textBodyTemplate = new TextTemplate("Templated Body {{foo}}"); String textBody = "Templated Body bar"; - TextTemplate htmlBodyTemplate = TextTemplate.inline("Templated Html Body ").build(); + TextTemplate htmlBodyTemplate = new TextTemplate("Templated Html Body "); String htmlBody = "Templated Html Body "; String sanitizedHtmlBody = "Templated Html Body"; diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessageTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessageTests.java index 9b97591dff7..3c5b291b303 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessageTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/HipChatMessageTests.java @@ -25,9 +25,6 
@@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -/** - * - */ public class HipChatMessageTests extends ESTestCase { public void testToXContent() throws Exception { String message = randomAsciiOfLength(10); @@ -163,14 +160,14 @@ public class HipChatMessageTests extends ESTestCase { XContentBuilder jsonBuilder = jsonBuilder(); jsonBuilder.startObject(); - TextTemplate body = TextTemplate.inline(randomAsciiOfLength(200)).build(); + TextTemplate body = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("body", body, ToXContent.EMPTY_PARAMS); TextTemplate[] rooms = null; if (randomBoolean()) { jsonBuilder.startArray("room"); rooms = new TextTemplate[randomIntBetween(1, 3)]; for (int i = 0; i < rooms.length; i++) { - rooms[i] = TextTemplate.inline(randomAsciiOfLength(10)).build(); + rooms[i] = new TextTemplate(randomAsciiOfLength(10)); rooms[i].toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); } jsonBuilder.endArray(); @@ -180,7 +177,7 @@ public class HipChatMessageTests extends ESTestCase { jsonBuilder.startArray("user"); users = new TextTemplate[randomIntBetween(1, 3)]; for (int i = 0; i < users.length; i++) { - users[i] = TextTemplate.inline(randomAsciiOfLength(10)).build(); + users[i] = new TextTemplate(randomAsciiOfLength(10)); users[i].toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); } jsonBuilder.endArray(); @@ -192,7 +189,7 @@ public class HipChatMessageTests extends ESTestCase { } TextTemplate color = null; if (randomBoolean()) { - color = TextTemplate.inline(randomAsciiOfLength(10)).build(); + color = new TextTemplate(randomAsciiOfLength(10)); jsonBuilder.field("color", color, ToXContent.EMPTY_PARAMS); } HipChatMessage.Format format = null; @@ -231,26 +228,26 @@ public class HipChatMessageTests extends ESTestCase { } public void testTemplateParseSelfGenerated() throws Exception { - TextTemplate body = TextTemplate.inline(randomAsciiOfLength(10)).build(); + TextTemplate body = new TextTemplate(randomAsciiOfLength(10)); HipChatMessage.Template.Builder templateBuilder = new HipChatMessage.Template.Builder(body); if (randomBoolean()) { int count = randomIntBetween(1, 3); for (int i = 0; i < count; i++) { - templateBuilder.addRooms(TextTemplate.inline(randomAsciiOfLength(10)).build()); + templateBuilder.addRooms(new TextTemplate(randomAsciiOfLength(10))); } } if (randomBoolean()) { int count = randomIntBetween(1, 3); for (int i = 0; i < count; i++) { - templateBuilder.addUsers(TextTemplate.inline(randomAsciiOfLength(10)).build()); + templateBuilder.addUsers(new TextTemplate(randomAsciiOfLength(10))); } } if (randomBoolean()) { templateBuilder.setFrom(randomAsciiOfLength(10)); } if (randomBoolean()) { - templateBuilder.setColor(TextTemplate.inline(randomAsciiOfLength(5)).build()); + templateBuilder.setColor(new TextTemplate(randomAsciiOfLength(5))); } if (randomBoolean()) { templateBuilder.setFormat(randomFrom(HipChatMessage.Format.values())); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/UserAccountTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/UserAccountTests.java index 7954692b8ed..d0b71c4fd9d 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/UserAccountTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/hipchat/UserAccountTests.java @@ -32,9 +32,6 @@ import static org.mockito.Mockito.mock; import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -/** - * - */ public class UserAccountTests extends ESTestCase { public void testSettings() throws Exception { @@ -258,8 +255,8 @@ public class UserAccountTests extends ESTestCase { .build(); UserAccount userAccount = createUserAccount(settings); - TextTemplate body = TextTemplate.inline("body").build(); - TextTemplate[] rooms = new TextTemplate[] { TextTemplate.inline("room").build() }; + TextTemplate body = new TextTemplate("body"); + TextTemplate[] rooms = new TextTemplate[] { new TextTemplate("room")}; HipChatMessage.Template template = new HipChatMessage.Template(body, rooms, null, "sender", HipChatMessage.Format.TEXT, null, true); HipChatMessage message = userAccount.render("watchId", "actionId", new MockTextTemplateEngine(), template, new HashMap<>()); @@ -273,10 +270,10 @@ public class UserAccountTests extends ESTestCase { .build(); UserAccount userAccount = createUserAccount(settings); - TextTemplate body = TextTemplate.inline("body").build(); - TextTemplate[] rooms = new TextTemplate[] { TextTemplate.inline("room").build() }; + TextTemplate body = new TextTemplate("body"); + TextTemplate[] rooms = new TextTemplate[] { new TextTemplate("room") }; HipChatMessage.Template template = new HipChatMessage.Template(body, rooms, null, "sender", null, - TextTemplate.inline("yellow").build(), true); + new TextTemplate("yellow"), true); HipChatMessage message = userAccount.render("watchId", "actionId", new MockTextTemplateEngine(), template, new HashMap<>()); assertThat(message.format, is(nullValue())); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/slack/message/SlackMessageTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/slack/message/SlackMessageTests.java index 508308e77ab..5feb93d39d0 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/slack/message/SlackMessageTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/notification/slack/message/SlackMessageTests.java @@ -27,9 +27,6 @@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -/** - * - */ public class SlackMessageTests extends ESTestCase { public void testToXContent() throws Exception { String from = randomBoolean() ? 
null : randomAsciiOfLength(10); @@ -230,7 +227,7 @@ public class SlackMessageTests extends ESTestCase { TextTemplate from = null; if (randomBoolean()) { - from = TextTemplate.inline(randomAsciiOfLength(200)).build(); + from = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("from", from, params); } TextTemplate[] to = null; @@ -238,19 +235,19 @@ public class SlackMessageTests extends ESTestCase { jsonBuilder.startArray("to"); to = new TextTemplate[randomIntBetween(1, 3)]; for (int i = 0; i < to.length; i++) { - to[i] = TextTemplate.inline(randomAsciiOfLength(10)).build(); + to[i] = new TextTemplate(randomAsciiOfLength(10)); to[i].toXContent(jsonBuilder, params); } jsonBuilder.endArray(); } TextTemplate text = null; if (randomBoolean()) { - text = TextTemplate.inline(randomAsciiOfLength(200)).build(); + text = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("text", text, params); } TextTemplate icon = null; if (randomBoolean()) { - icon = TextTemplate.inline(randomAsciiOfLength(10)).build(); + icon = new TextTemplate(randomAsciiOfLength(10)); jsonBuilder.field("icon", icon); } Attachment.Template[] attachments = null; @@ -261,57 +258,57 @@ public class SlackMessageTests extends ESTestCase { jsonBuilder.startObject(); TextTemplate fallback = null; if (randomBoolean()) { - fallback = TextTemplate.inline(randomAsciiOfLength(200)).build(); + fallback = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("fallback", fallback, params); } TextTemplate color = null; if (randomBoolean()) { - color = TextTemplate.inline(randomAsciiOfLength(200)).build(); + color = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("color", color, params); } TextTemplate pretext = null; if (randomBoolean()) { - pretext = TextTemplate.inline(randomAsciiOfLength(200)).build(); + pretext = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("pretext", pretext, params); } TextTemplate authorName = null; TextTemplate authorLink = null; TextTemplate authorIcon = null; if (randomBoolean()) { - authorName = TextTemplate.inline(randomAsciiOfLength(200)).build(); + authorName = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("author_name", authorName, params); if (randomBoolean()) { - authorLink = TextTemplate.inline(randomAsciiOfLength(200)).build(); + authorLink = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("author_link", authorLink, params); } if (randomBoolean()) { - authorIcon = TextTemplate.inline(randomAsciiOfLength(200)).build(); + authorIcon = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("author_icon", authorIcon, params); } } TextTemplate title = null; TextTemplate titleLink = null; if (randomBoolean()) { - title = TextTemplate.inline(randomAsciiOfLength(200)).build(); + title = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("title", title, params); if (randomBoolean()) { - titleLink = TextTemplate.inline(randomAsciiOfLength(200)).build(); + titleLink = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("title_link", titleLink, params); } } TextTemplate attachmentText = null; if (randomBoolean()) { - attachmentText = TextTemplate.inline(randomAsciiOfLength(200)).build(); + attachmentText = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("text", attachmentText, params); } TextTemplate imageUrl = null; if (randomBoolean()) { - imageUrl = TextTemplate.inline(randomAsciiOfLength(200)).build(); + imageUrl = new TextTemplate(randomAsciiOfLength(200)); 
jsonBuilder.field("image_url", imageUrl, params); } TextTemplate thumbUrl = null; if (randomBoolean()) { - thumbUrl = TextTemplate.inline(randomAsciiOfLength(200)).build(); + thumbUrl = new TextTemplate(randomAsciiOfLength(200)); jsonBuilder.field("thumb_url", thumbUrl, params); } Field.Template[] fields = null; @@ -320,9 +317,9 @@ public class SlackMessageTests extends ESTestCase { fields = new Field.Template[randomIntBetween(1,3)]; for (int j = 0; j < fields.length; j++) { jsonBuilder.startObject(); - TextTemplate fieldTitle = TextTemplate.inline(randomAsciiOfLength(50)).build(); + TextTemplate fieldTitle = new TextTemplate(randomAsciiOfLength(50)); jsonBuilder.field("title", fieldTitle, params); - TextTemplate fieldValue = TextTemplate.inline(randomAsciiOfLength(50)).build(); + TextTemplate fieldValue = new TextTemplate(randomAsciiOfLength(50)); jsonBuilder.field("value", fieldValue, params); boolean isShort = randomBoolean(); jsonBuilder.field("short", isShort); diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java index 4db0ae20d44..31d5788c774 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java @@ -42,6 +42,7 @@ import java.security.cert.Certificate; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import java.security.interfaces.RSAKey; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -207,14 +208,15 @@ public class CertificateToolTests extends ESTestCase { assertEquals(4, certInfos.size()); final int keysize = randomFrom(1024, 2048); + final int days = randomIntBetween(1, 1024); KeyPair keyPair = CertUtils.generateKeyPair(keysize); - X509Certificate caCert = CertUtils.generateCACertificate(new X500Principal("CN=test ca"), keyPair); + X509Certificate caCert = CertUtils.generateCACertificate(new X500Principal("CN=test ca"), keyPair, days); final boolean generatedCa = randomBoolean(); final char[] keyPassword = randomBoolean() ? "changeme".toCharArray() : null; assertFalse(Files.exists(outputFile)); CAInfo caInfo = new CAInfo(caCert, keyPair.getPrivate(), generatedCa, keyPassword); - CertificateTool.generateAndWriteSignedCertificates(outputFile, certInfos, caInfo, keysize); + CertificateTool.generateAndWriteSignedCertificates(outputFile, certInfos, caInfo, keysize, days); assertTrue(Files.exists(outputFile)); FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outputFile.toUri()), Collections.emptyMap()); @@ -229,6 +231,8 @@ public class CertificateToolTests extends ESTestCase { X509Certificate parsedCaCert = readX509Certificate(reader); assertThat(parsedCaCert.getSubjectX500Principal().getName(), containsString("test ca")); assertEquals(caCert, parsedCaCert); + long daysBetween = ChronoUnit.DAYS.between(caCert.getNotBefore().toInstant(), caCert.getNotAfter().toInstant()); + assertEquals(days, (int) daysBetween); } // check the CA key @@ -283,15 +287,17 @@ public class CertificateToolTests extends ESTestCase { terminal.addSecretInput("testnode"); } + final int days = randomIntBetween(1, 1024); CAInfo caInfo = CertificateTool.getCAInfo(terminal, "CN=foo", testNodeCertPath.toString(), testNodeKeyPath.toString(), - passwordPrompt ? 
null : "testnode".toCharArray(), passwordPrompt, env, randomFrom(1024, 2048)); + passwordPrompt ? null : "testnode".toCharArray(), passwordPrompt, env, randomFrom(1024, 2048), days); assertTrue(terminal.getOutput().isEmpty()); - assertThat(caInfo.caCert, instanceOf(X509Certificate.class)); assertEquals(caInfo.caCert.getSubjectX500Principal().getName(), "CN=Elasticsearch Test Node,OU=elasticsearch,O=org"); assertThat(caInfo.privateKey.getAlgorithm(), containsString("RSA")); assertEquals(2048, ((RSAKey) caInfo.privateKey).getModulus().bitLength()); assertFalse(caInfo.generated); + long daysBetween = ChronoUnit.DAYS.between(caInfo.caCert.getNotBefore().toInstant(), caInfo.caCert.getNotAfter().toInstant()); + assertEquals(1460L, daysBetween); // test generation final boolean passwordProtected = randomBoolean(); @@ -303,13 +309,16 @@ public class CertificateToolTests extends ESTestCase { password = "testnode".toCharArray(); } final int keysize = randomFrom(1024, 2048); - caInfo = CertificateTool.getCAInfo(terminal, "CN=foo bar", null, null, password, passwordProtected && passwordPrompt, env, keysize); + caInfo = CertificateTool.getCAInfo(terminal, "CN=foo bar", null, null, password, passwordProtected && passwordPrompt, env, + keysize, days); assertTrue(terminal.getOutput().isEmpty()); assertThat(caInfo.caCert, instanceOf(X509Certificate.class)); assertEquals(caInfo.caCert.getSubjectX500Principal().getName(), "CN=foo bar"); assertThat(caInfo.privateKey.getAlgorithm(), containsString("RSA")); assertTrue(caInfo.generated); assertEquals(keysize, ((RSAKey) caInfo.privateKey).getModulus().bitLength()); + daysBetween = ChronoUnit.DAYS.between(caInfo.caCert.getNotBefore().toInstant(), caInfo.caCert.getNotAfter().toInstant()); + assertEquals(days, (int) daysBetween); } public void testNameValues() throws Exception { diff --git a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/SSLServiceTests.java b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/SSLServiceTests.java index 2622fa4b239..ad837f4ffb6 100644 --- a/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/SSLServiceTests.java +++ b/elasticsearch/x-pack/src/test/java/org/elasticsearch/xpack/ssl/SSLServiceTests.java @@ -5,9 +5,17 @@ */ package org.elasticsearch.xpack.ssl; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.conn.ssl.DefaultHostnameVerifier; +import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -15,10 +23,14 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.XPackSettings; + +import org.mockito.ArgumentCaptor; import org.junit.Before; +import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLParameters; import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; import java.nio.file.Path; @@ -30,11 +42,16 @@ import static 
org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class SSLServiceTests extends ESTestCase { @@ -283,6 +300,43 @@ public class SSLServiceTests extends ESTestCase { } } + public void testSSLStrategy() { + // this just exhaustively verifies that the right things are called and that it uses the right parameters + Settings settings = Settings.builder().build(); + SSLService sslService = mock(SSLService.class); + SSLConfiguration sslConfig = mock(SSLConfiguration.class); + SSLParameters sslParameters = mock(SSLParameters.class); + SSLContext sslContext = mock(SSLContext.class); + String[] protocols = new String[] { "protocols" }; + String[] ciphers = new String[] { "ciphers!!!" }; + String[] supportedCiphers = new String[] { "supported ciphers" }; + List requestedCiphers = new ArrayList<>(0); + VerificationMode mode = randomFrom(VerificationMode.values()); + ArgumentCaptor verifier = ArgumentCaptor.forClass(HostnameVerifier.class); + SSLIOSessionStrategy sslStrategy = mock(SSLIOSessionStrategy.class); + + when(sslService.sslConfiguration(settings)).thenReturn(sslConfig); + when(sslService.sslContext(sslConfig)).thenReturn(sslContext); + when(sslService.supportedCiphers(supportedCiphers, requestedCiphers, false)).thenReturn(ciphers); + when(sslService.sslParameters(sslContext)).thenReturn(sslParameters); + when(sslParameters.getCipherSuites()).thenReturn(supportedCiphers); + when(sslConfig.supportedProtocols()).thenReturn(Arrays.asList(protocols)); + when(sslConfig.cipherSuites()).thenReturn(requestedCiphers); + when(sslConfig.verificationMode()).thenReturn(mode); + when(sslService.sslIOSessionStrategy(eq(sslContext), eq(protocols), eq(ciphers), verifier.capture())).thenReturn(sslStrategy); + + // ensure it actually goes through and calls the real method + when(sslService.sslIOSessionStrategy(settings)).thenCallRealMethod(); + + assertThat(sslService.sslIOSessionStrategy(settings), sameInstance(sslStrategy)); + + if (mode.isHostnameVerificationEnabled()) { + assertThat(verifier.getValue(), instanceOf(DefaultHostnameVerifier.class)); + } else { + assertThat(verifier.getValue(), sameInstance(NoopHostnameVerifier.INSTANCE)); + } + } + @Network public void testThatSSLContextWithoutSettingsWorks() throws Exception { SSLService sslService = new SSLService(Settings.EMPTY, env); @@ -291,7 +345,7 @@ public class SSLServiceTests extends ESTestCase { // Execute a GET on a site known to have a valid certificate signed by a trusted public CA // This will result in a SSLHandshakeException if the SSLContext does not trust the CA, but the default // truststore trusts all common public CAs so the handshake will succeed - client.execute(new HttpGet("https://www.elastic.co/")); + client.execute(new HttpGet("https://www.elastic.co/")).close(); } } @@ -308,4 +362,55 @@ public class SSLServiceTests extends ESTestCase { client.execute(new HttpGet("https://www.elastic.co/")).close(); } } + + @Network + public void 
testThatSSLIOSessionStrategyWithoutSettingsWorks() throws Exception { + SSLService sslService = new SSLService(Settings.EMPTY, env); + SSLIOSessionStrategy sslStrategy = sslService.sslIOSessionStrategy(Settings.EMPTY); + try (CloseableHttpAsyncClient client = HttpAsyncClientBuilder.create().setSSLStrategy(sslStrategy).build()) { + client.start(); + + // Execute a GET on a site known to have a valid certificate signed by a trusted public CA + // This will result in a SSLHandshakeException if the SSLContext does not trust the CA, but the default + // truststore trusts all common public CAs so the handshake will succeed + client.execute(new HttpHost("elastic.co", 80, "https"), new HttpGet("/"), new AssertionCallback()); + } + } + + @Network + public void testThatSSLIOSessionStrategyTrustsJDKTrustedCAs() throws Exception { + Settings settings = Settings.builder() + .put("xpack.ssl.keystore.path", testclientStore) + .put("xpack.ssl.keystore.password", "testclient") + .build(); + SSLIOSessionStrategy sslStrategy = new SSLService(settings, env).sslIOSessionStrategy(Settings.EMPTY); + try (CloseableHttpAsyncClient client = HttpAsyncClientBuilder.create().setSSLStrategy(sslStrategy).build()) { + client.start(); + + // Execute a GET on a site known to have a valid certificate signed by a trusted public CA which will succeed because the JDK + // certs are trusted by default + client.execute(new HttpHost("elastic.co", 80, "https"), new HttpGet("/"), new AssertionCallback()); + } + } + + class AssertionCallback implements FutureCallback<HttpResponse> { + + @Override + public void completed(HttpResponse result) { + assertThat(result.getStatusLine().getStatusCode(), lessThan(300)); + } + + @Override + public void failed(Exception ex) { + logger.error(ex); + + fail(ex.toString()); + } + + @Override + public void cancelled() { + fail("The request was cancelled for some reason"); + } + } + } diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.0.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.0.zip new file mode 100644 index 00000000000..38c42da442e Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.0.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.1.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.1.zip new file mode 100644 index 00000000000..dc351c0fcef Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.1.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.2.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.2.zip new file mode 100644 index 00000000000..d706c58f5da Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.0.2.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.0.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.0.zip new file mode 100644 index 00000000000..aae8a1091f7 Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.0.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.1.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.1.zip new file mode 100644 index 00000000000..264d1164a4a Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.1.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.2.zip
b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.2.zip new file mode 100644 index 00000000000..47da54c5097 Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.1.2.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.0.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.0.zip new file mode 100644 index 00000000000..48ccb64bc6f Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.0.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.1.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.1.zip new file mode 100644 index 00000000000..d1a18fc1850 Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.1.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.2.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.2.zip new file mode 100644 index 00000000000..cf47dc5d8e4 Binary files /dev/null and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.2.2.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.0.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.0.zip index 6de975d614b..38bde376ab8 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.0.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.0.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.1.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.1.zip index 26925d7090d..b7075c15099 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.1.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.1.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.2.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.2.zip index 017a633f5b8..9e5154340c6 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.2.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.2.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.3.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.3.zip index 6dae672f8d9..bd2257dcc60 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.3.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.3.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.4.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.4.zip index defdf479ef0..aed4058b54f 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.4.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.4.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.5.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.5.zip index 91f33818f1b..fb8351bd1aa 100644 Binary files a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.5.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.3.5.zip differ diff --git a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.4.0.zip b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.4.0.zip index 8977cdf8cbf..e596d4b40ba 100644 Binary files 
a/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.4.0.zip and b/elasticsearch/x-pack/src/test/resources/indices/bwc/x-pack-2.4.0.zip differ diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index daa21ac5d89..b96fed9f746 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.AckedRequest; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; @@ -22,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.watcher.watch.WatchStore; import java.util.concurrent.CountDownLatch; @@ -119,34 +121,33 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste } if (!event.localNodeMaster()) { - if (watcherService.state() != WatcherState.STARTED) { - // to avoid unnecessary forking of threads... - return; + if (watcherService.state() == WatcherState.STARTED) { + // We're no longer the master so we need to stop the watcher. + // Stopping the watcher may take a while since it will wait on the scheduler to complete shutdown, + // so we fork here so that we don't wait too long. Other events may need to be processed and + // other cluster state listeners may need to be executed as well for this event. + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> stop(false)); } - - // We're no longer the master so we need to stop the watcher. - // Stopping the watcher may take a while since it will wait on the scheduler to complete shutdown, - // so we fork here so that we don't wait too long. Other events may need to be processed and - // other cluster state listeners may need to be executed as well for this event. - threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() { - @Override - public void run() { - stop(false); - } - }); } else { - if (watcherService.state() != WatcherState.STOPPED) { - // to avoid unnecessary forking of threads... 
- return; - } + if (watcherService.state() == WatcherState.STOPPED) { + final ClusterState state = event.state(); + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> start(state, false)); + } else { + boolean isWatchIndexDeleted = event.indicesDeleted().stream() + .filter(index -> WatchStore.INDEX.equals(index.getName())) + .findAny() + .isPresent(); - final ClusterState state = event.state(); - threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() { - @Override - public void run() { - start(state, false); + boolean isWatchIndexOpenInPreviousClusterState = event.previousState().metaData().hasIndex(WatchStore.INDEX) && + event.previousState().metaData().index(WatchStore.INDEX).getState() == IndexMetaData.State.OPEN; + boolean isWatchIndexClosedInCurrentClusterState = event.state().metaData().hasIndex(WatchStore.INDEX) && + event.state().metaData().index(WatchStore.INDEX).getState() == IndexMetaData.State.CLOSE; + boolean hasWatcherIndexBeenClosed = isWatchIndexOpenInPreviousClusterState && isWatchIndexClosedInCurrentClusterState; + + if (isWatchIndexDeleted || hasWatcherIndexBeenClosed) { + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> watcherService.watchIndexDeletedOrClosed()); } - }); + } } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index fb66c4d5062..174eb77fa2f 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -16,9 +16,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.xpack.support.clock.Clock; import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; -import org.elasticsearch.xpack.support.clock.Clock; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.watch.WatchLockService; @@ -292,4 +292,13 @@ public class WatcherService extends AbstractComponent { innerMap.putAll(watchStore.usageStats()); return innerMap; } + + /** + * Something deleted or closed the {@link WatchStore#INDEX} and thus we need to do some cleanup to prevent further execution of watches + * as those watches cannot be updated anymore + */ + public void watchIndexDeletedOrClosed() { + watchStore.clearWatchesInMemory(); + executionService.clearExecutions(); + } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionBuilders.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionBuilders.java index fc4239856d5..493f0c7e2bc 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionBuilders.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionBuilders.java @@ -18,9 +18,6 @@ import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction; import org.elasticsearch.xpack.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.common.text.TextTemplate; -/** - * - */ public final class ActionBuilders 
{ private ActionBuilders() { @@ -47,11 +44,7 @@ public final class ActionBuilders { } public static LoggingAction.Builder loggingAction(String text) { - return loggingAction(TextTemplate.inline(text)); - } - - public static LoggingAction.Builder loggingAction(TextTemplate.Builder text) { - return loggingAction(text.build()); + return loggingAction(new TextTemplate(text)); } public static LoggingAction.Builder loggingAction(TextTemplate text) { @@ -59,20 +52,13 @@ public final class ActionBuilders { } public static HipChatAction.Builder hipchatAction(String message) { - return hipchatAction(TextTemplate.inline(message)); + return hipchatAction(new TextTemplate(message)); } public static HipChatAction.Builder hipchatAction(String account, String body) { - return hipchatAction(account, TextTemplate.inline(body)); + return hipchatAction(account, new TextTemplate(body)); } - public static HipChatAction.Builder hipchatAction(TextTemplate.Builder body) { - return hipchatAction(body.build()); - } - - public static HipChatAction.Builder hipchatAction(String account, TextTemplate.Builder body) { - return hipchatAction(account, body.build()); - } public static HipChatAction.Builder hipchatAction(TextTemplate body) { return hipchatAction(null, body); diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java index ead7a5a30a9..219684a8b71 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java @@ -33,6 +33,8 @@ import org.elasticsearch.xpack.watcher.watch.Watch; import java.io.IOException; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + /** * */ @@ -180,7 +182,8 @@ public class ActionWrapper implements ToXContent { builder.startObject(); TimeValue throttlePeriod = throttler.throttlePeriod(); if (throttlePeriod != null) { - builder.field(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), throttlePeriod); + builder.timeValueField(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), + Throttler.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); } if (condition != null) { builder.startObject(Watch.Field.CONDITION.getPreferredName()) @@ -218,8 +221,10 @@ public class ActionWrapper implements ToXContent { } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Transform.Field.TRANSFORM)) { transform = transformRegistry.parse(watchId, parser); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Throttler.Field.THROTTLE_PERIOD)) { + throttlePeriod = timeValueMillis(parser.longValue()); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Throttler.Field.THROTTLE_PERIOD_HUMAN)) { try { - throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Throttler.Field.THROTTLE_PERIOD.toString()); + throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Throttler.Field.THROTTLE_PERIOD_HUMAN.toString()); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse action [{}/{}]. 
failed to parse field [{}] as time value", pe, watchId, actionId, currentFieldName); diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java index bd8364e820a..f6ef32cd9c0 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java @@ -19,9 +19,6 @@ import org.elasticsearch.xpack.common.text.TextTemplate; import java.io.IOException; -/** - * - */ public class HipChatAction implements Action { public static final String TYPE = "hipchat"; @@ -182,18 +179,10 @@ public class HipChatAction implements Action { return this; } - public Builder addRooms(TextTemplate.Builder... rooms) { - TextTemplate[] templates = new TextTemplate[rooms.length]; - for (int i = 0; i < rooms.length; i++) { - templates[i] = rooms[i].build(); - } - return addRooms(templates); - } - public Builder addRooms(String... rooms) { TextTemplate[] templates = new TextTemplate[rooms.length]; for (int i = 0; i < rooms.length; i++) { - templates[i] = TextTemplate.inline(rooms[i]).build(); + templates[i] = new TextTemplate(rooms[i]); } return addRooms(templates); } @@ -204,18 +193,10 @@ public class HipChatAction implements Action { return this; } - public Builder addUsers(TextTemplate.Builder... users) { - TextTemplate[] templates = new TextTemplate[users.length]; - for (int i = 0; i < users.length; i++) { - templates[i] = users[i].build(); - } - return addUsers(templates); - } - public Builder addUsers(String... users) { TextTemplate[] templates = new TextTemplate[users.length]; for (int i = 0; i < users.length; i++) { - templates[i] = TextTemplate.inline(users[i]).build(); + templates[i] = new TextTemplate(users[i]); } return addUsers(templates); } @@ -235,10 +216,6 @@ public class HipChatAction implements Action { return this; } - public Builder setColor(TextTemplate.Builder color) { - return setColor(color.build()); - } - public Builder setColor(HipChatMessage.Color color) { return setColor(color.asTemplate()); } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java index 1bf8469012c..e529a75ef12 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java @@ -19,6 +19,8 @@ import org.joda.time.DateTimeZone; import java.io.IOException; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + /** * */ @@ -96,7 +98,7 @@ public class IndexAction implements Action { builder.field(Field.EXECUTION_TIME_FIELD.getPreferredName(), executionTimeField); } if (timeout != null) { - builder.field(Field.TIMEOUT.getPreferredName(), timeout); + builder.timeValueField(Field.TIMEOUT.getPreferredName(), Field.TIMEOUT_HUMAN.getPreferredName(), timeout); } if (dynamicNameTimeZone != null) { builder.field(Field.DYNAMIC_NAME_TIMEZONE.getPreferredName(), dynamicNameTimeZone); @@ -123,13 +125,21 @@ public class IndexAction implements Action { throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. 
failed to parse index name value for " + "field [{}]", pe, TYPE, watchId, actionId, currentFieldName); } + } else if (token == XContentParser.Token.VALUE_NUMBER) { + if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { + timeout = timeValueMillis(parser.longValue()); + } else { + throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. unexpected number field [{}]", TYPE, + watchId, actionId, currentFieldName); + } } else if (token == XContentParser.Token.VALUE_STRING) { if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DOC_TYPE)) { docType = parser.text(); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.EXECUTION_TIME_FIELD)) { executionTimeField = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { - timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT.toString()); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + // Parser for human specified timeouts and 2.x compatibility + timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); @@ -266,7 +276,8 @@ public class IndexAction implements Action { ParseField SOURCE = new ParseField("source"); ParseField RESPONSE = new ParseField("response"); ParseField REQUEST = new ParseField("request"); - ParseField TIMEOUT = new ParseField("timeout"); + ParseField TIMEOUT = new ParseField("timeout_in_millis"); + ParseField TIMEOUT_HUMAN = new ParseField("timeout"); ParseField DYNAMIC_NAME_TIMEZONE = new ParseField("dynamic_name_timezone"); } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java index 496dc6c6804..0f93717072a 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java @@ -17,9 +17,6 @@ import org.elasticsearch.xpack.common.text.TextTemplate; import java.io.IOException; import java.util.Locale; -/** - * - */ public class LoggingAction implements Action { public static final String TYPE = "logging"; diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java index c3e1b5c02d1..07ba18bd9ae 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java @@ -43,6 +43,7 @@ public interface Throttler { } interface Field { - ParseField THROTTLE_PERIOD = new ParseField("throttle_period"); + ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); + ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java index 
3bc6c56aec0..9b635a0952f 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java @@ -155,7 +155,8 @@ public class WatchSourceBuilder implements ToXContent { } if (defaultThrottlePeriod != null) { - builder.field(Watch.Field.THROTTLE_PERIOD.getPreferredName(), defaultThrottlePeriod.toString()); + builder.timeValueField(Watch.Field.THROTTLE_PERIOD.getPreferredName(), + Watch.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), defaultThrottlePeriod); } builder.startObject(Watch.Field.ACTIONS.getPreferredName()); @@ -203,7 +204,8 @@ public class WatchSourceBuilder implements ToXContent { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (throttlePeriod != null) { - builder.field(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), throttlePeriod); + builder.timeValueField(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), + Throttler.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); } if (condition != null) { builder.startObject(Watch.Field.CONDITION.getPreferredName()) diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 35ad6a9f9da..4091f291652 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.common.stats.Counters; import org.elasticsearch.xpack.support.clock.Clock; import org.elasticsearch.xpack.watcher.Watcher; @@ -60,13 +61,14 @@ public class ExecutionService extends AbstractComponent { private final Clock clock; private final TimeValue defaultThrottlePeriod; private final TimeValue maxStopTimeout; + private final ThreadPool threadPool; private volatile CurrentExecutions currentExecutions = null; private final AtomicBoolean started = new AtomicBoolean(false); @Inject public ExecutionService(Settings settings, HistoryStore historyStore, TriggeredWatchStore triggeredWatchStore, WatchExecutor executor, - WatchStore watchStore, WatchLockService watchLockService, Clock clock) { + WatchStore watchStore, WatchLockService watchLockService, Clock clock, ThreadPool threadPool) { super(settings); this.historyStore = historyStore; this.triggeredWatchStore = triggeredWatchStore; @@ -76,6 +78,7 @@ public class ExecutionService extends AbstractComponent { this.clock = clock; this.defaultThrottlePeriod = DEFAULT_THROTTLE_PERIOD_SETTING.get(settings); this.maxStopTimeout = Watcher.MAX_STOP_TIMEOUT_SETTING.get(settings); + this.threadPool = threadPool; } public void start(ClusterState state) throws Exception { @@ -141,12 +144,7 @@ public class ExecutionService extends AbstractComponent { currentExecutions.add(watchExecution.createSnapshot()); } // Lets show the longest running watch first: - Collections.sort(currentExecutions, new Comparator() { - @Override - public int 
compare(WatchExecutionSnapshot e1, WatchExecutionSnapshot e2) { - return e1.executionTime().compareTo(e2.executionTime()); - } - }); + Collections.sort(currentExecutions, Comparator.comparing(WatchExecutionSnapshot::executionTime)); return currentExecutions; } @@ -163,12 +161,8 @@ public class ExecutionService extends AbstractComponent { queuedWatches.add(new QueuedWatch(executionTask.ctx)); } // Lets show the execution that pending the longest first: - Collections.sort(queuedWatches, new Comparator() { - @Override - public int compare(QueuedWatch e1, QueuedWatch e2) { - return e1.executionTime().compareTo(e2.executionTime()); - } - }); + + Collections.sort(queuedWatches, Comparator.comparing(QueuedWatch::executionTime)); return queuedWatches; } @@ -332,20 +326,36 @@ public class ExecutionService extends AbstractComponent { thread pool that executes the watches is completely busy, we don't lose the fact that the watch was triggered (it'll have its history record) */ - - private void executeAsync(WatchExecutionContext ctx, TriggeredWatch triggeredWatch) throws Exception { + private void executeAsync(WatchExecutionContext ctx, final TriggeredWatch triggeredWatch) { try { executor.execute(new WatchExecutionTask(ctx)); } catch (EsRejectedExecutionException e) { - String message = "failed to run triggered watch [" + triggeredWatch.id() + "] due to thread pool capacity"; - logger.debug("{}", message); - WatchRecord record = ctx.abortBeforeExecution(ExecutionState.FAILED, message); - if (ctx.overrideRecordOnConflict()) { - historyStore.forcePut(record); - } else { - historyStore.put(record); - } - triggeredWatchStore.delete(triggeredWatch.id()); + // we are most likely still on the transport thread here, so we cannot run heavy operations + // this means some offloading is needed for indexing the history record and deleting the triggered watch entry + threadPool.generic().execute(() -> { + String message = "failed to run triggered watch [" + triggeredWatch.id() + "] due to thread pool capacity"; + logger.debug("{}", message); + WatchRecord record = ctx.abortBeforeExecution(ExecutionState.FAILED, message); + try { + if (ctx.overrideRecordOnConflict()) { + historyStore.forcePut(record); + } else { + historyStore.put(record); + } + } catch (Exception exc) { + logger.error((Supplier) () -> + new ParameterizedMessage("Error storing watch history record for watch [{}] after thread pool rejection", + triggeredWatch.id()), exc); + } + + try { + triggeredWatchStore.delete(triggeredWatch.id()); + } catch (Exception exc) { + logger.error((Supplier) () -> + new ParameterizedMessage("Error deleting triggered watch store record for watch [{}] after thread pool " + + "rejection", triggeredWatch.id()), exc); + } + }); } } @@ -438,6 +448,15 @@ public class ExecutionService extends AbstractComponent { return counters.toMap(); } + /** + * This clears out the current executions and sets up new, empty current executions. + * This is needed because watcher keeps running when this method is called, so sealing the executions would be a bad idea. + */ + public void clearExecutions() { + currentExecutions.sealAndAwaitEmpty(maxStopTimeout); + currentExecutions = new CurrentExecutions(); + } + private static final class StartupExecutionContext extends TriggeredExecutionContext { public StartupExecutionContext(Watch watch, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) { diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java
b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index c1b64a9a630..b964d0b74b2 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -222,7 +222,7 @@ public class TriggeredWatchStore extends AbstractComponent { } } - public void delete(Wid wid) throws Exception { + public void delete(Wid wid) { ensureStarted(); accessLock.lock(); try { diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java index 6c12140a8a2..d9e8a1f075a 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java @@ -28,6 +28,7 @@ import java.util.HashSet; import java.util.Set; import static java.util.Collections.unmodifiableSet; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; public class SearchInput implements Input { @@ -99,7 +100,7 @@ public class SearchInput implements Input { builder.field(Field.EXTRACT.getPreferredName(), extractKeys); } if (timeout != null) { - builder.field(Field.TIMEOUT.getPreferredName(), timeout); + builder.timeValueField(Field.TIMEOUT.getPreferredName(), Field.TIMEOUT_HUMAN.getPreferredName(), timeout); } if (dynamicNameTimeZone != null) { builder.field(Field.DYNAMIC_NAME_TIMEZONE.getPreferredName(), dynamicNameTimeZone); @@ -144,7 +145,10 @@ public class SearchInput implements Input { watchId, currentFieldName); } } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { - timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT.toString()); + timeout = timeValueMillis(parser.longValue()); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + // Parser for human specified timeouts and 2.x compatibility + timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); @@ -238,7 +242,8 @@ public class SearchInput implements Input { public interface Field extends Input.Field { ParseField REQUEST = new ParseField("request"); ParseField EXTRACT = new ParseField("extract"); - ParseField TIMEOUT = new ParseField("timeout"); + ParseField TIMEOUT = new ParseField("timeout_in_millis"); + ParseField TIMEOUT_HUMAN = new ParseField("timeout"); ParseField DYNAMIC_NAME_TIMEZONE = new ParseField("dynamic_name_timezone"); } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java index ee75ccc112d..40651786c1f 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.support; import 
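The SearchInput change above renames the canonical field to timeout_in_millis while keeping the old human-readable timeout as a parse-only alias for watches written by 2.x. A simplified stand-in for that dual-field read path; the field names match the ParseFields above, but the parsing helpers are plain JDK code rather than WatcherDateTimeUtils:

final class TimeoutBwcParseSketch {
    // New documents carry "timeout_in_millis"; documents written by 2.x may still carry the human form "timeout".
    static long readTimeoutMillis(String fieldName, String rawValue) {
        if ("timeout_in_millis".equals(fieldName)) {
            return Long.parseLong(rawValue);                   // canonical long value
        }
        if ("timeout".equals(fieldName)) {
            return parseHumanTimeout(rawValue);                // 2.x style value such as "100s"
        }
        throw new IllegalArgumentException("unexpected timeout field [" + fieldName + "]");
    }

    // Greatly simplified stand-in for WatcherDateTimeUtils.parseTimeValue: seconds and millis only.
    static long parseHumanTimeout(String value) {
        if (value.endsWith("ms")) {
            return Long.parseLong(value.substring(0, value.length() - 2));
        }
        if (value.endsWith("s")) {
            return (long) (Double.parseDouble(value.substring(0, value.length() - 1)) * 1000);
        }
        throw new IllegalArgumentException("unsupported unit in [" + value + "]");
    }
}

SearchTransform and Watch apply the same rename to their timeout and throttle_period fields further down, so the pattern is not repeated there.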
org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.joda.DateMathParser; @@ -19,7 +20,10 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; /** * */ @@ -140,7 +144,7 @@ public class WatcherDateTimeUtils { } if (token == XContentParser.Token.VALUE_STRING) { try { - TimeValue value = TimeValue.parseTimeValue(parser.text(), null, settingName); + TimeValue value = parseTimeValueSupportingFractional(parser.text(), settingName); if (value.millis() < 0) { throw new ElasticsearchParseException("could not parse time value [{}]. Time value cannot be negative.", parser.text()); } @@ -154,6 +158,47 @@ public class WatcherDateTimeUtils { "instead", token); } + /** + * Parse a {@link TimeValue} with support for fractional values. + */ + public static TimeValue parseTimeValueSupportingFractional(@Nullable String sValue, String settingName) { + // This code is lifted almost straight from 2.x's TimeValue.java + Objects.requireNonNull(settingName); + if (sValue == null) { + return null; + } + try { + long millis; + String lowerSValue = sValue.toLowerCase(Locale.ROOT).trim(); + if (lowerSValue.endsWith("ms")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 2))); + } else if (lowerSValue.endsWith("s")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 1)) * 1000); + } else if (lowerSValue.endsWith("m")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 1)) * 60 * 1000); + } else if (lowerSValue.endsWith("h")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 1)) * 60 * 60 * 1000); + } else if (lowerSValue.endsWith("d")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 1)) * 24 * 60 * 60 * 1000); + } else if (lowerSValue.endsWith("w")) { + millis = (long) (Double.parseDouble(lowerSValue.substring(0, lowerSValue.length() - 1)) * 7 * 24 * 60 * 60 * 1000); + } else if (lowerSValue.equals("-1")) { + // Allow this special value to be unit-less: + millis = -1; + } else if (lowerSValue.equals("0")) { + // Allow this special value to be unit-less: + millis = 0; + } else { + throw new ElasticsearchParseException( + "Failed to parse setting [{}] with value [{}] as a time value: unit is missing or unrecognized", + settingName, sValue); + } + return new TimeValue(millis, TimeUnit.MILLISECONDS); + } catch (NumberFormatException e) { + throw new ElasticsearchParseException("Failed to parse [{}]", e, sValue); + } + } + private static class ClockNowCallable implements Callable { private final Clock clock; diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java index eb5c563abd1..e6a252d60b8 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java @@ -5,6 +5,7 @@ */ package 
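parseTimeValueSupportingFractional() exists because 2.x rendered time values with a single decimal place: a watch that asked for "100s" was stored back as "1.6m", and 1.6m is 96,000 ms rather than 100,000. A cut-down sketch of the fractional parse plus that arithmetic, limited to millis, seconds and minutes (the full method above also handles h, d, w and the unit-less -1 and 0):

import java.util.Locale;

public class FractionalTimeValueExample {
    // Cut-down version of the parser above: millis, seconds and minutes only.
    static long parseMillis(String value) {
        String v = value.toLowerCase(Locale.ROOT).trim();
        if (v.endsWith("ms")) {
            return (long) Double.parseDouble(v.substring(0, v.length() - 2));
        } else if (v.endsWith("s")) {
            return (long) (Double.parseDouble(v.substring(0, v.length() - 1)) * 1000);
        } else if (v.endsWith("m")) {
            return (long) (Double.parseDouble(v.substring(0, v.length() - 1)) * 60 * 1000);
        }
        throw new IllegalArgumentException("unit is missing or unrecognized in [" + value + "]");
    }

    public static void main(String[] args) {
        // 2.x re-rendered the original "100s" as "1.6m", so an upgraded watch reads back as:
        System.out.println(parseMillis("1.6m"));   // 96000 -- hence the 96s expectations in the BWC test below
    }
}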
org.elasticsearch.xpack.watcher.support; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -180,10 +181,19 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C .build(); request.settings(updatedSettings); } - PutIndexTemplateResponse response = client.putTemplate(request); - if (response.isAcknowledged() == false) { - logger.error("Error adding watcher template [{}], request was not acknowledged", config.getTemplateName()); - } + client.putTemplate(request, new ActionListener() { + @Override + public void onResponse(PutIndexTemplateResponse response) { + if (response.isAcknowledged() == false) { + logger.error("Error adding watcher template [{}], request was not acknowledged", config.getTemplateName()); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Error adding watcher template [{}]", e, config.getTemplateName()); + } + }); }); } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/init/proxy/WatcherClientProxy.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/init/proxy/WatcherClientProxy.java index 68a192351c3..f6b24cfdd6d 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/init/proxy/WatcherClientProxy.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/init/proxy/WatcherClientProxy.java @@ -24,11 +24,10 @@ import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.common.init.proxy.ClientProxy; +import org.elasticsearch.xpack.security.InternalClient; /** * A lazily initialized proxy to an elasticsearch {@link Client}. 
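The template registry above stops blocking on putTemplate(...).actionGet(...) and instead hands the request an ActionListener, so the caller no longer waits and failures are reported through onFailure. A rough JDK-only sketch of the same shape, with CompletableFuture standing in for the listener; the method and parameter names are illustrative:

import java.util.concurrent.CompletableFuture;

public class AsyncTemplatePutSketch {
    // CompletableFuture<Boolean> stands in for the acknowledged flag delivered to the ActionListener above.
    static void putTemplate(String templateName, CompletableFuture<Boolean> acknowledged) {
        acknowledged.whenComplete((ack, failure) -> {
            if (failure != null) {
                System.err.println("Error adding watcher template [" + templateName + "]: " + failure);
            } else if (!ack) {
                System.err.println("Error adding watcher template [" + templateName + "], request was not acknowledged");
            }
            // nothing to do on success, exactly like the onResponse branch above
        });
    }
}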
Inject this proxy whenever a client @@ -107,8 +106,8 @@ public class WatcherClientProxy extends ClientProxy { return client.admin().indices().refresh(preProcess(request)).actionGet(defaultSearchTimeout); } - public PutIndexTemplateResponse putTemplate(PutIndexTemplateRequest request) { + public void putTemplate(PutIndexTemplateRequest request, ActionListener listener) { preProcess(request); - return client.admin().indices().putTemplate(request).actionGet(defaultIndexTimeout); + client.admin().indices().putTemplate(request, listener); } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java index ffc8859d590..6bdee9e4fb8 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java @@ -23,6 +23,8 @@ import org.joda.time.DateTimeZone; import java.io.IOException; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + public class SearchTransform implements Transform { public static final String TYPE = "search"; @@ -81,7 +83,7 @@ public class SearchTransform implements Transform { builder.field(Field.REQUEST.getPreferredName(), request); } if (timeout != null) { - builder.field(Field.TIMEOUT.getPreferredName(), timeout); + builder.timeValueField(Field.TIMEOUT.getPreferredName(), Field.TIMEOUT_HUMAN.getPreferredName(), timeout); } if (dynamicNameTimeZone != null) { builder.field(Field.DYNAMIC_NAME_TIMEZONE.getPreferredName(), dynamicNameTimeZone); @@ -110,7 +112,10 @@ public class SearchTransform implements Transform { TYPE, watchId, currentFieldName); } } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { - timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT.toString()); + timeout = timeValueMillis(parser.longValue()); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + // Parser for human specified timeouts and 2.x compatibility + timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); @@ -192,7 +197,8 @@ public class SearchTransform implements Transform { public interface Field extends Transform.Field { ParseField REQUEST = new ParseField("request"); - ParseField TIMEOUT = new ParseField("timeout"); + ParseField TIMEOUT = new ParseField("timeout_in_millis"); + ParseField TIMEOUT_HUMAN = new ParseField("timeout"); ParseField DYNAMIC_NAME_TIMEZONE = new ParseField("dynamic_name_timezone"); } } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java index cbe3d90bffd..16fcdad641f 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java @@ -51,6 +51,7 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import static java.util.Collections.unmodifiableMap; +import static 
org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentHelper.createParser; import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException; @@ -183,11 +184,8 @@ public class Watch implements TriggerEngine.Job, ToXContent { builder.field(Field.TRANSFORM.getPreferredName()).startObject().field(transform.type(), transform, params).endObject(); } if (throttlePeriod != null) { - if (builder.humanReadable()) { - builder.field(Field.THROTTLE_PERIOD.getPreferredName(), throttlePeriod.format(PeriodType.seconds())); - } else { - builder.field(Field.THROTTLE_PERIOD.getPreferredName(), throttlePeriod); - } + builder.timeValueField(Field.THROTTLE_PERIOD.getPreferredName(), + Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); } builder.field(Field.ACTIONS.getPreferredName(), actions, params); if (metadata != null) { @@ -307,8 +305,11 @@ public class Watch implements TriggerEngine.Job, ToXContent { } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TRANSFORM)) { transform = transformRegistry.parse(id, parser); } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.THROTTLE_PERIOD)) { + throttlePeriod = timeValueMillis(parser.longValue()); + } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.THROTTLE_PERIOD_HUMAN)) { + // Parser for human specified and 2.x backwards compatible throttle period try { - throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Field.THROTTLE_PERIOD.toString()); + throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Field.THROTTLE_PERIOD_HUMAN.toString()); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse watch [{}]. 
failed to parse time value for field [{}]", pe, id, currentFieldName); @@ -360,7 +361,8 @@ public class Watch implements TriggerEngine.Job, ToXContent { ParseField CONDITION = new ParseField("condition"); ParseField ACTIONS = new ParseField("actions"); ParseField TRANSFORM = new ParseField("transform"); - ParseField THROTTLE_PERIOD = new ParseField("throttle_period"); + ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); + ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); ParseField METADATA = new ParseField("metadata"); ParseField STATUS = new ParseField("_status"); } diff --git a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStore.java b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStore.java index 4975baaa470..43305267267 100644 --- a/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStore.java +++ b/elasticsearch/x-pack/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStore.java @@ -333,6 +333,10 @@ public class WatchStore extends AbstractComponent { } } + public void clearWatchesInMemory() { + watches.clear(); + } + public class WatchPut { private final Watch previous; diff --git a/elasticsearch/x-pack/watcher/src/main/resources/watches.json b/elasticsearch/x-pack/watcher/src/main/resources/watches.json index 87e11945400..3c9ca1981e3 100644 --- a/elasticsearch/x-pack/watcher/src/main/resources/watches.json +++ b/elasticsearch/x-pack/watcher/src/main/resources/watches.json @@ -37,6 +37,11 @@ "index" : false, "doc_values" : false }, + "throttle_period_in_millis": { + "type" : "long", + "index" : false, + "doc_values" : false + }, "transform": { "type" : "object", "enabled" : false, diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/OldWatcherIndicesBackwardsCompatibilityIT.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/OldWatcherIndicesBackwardsCompatibilityIT.java new file mode 100644 index 00000000000..102a2ac837f --- /dev/null +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/OldWatcherIndicesBackwardsCompatibilityIT.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher; + +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; +import org.elasticsearch.AbstractOldXPackIndicesBackwardsCompatibilityTestCase; +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.common.text.TextTemplate; +import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction; +import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; +import org.elasticsearch.xpack.watcher.client.WatcherClient; +import org.elasticsearch.xpack.watcher.condition.always.AlwaysCondition; +import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse; +import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule.Interval; +import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; + +import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +/** + * Tests for watcher indexes created before 5.0. + */ +public class OldWatcherIndicesBackwardsCompatibilityIT extends AbstractOldXPackIndicesBackwardsCompatibilityTestCase { + @Override + public Settings nodeSettings(int ord) { + return Settings.builder() + .put(super.nodeSettings(ord)) + .put(XPackSettings.WATCHER_ENABLED.getKey(), true) + .build(); + } + + public void testAllVersionsTested() throws Exception { + SortedSet expectedVersions = new TreeSet<>(); + for (Version v : VersionUtils.allVersions()) { + if (v.before(Version.V_2_0_0)) continue; // unsupported indexes + if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself + if (v.isBeta() == true || v.isAlpha() == true || v.isRC() == true) continue; // don't check alphas etc + expectedVersions.add("x-pack-" + v.toString() + ".zip"); + } + for (String index : dataFiles) { + if (expectedVersions.remove(index) == false) { + logger.warn("Old indexes tests contain extra index: {}", index); + } + } + if (expectedVersions.isEmpty() == false) { + StringBuilder msg = new StringBuilder("Old index tests are missing indexes:"); + for (String expected : expectedVersions) { + msg.append("\n" + expected); + } + fail(msg.toString()); + } + } + + @Override + public void testOldIndexes() throws Exception { + super.testOldIndexes(); + // Wait for watcher to fully start before shutting down + assertBusy(() -> { + assertEquals(WatcherState.STARTED, internalCluster().getInstance(WatcherService.class).state()); + }); + // Shutdown watcher on the last node so that the test can shutdown cleanly + internalCluster().getInstance(WatcherLifeCycleService.class).stop(); + } + + @Override + protected void checkVersion(Version version) throws Exception { + // Wait for watcher to actually start.... 
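Both testOldIndexes() and checkVersion() in this new test lean on assertBusy to wait until WatcherService reports STARTED before touching the cluster. Roughly, assertBusy retries a check until it passes or a deadline expires; a minimal sketch of that idea, where the helper and the watcherState() accessor are hypothetical rather than ESTestCase API:

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public class AwaitBusySketch {
    // Rough equivalent of ESTestCase.assertBusy: retry the check until it passes or the deadline expires.
    static void awaitTrue(BooleanSupplier check, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        while (check.getAsBoolean() == false) {
            if (System.nanoTime() > deadline) {
                throw new AssertionError("condition not met within " + timeout + " " + unit);
            }
            Thread.sleep(50);
        }
    }

    // Usage in the spirit of the test above (watcherState() is a hypothetical accessor):
    // awaitTrue(() -> watcherState() == WatcherState.STARTED, 10, TimeUnit.SECONDS);
}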
+ assertBusy(() -> { + assertEquals(WatcherState.STARTED, internalCluster().getInstance(WatcherService.class).state()); + }); + assertWatchIndexContentsWork(version); + assertBasicWatchInteractions(); + } + + void assertWatchIndexContentsWork(Version version) throws Exception { + WatcherClient watcherClient = new WatcherClient(client()); + + // Fetch a basic watch + GetWatchResponse bwcWatch = watcherClient.prepareGetWatch("bwc_watch").get(); + assertTrue(bwcWatch.isFound()); + assertNotNull(bwcWatch.getSource()); + Map source = bwcWatch.getSource().getAsMap(); + assertEquals(1000, source.get("throttle_period_in_millis")); + Map input = (Map) source.get("input"); + Map search = (Map) input.get("search"); + assertEquals(96000, search.get("timeout_in_millis")); // We asked for 100s but 2.x converted that to 1.6m which is actually 96s... + Map actions = (Map) source.get("actions"); + Map indexPayload = (Map) actions.get("index_payload"); + Map transform = (Map) indexPayload.get("transform"); + search = (Map) transform.get("search"); + assertEquals(96000, search.get("timeout_in_millis")); // We asked for 100s but 2.x converted that to 1.6m which is actually 96s... + Map index = (Map) indexPayload.get("index"); + assertEquals("bwc_watch_index", index.get("index")); + assertEquals("bwc_watch_type", index.get("doc_type")); + assertEquals(96000, index.get("timeout_in_millis")); // We asked for 100s but 2.x converted that to 1.6m which is actually 96s... + + // Fetch a watch with "fun" throttle periods + bwcWatch = watcherClient.prepareGetWatch("bwc_throttle_period").get(); + assertTrue(bwcWatch.isFound()); + assertNotNull(bwcWatch.getSource()); + source = bwcWatch.getSource().getAsMap(); + // We asked for 100s but 2.x converted that to 1.6m which is actually 96s... + assertEquals(96000, source.get("throttle_period_in_millis")); + actions = (Map) source.get("actions"); + indexPayload = (Map) actions.get("index_payload"); + // We asked for 100s but 2.x converted that to 1.6m which is actually 96s... + assertEquals(96000, indexPayload.get("throttle_period_in_millis")); + + if (version.onOrAfter(Version.V_2_3_0)) { + /* Fetch a watch with a funny timeout to verify loading fractional time values. This watch is only built in >= 2.3 because + * email attachments aren't supported before that. 
*/ + bwcWatch = watcherClient.prepareGetWatch("bwc_funny_timeout").get(); + assertTrue(bwcWatch.isFound()); + assertNotNull(bwcWatch.getSource()); + source = bwcWatch.getSource().getAsMap(); + actions = (Map) source.get("actions"); + Map work = (Map) actions.get("work"); + Map email = (Map) work.get("email"); + Map attachments = (Map) email.get("attachments"); + Map attachment = (Map) attachments.get("test_report.pdf"); + Map http = (Map) attachment.get("http"); + Map request = (Map) http.get("request"); + assertEquals(96000, request.get("read_timeout_millis")); + assertEquals("https", request.get("scheme")); + assertEquals("example.com", request.get("host")); + assertEquals("{{ctx.metadata.report_url}}", request.get("path")); + assertEquals(8443, request.get("port")); + Map auth = (Map) request.get("auth"); + Map basic = (Map) auth.get("basic"); + assertThat(basic, hasEntry("username", "Aladdin")); + // password doesn't come back because it is hidden + assertThat(basic, not(hasKey("password"))); + } + + SearchResponse history = client().prepareSearch(".watch_history*").get(); + assertThat(history.getHits().totalHits(), greaterThanOrEqualTo(10L)); + } + + void assertBasicWatchInteractions() throws Exception { + WatcherClient watcherClient = new WatcherClient(client()); + + PutWatchResponse put = watcherClient.preparePutWatch("new_watch").setSource(new WatchSourceBuilder() + .condition(AlwaysCondition.INSTANCE) + .trigger(ScheduleTrigger.builder(new IntervalSchedule(Interval.seconds(1)))) + .addAction("awesome", LoggingAction.builder(new TextTemplate("test")))).get(); + assertTrue(put.isCreated()); + assertEquals(1, put.getVersion()); + + put = watcherClient.preparePutWatch("new_watch").setSource(new WatchSourceBuilder() + .condition(AlwaysCondition.INSTANCE) + .trigger(ScheduleTrigger.builder(new IntervalSchedule(Interval.seconds(1)))) + .addAction("awesome", LoggingAction.builder(new TextTemplate("test")))).get(); + assertFalse(put.isCreated()); + assertEquals(2, put.getVersion()); + + GetWatchResponse get = watcherClient.prepareGetWatch(put.getId()).get(); + assertTrue(get.isFound()); + { + Map source = get.getSource().getAsMap(); + Map actions = (Map) source.get("actions"); + Map awesome = (Map) actions.get("awesome"); + Map logging = (Map) awesome.get("logging"); + assertEquals("info", logging.get("level")); + assertEquals("test", logging.get("text")); + } + } + +} diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherF.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherF.java index 6c7c7fd23c1..23266f81de3 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherF.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherF.java @@ -43,7 +43,7 @@ public class WatcherF { // this is for the Watcher Test account in HipChat settings.put("xpack.notification.hipchat.account.user.profile", "user"); - settings.put("xpack.notification.hipchat.account.user.auth_token", "FYVx16oDH78ZW9r13wtXbcszyoyA7oX5tiMWg9X0"); + settings.put("xpack.notification.hipchat.account.user.auth_token", "12rNQUuQ0wObfRVeoVD8OeoAnosCT8tSTV5UjsII"); // this is for the `test-watcher-v1` notification token (hipchat) settings.put("xpack.notification.hipchat.account.v1.profile", "v1"); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java 
b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java index 193e927cf43..feff0267558 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java @@ -5,18 +5,22 @@ */ package org.elasticsearch.xpack.watcher; +import org.elasticsearch.Version; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.watcher.watch.WatchStore; import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -165,4 +169,46 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { verify(watcherService, never()).start(any(ClusterState.class)); verify(watcherService, never()).stop(); } + + public void testWatchIndexDeletion() throws Exception { + DiscoveryNodes discoveryNodes = new DiscoveryNodes.Builder().masterNodeId("id1").localNodeId("id1").build(); + // old cluster state that contains watcher index + Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + ClusterState oldClusterState = ClusterState.builder(new ClusterName("my-cluster")) + .metaData(new MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX) + .settings(indexSettings).numberOfReplicas(0).numberOfShards(1))) + .nodes(discoveryNodes).build(); + + // new cluster state that does not contain watcher index + ClusterState newClusterState = ClusterState.builder(new ClusterName("my-cluster")).nodes(discoveryNodes).build(); + when(watcherService.state()).thenReturn(WatcherState.STARTED); + + lifeCycleService.clusterChanged(new ClusterChangedEvent("any", newClusterState, oldClusterState)); + verify(watcherService, never()).start(any(ClusterState.class)); + verify(watcherService, never()).stop(); + verify(watcherService, times(1)).watchIndexDeletedOrClosed(); + } + + public void testWatchIndexClosing() throws Exception { + DiscoveryNodes discoveryNodes = new DiscoveryNodes.Builder().masterNodeId("id1").localNodeId("id1").build(); + // old cluster state that contains watcher index + Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + ClusterState oldClusterState = ClusterState.builder(new ClusterName("my-cluster")) + .metaData(new MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX) + .settings(indexSettings).numberOfReplicas(0).numberOfShards(1))) + .nodes(discoveryNodes).build(); + + // new cluster state with a closed watcher index + ClusterState newClusterState = ClusterState.builder(new ClusterName("my-cluster")) + .metaData(new 
MetaData.Builder().put(IndexMetaData.builder(WatchStore.INDEX).state(IndexMetaData.State.CLOSE) + .settings(indexSettings).numberOfReplicas(0).numberOfShards(1))) + .nodes(discoveryNodes).build(); + when(watcherService.state()).thenReturn(WatcherState.STARTED); + + lifeCycleService.clusterChanged(new ClusterChangedEvent("any", newClusterState, oldClusterState)); + verify(watcherService, never()).start(any(ClusterState.class)); + verify(watcherService, never()).stop(); + verify(watcherService, times(1)).watchIndexDeletedOrClosed(); + } + } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java index ef697d1f66a..3fcfc2666b3 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java @@ -76,9 +76,6 @@ import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - * - */ public class EmailActionTests extends ESTestCase { private HttpAuthRegistry registry = new HttpAuthRegistry(singletonMap("basic", new BasicAuthFactory(null))); @@ -113,17 +110,17 @@ public class EmailActionTests extends ESTestCase { EmailTemplate.Builder emailBuilder = EmailTemplate.builder(); TextTemplate subject = null; if (randomBoolean()) { - subject = TextTemplate.inline("_subject").build(); + subject = new TextTemplate("_subject"); emailBuilder.subject(subject); } TextTemplate textBody = null; if (randomBoolean()) { - textBody = TextTemplate.inline("_text_body").build(); + textBody = new TextTemplate("_text_body"); emailBuilder.textBody(textBody); } TextTemplate htmlBody = null; if (randomBoolean()) { - htmlBody = TextTemplate.inline("_html_body").build(); + htmlBody = new TextTemplate("_html_body"); emailBuilder.htmlBody(htmlBody); } EmailTemplate email = emailBuilder.build(); @@ -204,9 +201,9 @@ public class EmailActionTests extends ESTestCase { randomBoolean() ? "bcc@domain" : "bcc1@domain,bcc2@domain").toArray(); Email.Address[] replyTo = rarely() ? null : Email.AddressList.parse( randomBoolean() ? "reply@domain" : "reply1@domain,reply2@domain").toArray(); - TextTemplate subject = randomBoolean() ? TextTemplate.inline("_subject").build() : null; - TextTemplate textBody = randomBoolean() ? TextTemplate.inline("_text_body").build() : null; - TextTemplate htmlBody = randomBoolean() ? TextTemplate.inline("_text_html").build() : null; + TextTemplate subject = randomBoolean() ? new TextTemplate("_subject") : null; + TextTemplate textBody = randomBoolean() ? new TextTemplate("_text_body") : null; + TextTemplate htmlBody = randomBoolean() ? 
new TextTemplate("_text_html") : null; DataAttachment dataAttachment = randomDataAttachment(); XContentBuilder builder = jsonBuilder().startObject() .field("account", "_account") @@ -312,7 +309,7 @@ public class EmailActionTests extends ESTestCase { assertThat(executable.action().getAuth(), notNullValue()); assertThat(executable.action().getAuth().user(), is("_user")); assertThat(executable.action().getAuth().password(), is(new Secret("_passwd".toCharArray()))); - assertThat(executable.action().getEmail().priority(), is(TextTemplate.defaultType(priority.name()).build())); + assertThat(executable.action().getEmail().priority(), is(new TextTemplate(priority.name()))); if (to != null) { assertThat(executable.action().getEmail().to(), arrayContainingInAnyOrder(addressesToTemplates(to))); } else { @@ -338,7 +335,7 @@ public class EmailActionTests extends ESTestCase { private static TextTemplate[] addressesToTemplates(Email.Address[] addresses) { TextTemplate[] templates = new TextTemplate[addresses.length]; for (int i = 0; i < templates.length; i++) { - templates[i] = TextTemplate.defaultType(addresses[i].toString()).build(); + templates[i] = new TextTemplate(addresses[i].toString()); } return templates; } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactoryTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactoryTests.java index 794483d59df..a899d1663fc 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactoryTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionFactoryTests.java @@ -29,9 +29,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -/** - * - */ public class HipChatActionFactoryTests extends ESTestCase { private HipChatActionFactory factory; private HipChatService hipchatService; @@ -79,20 +76,20 @@ public class HipChatActionFactoryTests extends ESTestCase { builder.field("account", accountName); builder.startObject("message"); - TextTemplate body = TextTemplate.inline("_body").build(); + TextTemplate body = new TextTemplate("_body"); builder.field("body", body); TextTemplate[] rooms = null; if (randomBoolean()) { - TextTemplate r1 = TextTemplate.inline("_r1").build(); - TextTemplate r2 = TextTemplate.inline("_r2").build(); + TextTemplate r1 = new TextTemplate("_r1"); + TextTemplate r2 = new TextTemplate("_r2"); rooms = new TextTemplate[] { r1, r2 }; builder.array("room", r1, r2); } TextTemplate[] users = null; if (randomBoolean()) { - TextTemplate u1 = TextTemplate.inline("_u1").build(); - TextTemplate u2 = TextTemplate.inline("_u2").build(); + TextTemplate u1 = new TextTemplate("_u1"); + TextTemplate u2 = new TextTemplate("_u2"); users = new TextTemplate[] { u1, u2 }; builder.array("user", u1, u2); } @@ -108,7 +105,7 @@ public class HipChatActionFactoryTests extends ESTestCase { } TextTemplate color = null; if (randomBoolean()) { - color = TextTemplate.inline(randomFrom(HipChatMessage.Color.values()).value()).build(); + color = new TextTemplate(randomFrom(HipChatMessage.Color.values()).value()); builder.field("color", color); } Boolean notify = null; @@ -135,7 +132,7 @@ public class HipChatActionFactoryTests extends ESTestCase { public void testParserSelfGenerated() throws Exception { String accountName = randomAsciiOfLength(10); - 
TextTemplate body = TextTemplate.inline("_body").build(); + TextTemplate body = new TextTemplate("_body"); HipChatMessage.Template.Builder templateBuilder = new HipChatMessage.Template.Builder(body); XContentBuilder builder = jsonBuilder().startObject(); @@ -144,14 +141,14 @@ public class HipChatActionFactoryTests extends ESTestCase { builder.field("body", body); if (randomBoolean()) { - TextTemplate r1 = TextTemplate.inline("_r1").build(); - TextTemplate r2 = TextTemplate.inline("_r2").build(); + TextTemplate r1 = new TextTemplate("_r1"); + TextTemplate r2 = new TextTemplate("_r2"); templateBuilder.addRooms(r1, r2); builder.array("room", r1, r2); } if (randomBoolean()) { - TextTemplate u1 = TextTemplate.inline("_u1").build(); - TextTemplate u2 = TextTemplate.inline("_u2").build(); + TextTemplate u1 = new TextTemplate("_u1"); + TextTemplate u2 = new TextTemplate("_u2"); templateBuilder.addUsers(u1, u2); builder.array("user", u1, u2); } @@ -166,7 +163,7 @@ public class HipChatActionFactoryTests extends ESTestCase { builder.field("format", format.value()); } if (randomBoolean()) { - TextTemplate color = TextTemplate.inline(randomFrom(HipChatMessage.Color.values()).value()).build(); + TextTemplate color = new TextTemplate(randomFrom(HipChatMessage.Color.values()).value()); templateBuilder.setColor(color); builder.field("color", color); } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionTests.java index bcc9f41da29..4498ff2418d 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatActionTests.java @@ -45,9 +45,6 @@ import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - * - */ public class HipChatActionTests extends ESTestCase { private HipChatService service; @@ -61,7 +58,7 @@ public class HipChatActionTests extends ESTestCase { TextTemplateEngine templateEngine = mock(TextTemplateEngine.class); - TextTemplate body = TextTemplate.inline("_body").build(); + TextTemplate body = new TextTemplate("_body"); HipChatMessage.Template.Builder messageBuilder = new HipChatMessage.Template.Builder(body); HipChatMessage.Template messageTemplate = messageBuilder.build(); @@ -129,20 +126,20 @@ public class HipChatActionTests extends ESTestCase { builder.field("account", accountName); builder.startObject("message"); - TextTemplate body = TextTemplate.inline("_body").build(); + TextTemplate body = new TextTemplate("_body"); builder.field("body", body); TextTemplate[] rooms = null; if (randomBoolean()) { - TextTemplate r1 = TextTemplate.inline("_r1").build(); - TextTemplate r2 = TextTemplate.inline("_r2").build(); + TextTemplate r1 = new TextTemplate("_r1"); + TextTemplate r2 = new TextTemplate("_r2"); rooms = new TextTemplate[] { r1, r2 }; builder.array("room", r1, r2); } TextTemplate[] users = null; if (randomBoolean()) { - TextTemplate u1 = TextTemplate.inline("_u1").build(); - TextTemplate u2 = TextTemplate.inline("_u2").build(); + TextTemplate u1 = new TextTemplate("_u1"); + TextTemplate u2 = new TextTemplate("_u2"); users = new TextTemplate[] { u1, u2 }; builder.array("user", u1, u2); } @@ -158,7 +155,7 @@ public class HipChatActionTests extends ESTestCase { } 
TextTemplate color = null; if (randomBoolean()) { - color = TextTemplate.inline(randomFrom(HipChatMessage.Color.values()).value()).build(); + color = new TextTemplate(randomFrom(HipChatMessage.Color.values()).value()); builder.field("color", color); } Boolean notify = null; @@ -185,7 +182,7 @@ public class HipChatActionTests extends ESTestCase { public void testParserSelfGenerated() throws Exception { String accountName = randomAsciiOfLength(10); - TextTemplate body = TextTemplate.inline("_body").build(); + TextTemplate body = new TextTemplate("_body"); HipChatMessage.Template.Builder templateBuilder = new HipChatMessage.Template.Builder(body); XContentBuilder builder = jsonBuilder().startObject(); @@ -194,14 +191,14 @@ public class HipChatActionTests extends ESTestCase { builder.field("body", body); if (randomBoolean()) { - TextTemplate r1 = TextTemplate.inline("_r1").build(); - TextTemplate r2 = TextTemplate.inline("_r2").build(); + TextTemplate r1 = new TextTemplate("_r1"); + TextTemplate r2 = new TextTemplate("_r2"); templateBuilder.addRooms(r1, r2); builder.array("room", r1, r2); } if (randomBoolean()) { - TextTemplate u1 = TextTemplate.inline("_u1").build(); - TextTemplate u2 = TextTemplate.inline("_u2").build(); + TextTemplate u1 = new TextTemplate("_u1"); + TextTemplate u2 = new TextTemplate("_u2"); templateBuilder.addUsers(u1, u2); builder.array("user", u1, u2); } @@ -216,7 +213,7 @@ public class HipChatActionTests extends ESTestCase { builder.field("format", format.value()); } if (randomBoolean()) { - TextTemplate color = TextTemplate.inline(randomFrom(HipChatMessage.Color.values()).value()).build(); + TextTemplate color = new TextTemplate(randomFrom(HipChatMessage.Color.values()).value()); templateBuilder.setColor(color); builder.field("color", color); } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java index a10634d1730..3a9a0b4fb30 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.common.init.proxy.ClientProxy; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.Action.Result.Status; @@ -49,8 +48,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.joda.time.DateTimeZone.UTC; -/** - */ public class IndexActionTests extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -191,7 +188,7 @@ public class IndexActionTests extends ESIntegTestCase { } TimeValue writeTimeout = randomBoolean() ? 
TimeValue.timeValueSeconds(randomInt(10)) : null; if (writeTimeout != null) { - builder.field(IndexAction.Field.TIMEOUT.getPreferredName(), writeTimeout); + builder.field(IndexAction.Field.TIMEOUT.getPreferredName(), writeTimeout.millis()); } builder.endObject(); Client client = client(); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingActionTests.java index 6e25b6c112e..5fe4f86661e 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingActionTests.java @@ -76,7 +76,7 @@ public class LoggingActionTests extends ESTestCase { Map expectedModel = singletonMap("ctx", ctxModel); String text = randomAsciiOfLength(10); - TextTemplate template = TextTemplate.inline(text).build(); + TextTemplate template = new TextTemplate(text); LoggingAction action = new LoggingAction(template, level, "_category"); ExecutableLoggingAction executable = new ExecutableLoggingAction(action, logger, actionLogger, engine); when(engine.render(template, expectedModel)).thenReturn(text); @@ -97,7 +97,7 @@ public class LoggingActionTests extends ESTestCase { LoggingActionFactory parser = new LoggingActionFactory(settings, engine); String text = randomAsciiOfLength(10); - TextTemplate template = TextTemplate.inline(text).build(); + TextTemplate template = new TextTemplate(text); XContentBuilder builder = jsonBuilder().startObject(); builder.field("text", template); @@ -131,7 +131,7 @@ public class LoggingActionTests extends ESTestCase { LoggingActionFactory parser = new LoggingActionFactory(settings, engine); String text = randomAsciiOfLength(10); - TextTemplate template = TextTemplate.inline(text).build(); + TextTemplate template = new TextTemplate(text); String category = randomAsciiOfLength(10); LoggingAction action = new LoggingAction(template, level, category); ExecutableLoggingAction executable = new ExecutableLoggingAction(action, logger, settings, engine); @@ -151,7 +151,7 @@ public class LoggingActionTests extends ESTestCase { LoggingActionFactory parser = new LoggingActionFactory(settings, engine); String text = randomAsciiOfLength(10); - TextTemplate template = TextTemplate.inline(text).build(); + TextTemplate template = new TextTemplate(text); LoggingAction.Builder actionBuilder = loggingAction(template); if (randomBoolean()) { actionBuilder.setCategory(randomAsciiOfLength(10)); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionTests.java index 45fde4082c9..d62cc71255f 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionTests.java @@ -47,9 +47,6 @@ import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - * - */ public class PagerDutyActionTests extends ESTestCase { private PagerDutyService service; @@ -64,7 +61,7 @@ public class PagerDutyActionTests extends ESTestCase { TextTemplateEngine templateEngine = 
mock(TextTemplateEngine.class); - TextTemplate description = TextTemplate.inline("_description").build(); + TextTemplate description = new TextTemplate("_description"); IncidentEvent.Template.Builder eventBuilder = new IncidentEvent.Template.Builder(description); boolean attachPayload = randomBoolean(); eventBuilder.setAttachPayload(attachPayload); @@ -133,31 +130,31 @@ public class PagerDutyActionTests extends ESTestCase { TextTemplate incidentKey = null; if (randomBoolean()) { - incidentKey = TextTemplate.inline("_incident_key").build(); + incidentKey = new TextTemplate("_incident_key"); builder.field("incident_key", incidentKey); } TextTemplate description = null; if (randomBoolean()) { - description = TextTemplate.inline("_description").build(); + description = new TextTemplate("_description"); builder.field("description", description); } TextTemplate client = null; if (randomBoolean()) { - client = TextTemplate.inline("_client").build(); + client = new TextTemplate("_client"); builder.field("client", client); } TextTemplate clientUrl = null; if (randomBoolean()) { - clientUrl = TextTemplate.inline("_client_url").build(); + clientUrl = new TextTemplate("_client_url"); builder.field("client_url", clientUrl); } TextTemplate eventType = null; if (randomBoolean()) { - eventType = TextTemplate.inline(randomFrom("trigger", "resolve", "acknowledge")).build(); + eventType = new TextTemplate(randomFrom("trigger", "resolve", "acknowledge")); builder.field("event_type", eventType); } @@ -169,9 +166,8 @@ public class PagerDutyActionTests extends ESTestCase { IncidentEventContext.Template[] contexts = null; if (randomBoolean()) { contexts = new IncidentEventContext.Template[] { - IncidentEventContext.Template.link(TextTemplate.inline("_href").build(), TextTemplate.inline("_text").build()), - IncidentEventContext.Template.image(TextTemplate.inline("_src").build(), TextTemplate.inline("_href").build(), - TextTemplate.inline("_alt").build()) + IncidentEventContext.Template.link(new TextTemplate("_href"), new TextTemplate("_text")), + IncidentEventContext.Template.image(new TextTemplate("_src"), new TextTemplate("_href"), new TextTemplate("_alt")) }; builder.array("context", (Object) contexts); } @@ -197,27 +193,26 @@ public class PagerDutyActionTests extends ESTestCase { IncidentEvent.Template.Builder event = IncidentEvent.templateBuilder(randomAsciiOfLength(50)); if (randomBoolean()) { - event.setIncidentKey(TextTemplate.inline(randomAsciiOfLength(50)).build()); + event.setIncidentKey(new TextTemplate(randomAsciiOfLength(50))); } if (randomBoolean()) { - event.setClient(TextTemplate.inline(randomAsciiOfLength(50)).build()); + event.setClient(new TextTemplate(randomAsciiOfLength(50))); } if (randomBoolean()) { - event.setClientUrl(TextTemplate.inline(randomAsciiOfLength(50)).build()); + event.setClientUrl(new TextTemplate(randomAsciiOfLength(50))); } if (randomBoolean()) { event.setAttachPayload(randomBoolean()); } if (randomBoolean()) { - event.addContext(IncidentEventContext.Template.link(TextTemplate.inline("_href").build(), - TextTemplate.inline("_text").build())); + event.addContext(IncidentEventContext.Template.link(new TextTemplate("_href"), new TextTemplate("_text"))); } if (randomBoolean()) { - event.addContext(IncidentEventContext.Template.image(TextTemplate.inline("_src").build(), - TextTemplate.inline("_href").build(), TextTemplate.inline("_alt").build())); + event.addContext(IncidentEventContext.Template.image(new TextTemplate("_src"), new TextTemplate("_href"), + new 
TextTemplate("_alt"))); } if (randomBoolean()) { - event.setEventType(TextTemplate.inline(randomAsciiOfLength(50)).build()); + event.setEventType(new TextTemplate(randomAsciiOfLength(50))); } if (randomBoolean()) { event.setAccount(randomAsciiOfLength(50)).build(); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java index 84b6d2ac058..742172be679 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java @@ -399,8 +399,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase { LOGGING { @Override public Action.Builder action() throws Exception { - TextTemplate.Builder templateBuilder = new TextTemplate.Builder.Inline("_logging"); - return LoggingAction.builder(templateBuilder.build()); + return LoggingAction.builder(new TextTemplate("_logging")); } @Override diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java index 78e8ca95185..ad1c5079bce 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java @@ -67,9 +67,6 @@ import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; - -/** - */ public class WebhookActionTests extends ESTestCase { static final String TEST_HOST = "test.com"; @@ -87,8 +84,8 @@ public class WebhookActionTests extends ESTestCase { @Before public void init() throws Exception { templateEngine = new MockTextTemplateEngine(); - testBody = TextTemplate.inline(TEST_BODY_STRING).build(); - testPath = TextTemplate.inline(TEST_PATH_STRING).build(); + testBody = new TextTemplate(TEST_BODY_STRING); + testPath = new TextTemplate(TEST_PATH_STRING); authRegistry = new HttpAuthRegistry(singletonMap("basic", new BasicAuthFactory(null))); } @@ -128,8 +125,8 @@ public class WebhookActionTests extends ESTestCase { } public void testParser() throws Exception { - TextTemplate body = randomBoolean() ? TextTemplate.inline("_subject").build() : null; - TextTemplate path = TextTemplate.inline("_url").build(); + TextTemplate body = randomBoolean() ? new TextTemplate("_subject") : null; + TextTemplate path = new TextTemplate("_url"); String host = "test.host"; HttpMethod method = randomFrom(HttpMethod.GET, HttpMethod.POST, HttpMethod.PUT, HttpMethod.DELETE, HttpMethod.HEAD, null); HttpRequestTemplate request = getHttpRequestTemplate(method, host, TEST_PORT, path, body, null); @@ -148,8 +145,8 @@ public class WebhookActionTests extends ESTestCase { } public void testParserSelfGenerated() throws Exception { - TextTemplate body = randomBoolean() ? TextTemplate.inline("_body").build() : null; - TextTemplate path = TextTemplate.inline("_url").build(); + TextTemplate body = randomBoolean() ? 
new TextTemplate("_body") : null; + TextTemplate path = new TextTemplate("_url"); String host = "test.host"; String watchId = "_watch"; String actionId = randomAsciiOfLength(5); @@ -174,8 +171,8 @@ public class WebhookActionTests extends ESTestCase { } public void testParserBuilder() throws Exception { - TextTemplate body = randomBoolean() ? TextTemplate.inline("_body").build() : null; - TextTemplate path = TextTemplate.inline("_url").build(); + TextTemplate body = randomBoolean() ? new TextTemplate("_body") : null; + TextTemplate path = new TextTemplate("_url"); String host = "test.host"; String watchId = "_watch"; @@ -257,9 +254,9 @@ public class WebhookActionTests extends ESTestCase { String watchId = "test_url_encode" + randomAsciiOfLength(10); HttpMethod method = HttpMethod.POST; - TextTemplate path = TextTemplate.defaultType("/test_" + watchId).build(); + TextTemplate path = new TextTemplate("/test_" + watchId); String host = "test.host"; - TextTemplate testBody = TextTemplate.inline("ERROR HAPPENED").build(); + TextTemplate testBody = new TextTemplate("ERROR HAPPENED"); HttpRequestTemplate requestTemplate = getHttpRequestTemplate(method, host, TEST_PORT, path, testBody, null); WebhookAction action = new WebhookAction(requestTemplate); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index f3af18ff56f..d79399f8eff 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -84,8 +84,8 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webPort) .scheme(Scheme.HTTPS) - .path(TextTemplate.inline("/test/_id").build()) - .body(TextTemplate.inline("{key=value}").build()); + .path(new TextTemplate("/test/_id")) + .body(new TextTemplate("{key=value}")); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() @@ -127,8 +127,8 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webPort) .scheme(Scheme.HTTPS) .auth(new BasicAuth("_username", "_password".toCharArray())) - .path(TextTemplate.inline("/test/_id").build()) - .body(TextTemplate.inline("{key=value}").build()); + .path(new TextTemplate("/test/_id")) + .body(new TextTemplate("{key=value}")); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java index c599e2c40c6..5dcce5de64f 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java @@ -39,8 +39,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.notNullValue; -/** - */ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase { private int webPort; private MockWebServer webServer; @@ -70,10 +68,10 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase public void testWebhook() throws Exception { webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webPort) - .path(TextTemplate.inline("/test/_id")) - .putParam("param1", TextTemplate.inline("value1")) - .putParam("watch_id", TextTemplate.inline("_id")) - .body(TextTemplate.inline("_body")); + .path(new TextTemplate("/test/_id")) + .putParam("param1", new TextTemplate("value1")) + .putParam("watch_id", new TextTemplate("_id")) + .body(new TextTemplate("_body")); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() @@ -115,10 +113,10 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webPort) .auth(new BasicAuth("_username", "_password".toCharArray())) - .path(TextTemplate.inline("/test/_id").build()) - .putParam("param1", TextTemplate.inline("value1").build()) - .putParam("watch_id", TextTemplate.inline("_id").build()) - .body(TextTemplate.inline("_body").build()); + .path(new TextTemplate("/test/_id")) + .putParam("param1", new TextTemplate("value1")) + .putParam("watch_id", new TextTemplate("_id")) + .body(new TextTemplate("_body")); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 97846e9ba02..4ff96fe3597 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -5,10 +5,13 @@ */ package org.elasticsearch.xpack.watcher.execution; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.support.clock.Clock; import org.elasticsearch.xpack.support.clock.ClockMock; import org.elasticsearch.xpack.watcher.actions.Action; @@ -41,7 +44,9 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.Executor; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; @@ -51,7 +56,9 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.joda.time.DateTime.now; import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.eq; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -68,15 +75,16 @@ public class ExecutionServiceTests extends ESTestCase { private Input.Result inputResult; private WatchStore watchStore; + private TriggeredWatchStore triggeredWatchStore; + private WatchExecutor executor; private HistoryStore historyStore; private WatchLockService watchLockService; private ExecutionService executionService; private Clock clock; + private ThreadPool threadPool; @Before public void init() throws Exception { - TriggeredWatchStore triggeredWatchStore; - payload = mock(Payload.class); input = mock(ExecutableInput.class); inputResult = mock(Input.Result.class); @@ -88,13 +96,14 @@ public class ExecutionServiceTests extends ESTestCase { triggeredWatchStore = mock(TriggeredWatchStore.class); historyStore = mock(HistoryStore.class); - WatchExecutor executor = mock(WatchExecutor.class); + executor = mock(WatchExecutor.class); when(executor.queue()).thenReturn(new ArrayBlockingQueue<>(1)); watchLockService = mock(WatchLockService.class); clock = new ClockMock(); + threadPool = mock(ThreadPool.class); executionService = new ExecutionService(Settings.EMPTY, historyStore, triggeredWatchStore, executor, watchStore, - watchLockService, clock); + watchLockService, clock, threadPool); ClusterState clusterState = mock(ClusterState.class); when(triggeredWatchStore.loadTriggeredWatches(clusterState)).thenReturn(new ArrayList<>()); @@ -483,7 +492,7 @@ public class ExecutionServiceTests extends ESTestCase { } public void testExecuteInner() throws Exception { - DateTime now = DateTime.now(DateTimeZone.UTC); + DateTime now = now(DateTimeZone.UTC); Watch watch = mock(Watch.class); ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now); WatchExecutionContext context = new TriggeredExecutionContext(watch, now, event, timeValueSeconds(5)); @@ -560,7 +569,7 @@ public class ExecutionServiceTests extends ESTestCase { } public void testExecuteInnerThrottled() throws Exception { - DateTime now = DateTime.now(DateTimeZone.UTC); + DateTime now = now(DateTimeZone.UTC); Watch watch = mock(Watch.class); ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now); WatchExecutionContext context = new TriggeredExecutionContext(watch, now, event, timeValueSeconds(5)); @@ -613,7 +622,7 @@ public class ExecutionServiceTests extends ESTestCase { } public void testExecuteInnerConditionNotMet() throws Exception { - DateTime now = DateTime.now(DateTimeZone.UTC); + DateTime now = now(DateTimeZone.UTC); Watch watch = mock(Watch.class); ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now); WatchExecutionContext context = new TriggeredExecutionContext(watch, now, event, timeValueSeconds(5)); @@ -774,6 +783,28 @@ public class ExecutionServiceTests extends ESTestCase { verify(action, never()).execute("_action", context, payload); } + public void testThatTriggeredWatchDeletionWorksOnExecutionRejection() throws Exception { + Watch watch = mock(Watch.class); + when(watch.id()).thenReturn("foo"); + when(watch.nonce()).thenReturn(1L); + when(watchStore.get(any())).thenReturn(watch); + + // execute needs to fail as well as storing the history + doThrow(new EsRejectedExecutionException()).when(executor).execute(any()); + doThrow(new ElasticsearchException("whatever")).when(historyStore).forcePut(any()); + + Wid wid = new Wid(watch.id(), watch.nonce(), now()); + + Executor currentThreadExecutor = command -> command.run(); + when(threadPool.generic()).thenReturn(currentThreadExecutor); + + TriggeredWatch 
triggeredWatch = new TriggeredWatch(wid, new ScheduleTriggerEvent(now() ,now())); + executionService.executeTriggeredWatches(Collections.singleton(triggeredWatch)); + + verify(triggeredWatchStore, times(1)).delete(wid); + verify(historyStore, times(1)).forcePut(any(WatchRecord.class)); + } + private Tuple whenCondition(final WatchExecutionContext context) { Condition.Result conditionResult = mock(Condition.Result.class); when(conditionResult.met()).thenReturn(true); @@ -791,5 +822,4 @@ public class ExecutionServiceTests extends ESTestCase { return new Tuple<>(transform, transformResult); } - } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java index b7929672986..b135abe8dd1 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java @@ -142,7 +142,7 @@ public class ChainInputTests extends ESTestCase { HttpInput.Builder httpInputBuilder = httpInput(HttpRequestTemplate.builder("theHost", 1234) .path("/index/_search") - .body(jsonBuilder().startObject().field("size", 1).endObject()) + .body(jsonBuilder().startObject().field("size", 1).endObject().string()) .auth(new BasicAuth("test", "changeme".toCharArray()))); ChainInput.Builder chainedInputBuilder = chainInput() diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java index c16d30c3ddf..e2d249a69e5 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java @@ -61,7 +61,7 @@ public class ChainIntegrationTests extends AbstractWatcherIntegrationTestCase { InetSocketAddress address = internalCluster().httpAddresses()[0]; HttpInput.Builder httpInputBuilder = httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort()) .path("/" + index + "/_search") - .body(jsonBuilder().startObject().field("size", 1).endObject()) + .body(jsonBuilder().startObject().field("size", 1).endObject().string()) .auth(securityEnabled() ? 
new BasicAuth("test", "changeme".toCharArray()) : null)); ChainInput.Builder chainedInputBuilder = chainInput() diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java index 1200c88c235..6285bef8efa 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java @@ -71,7 +71,7 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas .trigger(schedule(interval("5s"))) .input(httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort()) .path("/index/_search") - .body(jsonBuilder().startObject().field("size", 1).endObject()) + .body(jsonBuilder().startObject().field("size", 1).endObject().string()) .auth(securityEnabled() ? new BasicAuth("test", "changeme".toCharArray()) : null))) .condition(compareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)) .addAction("_id", loggingAction("watch [{{ctx.watch_id}}] matched"))) @@ -117,8 +117,8 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas .field("query").value(termQuery("field", "value")) .endObject(); HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder(address.getHostString(), address.getPort()) - .path(TextTemplate.inline("/idx/_search")) - .body(body); + .path(new TextTemplate("/idx/_search")) + .body(body.string()); if (securityEnabled()) { requestBuilder.auth(new BasicAuth("test", "changeme".toCharArray())); } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java index ec3c305f0f3..9af85ea6286 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java @@ -66,8 +66,6 @@ import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - */ public class HttpInputTests extends ESTestCase { private HttpClient httpClient; private HttpInputFactory httpParser; @@ -123,7 +121,7 @@ public class HttpInputTests extends ESTestCase { ExecutableHttpInput input = new ExecutableHttpInput(httpInput, logger, httpClient, templateEngine); when(httpClient.execute(any(HttpRequest.class))).thenReturn(response); - when(templateEngine.render(eq(TextTemplate.inline("_body").build()), any(Map.class))).thenReturn("_body"); + when(templateEngine.render(eq(new TextTemplate("_body")), any(Map.class))).thenReturn("_body"); WatchExecutionContext ctx = createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); @@ -142,7 +140,7 @@ public class HttpInputTests extends ESTestCase { String notJson = "This is not json"; HttpResponse response = new HttpResponse(123, notJson.getBytes(StandardCharsets.UTF_8)); when(httpClient.execute(any(HttpRequest.class))).thenReturn(response); - when(templateEngine.render(eq(TextTemplate.inline("_body").build()), any(Map.class))).thenReturn("_body"); + when(templateEngine.render(eq(new TextTemplate("_body")), 
any(Map.class))).thenReturn("_body"); WatchExecutionContext ctx = createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); @@ -156,18 +154,18 @@ public class HttpInputTests extends ESTestCase { String host = randomAsciiOfLength(3); int port = randomIntBetween(8000, 9000); String path = randomAsciiOfLength(3); - TextTemplate pathTemplate = TextTemplate.inline(path).build(); + TextTemplate pathTemplate = new TextTemplate(path); String body = randomBoolean() ? randomAsciiOfLength(3) : null; Map params = - randomBoolean() ? new MapBuilder().put("a", TextTemplate.inline("b").build()).map() : null; + randomBoolean() ? new MapBuilder().put("a", new TextTemplate("b")).map() : null; Map headers = - randomBoolean() ? new MapBuilder().put("c", TextTemplate.inline("d").build()).map() : null; + randomBoolean() ? new MapBuilder().put("c", new TextTemplate("d")).map() : null; HttpAuth auth = randomBoolean() ? new BasicAuth("username", "password".toCharArray()) : null; HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder(host, port) .scheme(scheme) .method(httpMethod) .path(pathTemplate) - .body(body != null ? TextTemplate.inline(body).build() : null) + .body(body != null ? new TextTemplate(body) : null) .auth(auth); if (params != null) { @@ -197,7 +195,7 @@ public class HttpInputTests extends ESTestCase { assertThat(result.getRequest().method(), equalTo(httpMethod != null ? httpMethod : HttpMethod.GET)); // get is the default assertThat(result.getRequest().host(), equalTo(host)); assertThat(result.getRequest().port(), equalTo(port)); - assertThat(result.getRequest().path(), is(TextTemplate.inline(path).build())); + assertThat(result.getRequest().path(), is(new TextTemplate(path))); assertThat(result.getExpectedResponseXContentType(), equalTo(expectedResponseXContentType)); if (expectedResponseXContentType != HttpContentType.TEXT && extractKeys != null) { for (String key : extractKeys) { @@ -205,14 +203,14 @@ public class HttpInputTests extends ESTestCase { } } if (params != null) { - assertThat(result.getRequest().params(), hasEntry(is("a"), is(TextTemplate.inline("b").build()))); + assertThat(result.getRequest().params(), hasEntry(is("a"), is(new TextTemplate("b")))); } if (headers != null) { - assertThat(result.getRequest().headers(), hasEntry(is("c"), is(TextTemplate.inline("d").build()))); + assertThat(result.getRequest().headers(), hasEntry(is("c"), is(new TextTemplate("d")))); } assertThat(result.getRequest().auth(), equalTo(auth)); if (body != null) { - assertThat(result.getRequest().body(), is(TextTemplate.inline(body).build())); + assertThat(result.getRequest().body(), is(new TextTemplate(body))); } else { assertThat(result.getRequest().body(), nullValue()); } @@ -256,7 +254,7 @@ public class HttpInputTests extends ESTestCase { when(httpClient.execute(any(HttpRequest.class))).thenReturn(response); - when(templateEngine.render(eq(TextTemplate.inline("_body").build()), any(Map.class))).thenReturn("_body"); + when(templateEngine.render(eq(new TextTemplate("_body")), any(Map.class))).thenReturn("_body"); WatchExecutionContext ctx = createWatchExecutionContext(); HttpInput.Result result = input.execute(ctx, new Payload.Simple()); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtilsTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtilsTests.java index 13a2458ccb6..bca06a09dd9 100644 --- 
a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtilsTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtilsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.support; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -22,6 +23,7 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.parseTimeValueSupportingFractional; import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.xContentParser; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.either; @@ -122,4 +124,56 @@ public class WatcherDateTimeUtilsTests extends ESTestCase { TimeValue parsed = WatcherDateTimeUtils.parseTimeValue(parser, "test"); assertThat(parsed, nullValue()); } + + public void testParseTimeValueWithFractional() { + assertEquals("This function exists so 5.x can be compatible with 2.x indices. It should be removed with 6.x", 5, + Version.CURRENT.major); + + // This code is lifted straight from 2.x's TimeValueTests.java + assertEquals(new TimeValue(10, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("10 ms", "test")); + assertEquals(new TimeValue(10, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("10ms", "test")); + assertEquals(new TimeValue(10, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("10 MS", "test")); + assertEquals(new TimeValue(10, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("10MS", "test")); + + assertEquals(new TimeValue(10, TimeUnit.SECONDS), parseTimeValueSupportingFractional("10 s", "test")); + assertEquals(new TimeValue(10, TimeUnit.SECONDS), parseTimeValueSupportingFractional("10s", "test")); + assertEquals(new TimeValue(10, TimeUnit.SECONDS), parseTimeValueSupportingFractional("10 S", "test")); + assertEquals(new TimeValue(10, TimeUnit.SECONDS), parseTimeValueSupportingFractional("10S", "test")); + + assertEquals(new TimeValue(100, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("0.1s", "test")); + + assertEquals(new TimeValue(10, TimeUnit.MINUTES), parseTimeValueSupportingFractional("10 m", "test")); + assertEquals(new TimeValue(10, TimeUnit.MINUTES), parseTimeValueSupportingFractional("10m", "test")); + assertEquals(new TimeValue(10, TimeUnit.MINUTES), parseTimeValueSupportingFractional("10 M", "test")); + assertEquals(new TimeValue(10, TimeUnit.MINUTES), parseTimeValueSupportingFractional("10M", "test")); + + assertEquals(new TimeValue(10, TimeUnit.HOURS), parseTimeValueSupportingFractional("10 h", "test")); + assertEquals(new TimeValue(10, TimeUnit.HOURS), parseTimeValueSupportingFractional("10h", "test")); + assertEquals(new TimeValue(10, TimeUnit.HOURS), parseTimeValueSupportingFractional("10 H", "test")); + assertEquals(new TimeValue(10, TimeUnit.HOURS), parseTimeValueSupportingFractional("10H", "test")); + + assertEquals(new TimeValue(10, TimeUnit.DAYS), parseTimeValueSupportingFractional("10 d", "test")); + assertEquals(new TimeValue(10, TimeUnit.DAYS), parseTimeValueSupportingFractional("10d", 
"test")); + assertEquals(new TimeValue(10, TimeUnit.DAYS), parseTimeValueSupportingFractional("10 D", "test")); + assertEquals(new TimeValue(10, TimeUnit.DAYS), parseTimeValueSupportingFractional("10D", "test")); + + assertEquals(new TimeValue(70, TimeUnit.DAYS), parseTimeValueSupportingFractional("10 w", "test")); + assertEquals(new TimeValue(70, TimeUnit.DAYS), parseTimeValueSupportingFractional("10w", "test")); + assertEquals(new TimeValue(70, TimeUnit.DAYS), parseTimeValueSupportingFractional("10 W", "test")); + assertEquals(new TimeValue(70, TimeUnit.DAYS), parseTimeValueSupportingFractional("10W", "test")); + + // Extra fractional tests just because that is the point + assertEquals(new TimeValue(100, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("0.1s", "test")); + assertEquals(new TimeValue(6, TimeUnit.SECONDS), parseTimeValueSupportingFractional("0.1m", "test")); + assertEquals(new TimeValue(6, TimeUnit.MINUTES), parseTimeValueSupportingFractional("0.1h", "test")); + assertEquals(new TimeValue(144, TimeUnit.MINUTES), parseTimeValueSupportingFractional("0.1d", "test")); + assertEquals(new TimeValue(1008, TimeUnit.MINUTES), parseTimeValueSupportingFractional("0.1w", "test")); + + // And some crazy fractions just for fun + assertEquals(new TimeValue(1700, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("1.7s", "test")); + assertEquals(new TimeValue(162, TimeUnit.SECONDS), parseTimeValueSupportingFractional("2.7m", "test")); + assertEquals(new TimeValue(5988, TimeUnit.MINUTES), parseTimeValueSupportingFractional("99.8h", "test")); + assertEquals(new TimeValue(1057968, TimeUnit.SECONDS), parseTimeValueSupportingFractional("12.245d", "test")); + assertEquals(new TimeValue(7258204799L, TimeUnit.MILLISECONDS), parseTimeValueSupportingFractional("12.001w", "test")); + } } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java index 368ccd3e8be..04611dcea70 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java @@ -108,7 +108,7 @@ public class WatcherUtilsTests extends ESTestCase { } String text = randomAsciiOfLengthBetween(1, 5); ScriptService.ScriptType scriptType = randomFrom(ScriptService.ScriptType.values()); - expectedTemplate = new Script(text, scriptType, randomBoolean() ? null : "mustache", params); + expectedTemplate = new Script(text, scriptType, "mustache", params); request = new WatcherSearchTemplateRequest(expectedIndices, expectedTypes, expectedSearchType, expectedIndicesOptions, expectedTemplate); } else { @@ -206,7 +206,7 @@ public class WatcherUtilsTests extends ESTestCase { } String text = randomAsciiOfLengthBetween(1, 5); ScriptService.ScriptType scriptType = randomFrom(ScriptService.ScriptType.values()); - template = new Script(text, scriptType, randomBoolean() ? 
null : "mustache", params); + template = new Script(text, scriptType, "mustache", params); builder.field("template", template); } builder.endObject(); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 583ca36cd90..70601128b78 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.plugins.Plugin; @@ -102,12 +103,10 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; -/** - */ @ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, maxNumDataNodes = 3) public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase { - public static final String WATCHER_LANG = ScriptSettings.DEFAULT_LANG; + public static final String WATCHER_LANG = Script.DEFAULT_SCRIPT_LANG; private static final boolean timeWarpEnabled = SystemPropertyUtil.getBoolean("tests.timewarp", true); @@ -676,9 +675,6 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase "\n" + "admin:\n" + " cluster: [ 'manage' ]\n" + - "transport_client:\n" + - " cluster: [ 'transport_client' ]\n" + - "\n" + "monitor:\n" + " cluster: [ 'monitor' ]\n" ; diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java index 68aff89f308..207a4da20ac 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java @@ -17,11 +17,11 @@ public class MockTextTemplateEngine extends TextTemplateEngine { } @Override - public String render(TextTemplate template, Map model) { - if (template == null ) { + public String render(TextTemplate textTemplate, Map model) { + if (textTemplate == null ) { return null; } - return template.getTemplate(); + return textTemplate.getTemplate(); } } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java index 3717c620999..1bdb2e941d8 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.common.http.HttpClient; import org.elasticsearch.xpack.common.http.HttpMethod; import org.elasticsearch.xpack.common.http.HttpRequestTemplate; @@ -191,9 +190,9 @@ public final class WatcherTestUtils { HttpRequestTemplate.Builder httpRequest = HttpRequestTemplate.builder("localhost", 80); httpRequest.method(HttpMethod.POST); - TextTemplate path = TextTemplate.inline("/foobarbaz/{{ctx.watch_id}}").build(); + TextTemplate path = new TextTemplate("/foobarbaz/{{ctx.watch_id}}"); httpRequest.path(path); - TextTemplate body = TextTemplate.inline("{{ctx.watch_id}} executed with {{ctx.payload.response.hits.total_hits}} hits").build(); + TextTemplate body = new TextTemplate("{{ctx.watch_id}} executed with {{ctx.payload.response.hits.total_hits}} hits"); httpRequest.body(body); TextTemplateEngine engine = new MockTextTemplateEngine(); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java index 6168f944ab6..106637a42a7 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java @@ -9,11 +9,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction; import org.elasticsearch.xpack.notification.hipchat.HipChatAccount; import org.elasticsearch.xpack.notification.hipchat.HipChatMessage; import org.elasticsearch.xpack.notification.hipchat.HipChatService; import org.elasticsearch.xpack.notification.hipchat.SentMessages; +import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; @@ -56,7 +56,7 @@ public class HipChatServiceTests extends AbstractWatcherIntegrationTestCase { // this is for the Watcher Test account in HipChat .put("xpack.notification.hipchat.account.user_account.profile", "user") - .put("xpack.notification.hipchat.account.user_account.auth_token", "FYVx16oDH78ZW9r13wtXbcszyoyA7oX5tiMWg9X0") + .put("xpack.notification.hipchat.account.user_account.auth_token", "12rNQUuQ0wObfRVeoVD8OeoAnosCT8tSTV5UjsII") // this is for the `test-watcher-v1` notification token .put("xpack.notification.hipchat.account.v1_account.profile", "v1") @@ -99,7 +99,6 @@ public class HipChatServiceTests extends AbstractWatcherIntegrationTestCase { assertSentMessagesAreValid(1, messages); } - @AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/3162") public void testSendMessageUserAccount() throws Exception { HipChatService service = getInstanceFromMaster(HipChatService.class); HipChatMessage.Color color = randomFrom(HipChatMessage.Color.values()); @@ -118,7 +117,6 @@ public class HipChatServiceTests extends AbstractWatcherIntegrationTestCase { assertSentMessagesAreValid(3, messages); } - @AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/3162") public void testWatchWithHipChatAction() throws 
Exception { HipChatAccount.Profile profile = randomFrom(HipChatAccount.Profile.values()); HipChatMessage.Color color = randomFrom(HipChatMessage.Color.values()); @@ -180,7 +178,6 @@ public class HipChatServiceTests extends AbstractWatcherIntegrationTestCase { assertThat(response.getHits().getTotalHits(), is(1L)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/3162") public void testDefaultValuesForColorAndFormatWorks() { HipChatService service = getInstanceFromMaster(HipChatService.class); HipChatMessage hipChatMessage = new HipChatMessage( diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java index b6cfc8fcef4..97bca8f6a80 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java @@ -194,7 +194,7 @@ public class SearchTransformTests extends ESIntegTestCase { builder.field("search_type", searchType.name()); } if (templateName != null) { - TextTemplate template = TextTemplate.file(templateName).build(); + TextTemplate template = new TextTemplate(templateName, null, ScriptService.ScriptType.FILE, null); builder.field("template", template); } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 91e84587ae3..b618026b3bd 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -78,7 +78,7 @@ public class WatchMetadataTests extends AbstractWatcherIntegrationTestCase { metadata.put("foo", "bar"); metadata.put("logtext", "This is a test"); - LoggingAction.Builder loggingAction = loggingAction(TextTemplate.inline("_logging")) + LoggingAction.Builder loggingAction = loggingAction(new TextTemplate("_logging")) .setLevel(LoggingLevel.DEBUG) .setCategory("test"); diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStoreTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStoreTests.java index dc5c0838c91..44221316b23 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStoreTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStoreTests.java @@ -54,6 +54,7 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsEqual.equalTo; @@ -123,22 +124,7 @@ public class WatchStoreTests extends ESTestCase { public void testStartRefreshFailed() { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = 
settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) - .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); + createWatchIndexMetaData(csBuilder); RefreshResponse refreshResponse = mockRefreshResponse(1, 0); when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); @@ -158,22 +144,7 @@ public class WatchStoreTests extends ESTestCase { public void testStartSearchFailed() { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) - .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); + createWatchIndexMetaData(csBuilder); RefreshResponse refreshResponse = mockRefreshResponse(1, 1); when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); @@ -197,22 +168,7 @@ public class WatchStoreTests extends ESTestCase { public void testStartNoWatchStored() throws Exception { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) - .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); 
- csBuilder.routingTable(routingTableBuilder.build()); + createWatchIndexMetaData(csBuilder); RefreshResponse refreshResponse = mockRefreshResponse(1, 1); when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); @@ -234,22 +190,7 @@ public class WatchStoreTests extends ESTestCase { public void testStartWatchStored() throws Exception { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) - .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); + createWatchIndexMetaData(csBuilder); RefreshResponse refreshResponse = mockRefreshResponse(1, 1); when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); @@ -300,22 +241,7 @@ public class WatchStoreTests extends ESTestCase { public void testUsageStats() throws Exception { ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); - MetaData.Builder metaDateBuilder = MetaData.builder(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); - Settings settings = settings(Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); - final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); - indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) - .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) - .build()); - indexRoutingTableBuilder.addReplica(); - routingTableBuilder.add(indexRoutingTableBuilder.build()); - csBuilder.metaData(metaDateBuilder); - csBuilder.routingTable(routingTableBuilder.build()); + createWatchIndexMetaData(csBuilder); RefreshResponse refreshResponse = mockRefreshResponse(1, 1); when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); @@ -419,6 +345,65 @@ public class WatchStoreTests extends ESTestCase { assertThat(stats.getValue("watch.transform.TYPE.active"), is(greaterThan(0))); } + public void testThatCleaningWatchesWorks() throws Exception { + ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name")); + createWatchIndexMetaData(csBuilder); + + RefreshResponse refreshResponse = mockRefreshResponse(1, 1); + when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse); + + BytesReference source = new BytesArray("{}"); + 
InternalSearchHit hit = new InternalSearchHit(0, "_id1", new Text("type"), Collections.emptyMap()); + hit.sourceRef(source); + + SearchResponse searchResponse = mockSearchResponse(1, 1, 1, hit); + when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse); + + SearchResponse finalSearchResponse = mockSearchResponse(1, 1, 0); + when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(finalSearchResponse); + + Watch watch = mock(Watch.class); + WatchStatus status = mock(WatchStatus.class); + when(watch.status()).thenReturn(status); + when(parser.parse("_id1", true, source)).thenReturn(watch); + + when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0)); + + ClusterState cs = csBuilder.build(); + assertThat(watchStore.validate(cs), is(true)); + watchStore.start(cs); + assertThat(watchStore.started(), is(true)); + assertThat(watchStore.watches(), hasSize(1)); + + watchStore.clearWatchesInMemory(); + assertThat(watchStore.started(), is(true)); + assertThat(watchStore.watches(), hasSize(0)); + assertThat(watchStore.activeWatches(), hasSize(0)); + } + + /* + * Creates the standard cluster state metadata for the watches index + * with shards/replicas being marked as started + */ + private void createWatchIndexMetaData(ClusterState.Builder builder) { + MetaData.Builder metaDateBuilder = MetaData.builder(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + Settings settings = settings(Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1)); + final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex(); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); + indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0)) + .addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED)) + .build()); + indexRoutingTableBuilder.addReplica(); + routingTableBuilder.add(indexRoutingTableBuilder.build()); + builder.metaData(metaDateBuilder); + builder.routingTable(routingTableBuilder.build()); + } + private RefreshResponse mockRefreshResponse(int total, int successful) { RefreshResponse refreshResponse = mock(RefreshResponse.class); when(refreshResponse.getTotalShards()).thenReturn(total); @@ -432,7 +417,6 @@ public class WatchStoreTests extends ESTestCase { when(searchResponse.getTotalShards()).thenReturn(total); when(searchResponse.getSuccessfulShards()).thenReturn(successful); when(searchResponse.getHits()).thenReturn(internalSearchHits); - when(searchResponse.getHits()).thenReturn(internalSearchHits); return searchResponse; } diff --git a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index 9ff4ef0c06b..b778264365a 100644 --- a/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/elasticsearch/x-pack/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -128,6 +128,7 @@ import java.util.Map; import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; +import static 
org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.xpack.watcher.input.InputBuilders.searchInput; import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.templateRequest; @@ -198,7 +199,7 @@ public class WatchTests extends ESTestCase { } WatchStatus watchStatus = new WatchStatus(clock.nowUTC(), unmodifiableMap(actionsStatuses)); - TimeValue throttlePeriod = randomBoolean() ? null : TimeValue.timeValueSeconds(randomIntBetween(5, 10)); + TimeValue throttlePeriod = randomBoolean() ? null : TimeValue.timeValueSeconds(randomIntBetween(5, 10000)); Watch watch = new Watch("_name", trigger, input, condition, transform, throttlePeriod, actions, metadata, watchStatus); @@ -335,7 +336,9 @@ public class WatchTests extends ESTestCase { String type = randomFrom(SearchInput.TYPE, SimpleInput.TYPE); switch (type) { case SearchInput.TYPE: - SearchInput searchInput = searchInput(WatcherTestUtils.templateRequest(searchSource(), "idx")).build(); + SearchInput searchInput = searchInput(WatcherTestUtils.templateRequest(searchSource(), "idx")) + .timeout(randomBoolean() ? null : timeValueSeconds(between(1, 10000))) + .build(); return new ExecutableSearchInput(searchInput, logger, client, searchTemplateService, null); default: SimpleInput simpleInput = InputBuilders.simpleInput(singletonMap("_key", "_val")).build(); @@ -387,7 +390,7 @@ public class WatchTests extends ESTestCase { private ExecutableTransform randomTransform() { String type = randomFrom(ScriptTransform.TYPE, SearchTransform.TYPE, ChainTransform.TYPE); - TimeValue timeout = randomBoolean() ? TimeValue.timeValueSeconds(5) : null; + TimeValue timeout = randomBoolean() ? timeValueSeconds(between(1, 10000)) : null; DateTimeZone timeZone = randomBoolean() ? DateTimeZone.UTC : null; switch (type) { case ScriptTransform.TYPE: @@ -432,7 +435,7 @@ public class WatchTests extends ESTestCase { } if (randomBoolean()) { DateTimeZone timeZone = randomBoolean() ? DateTimeZone.UTC : null; - TimeValue timeout = randomBoolean() ? TimeValue.timeValueSeconds(30) : null; + TimeValue timeout = randomBoolean() ? timeValueSeconds(between(1, 10000)) : null; IndexAction action = new IndexAction("_index", "_type", null, timeout, timeZone); list.add(new ActionWrapper("_index_" + randomAsciiOfLength(8), randomThrottler(), randomCondition(), randomTransform(), new ExecutableIndexAction(action, logger, client, null))); @@ -440,7 +443,7 @@ public class WatchTests extends ESTestCase { if (randomBoolean()) { HttpRequestTemplate httpRequest = HttpRequestTemplate.builder("test.host", randomIntBetween(8000, 9000)) .method(randomFrom(HttpMethod.GET, HttpMethod.POST, HttpMethod.PUT)) - .path(TextTemplate.inline("_url").build()) + .path(new TextTemplate("_url")) .build(); WebhookAction action = new WebhookAction(httpRequest); list.add(new ActionWrapper("_webhook_" + randomAsciiOfLength(8), randomThrottler(), randomCondition(), randomTransform(), @@ -470,7 +473,7 @@ public class WatchTests extends ESTestCase { } private ActionThrottler randomThrottler() { - return new ActionThrottler(SystemClock.INSTANCE, randomBoolean() ? null : TimeValue.timeValueMinutes(randomIntBetween(3, 5)), + return new ActionThrottler(SystemClock.INSTANCE, randomBoolean() ? 
null : timeValueSeconds(randomIntBetween(1, 10000)), licenseState); } diff --git a/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/20_put_watch_with_throttle_period.yaml b/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/20_put_watch_with_throttle_period.yaml index 4d36f92e92d..7592d3246eb 100644 --- a/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/20_put_watch_with_throttle_period.yaml +++ b/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/20_put_watch_with_throttle_period.yaml @@ -53,4 +53,4 @@ teardown: id: "my_watch1" - match: { found : true} - match: { _id: "my_watch1" } - - match: { watch.throttle_period: "10s" } + - match: { watch.throttle_period_in_millis: 10000 } diff --git a/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/30_put_watch_with_action_throttle_period.yaml b/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/30_put_watch_with_action_throttle_period.yaml index 30348737d2e..1ba2921d255 100644 --- a/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/30_put_watch_with_action_throttle_period.yaml +++ b/elasticsearch/x-pack/watcher/src/test/resources/rest-api-spec/test/xpack/watcher/put_watch/30_put_watch_with_action_throttle_period.yaml @@ -53,4 +53,4 @@ teardown: id: "my_watch1" - match: { found : true} - match: { _id: "my_watch1" } - - match: { watch.actions.test_index.throttle_period: "10s" } + - match: { watch.actions.test_index.throttle_period_in_millis: 10000 }