From eaf67f8bc24fa1b5948c2e25b22f542b4ab57730 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 7 Dec 2017 14:40:08 +0100 Subject: [PATCH] =?UTF-8?q?Tests:=20Replace=20YAML=20tests=20with=20ESRest?= =?UTF-8?q?TestCase=20to=20be=20able=20to=20wait=20for=20=E2=80=A6=20(elas?= =?UTF-8?q?tic/x-pack-elasticsearch#3252)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Tests: Replace YAML tests with ESRestTestCase to be able to wait for events The YAML tests did not have any possibility to wait for the watches to be created. A hard ten second timeout was used, that could not be aborted, by simulating a sleep when waiting for a number of nodes that never occurred in the cluster. This commit replaces those waiting YAML tests with ESRestTestCases, that use `assertBusy()` to exit early once the watches have been added. Also this increases the wait time if needed, as these tests tend to fail on CI. relates elastic/x-pack-elasticsearch#3217 Original commit: elastic/x-pack-elasticsearch@74b9945d88f1d294832083a0e1ae403e622a0c4d --- ...oringWithWatcherClientYamlTestSuiteIT.java | 24 ---- .../MonitoringWithWatcherRestIT.java | 118 ++++++++++++++++++ .../10_local_exporter.yml | 80 ------------ .../20_http_exporter.yml | 87 ------------- 4 files changed, 118 insertions(+), 191 deletions(-) delete mode 100644 qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherClientYamlTestSuiteIT.java create mode 100644 qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java delete mode 100644 qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/10_local_exporter.yml delete mode 100644 qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/20_http_exporter.yml diff --git 
a/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherClientYamlTestSuiteIT.java b/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherClientYamlTestSuiteIT.java deleted file mode 100644 index 4fa807cfd7e..00000000000 --- a/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherClientYamlTestSuiteIT.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -public class MonitoringWithWatcherClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - - public MonitoringWithWatcherClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } -} - diff --git a/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java b/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java new file mode 100644 index 00000000000..2b1b15dc150 --- /dev/null +++ b/qa/smoke-test-monitoring-with-watcher/src/test/java/org/elasticsearch/smoketest/MonitoringWithWatcherRestIT.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.smoketest; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.yaml.ObjectPath; +import org.elasticsearch.xpack.watcher.actions.ActionBuilders; +import org.elasticsearch.xpack.watcher.client.WatchSourceBuilders; +import org.elasticsearch.xpack.watcher.trigger.TriggerBuilders; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; +import org.junit.After; + +import java.io.IOException; +import java.util.Collections; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput; +import static org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule.Interval.Unit.MINUTES; +import static org.hamcrest.Matchers.is; + +public class MonitoringWithWatcherRestIT extends ESRestTestCase { + + @After + public void cleanExporters() throws Exception { + String body = jsonBuilder().startObject().startObject("transient") + .nullField("xpack.monitoring.exporters.*") + .endObject().endObject().string(); + assertOK(adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), + new StringEntity(body, ContentType.APPLICATION_JSON))); + + assertOK(adminClient().performRequest("DELETE", ".watch*", Collections.emptyMap())); + } + + public void testThatLocalExporterAddsWatches() throws Exception { + String watchId = createMonitoringWatch(); + + String body = jsonBuilder().startObject().startObject("transient") + .field("xpack.monitoring.exporters.my_local_exporter.type", "local") + .field("xpack.monitoring.exporters.my_local_exporter.cluster_alerts.management.enabled", true) + 
.endObject().endObject().bytes().utf8ToString(); + + adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), + new StringEntity(body, ContentType.APPLICATION_JSON)); + + assertTotalWatchCount(5); + + assertMonitoringWatchHasBeenOverWritten(watchId); + } + + public void testThatHttpExporterAddsWatches() throws Exception { + String watchId = createMonitoringWatch(); + String httpHost = getHttpHost(); + + String body = jsonBuilder().startObject().startObject("transient") + .field("xpack.monitoring.exporters.my_http_exporter.type", "http") + .field("xpack.monitoring.exporters.my_http_exporter.host", httpHost) + .field("xpack.monitoring.exporters.my_http_exporter.cluster_alerts.management.enabled", true) + .endObject().endObject().bytes().utf8ToString(); + + adminClient().performRequest("PUT", "_cluster/settings", Collections.emptyMap(), + new StringEntity(body, ContentType.APPLICATION_JSON)); + + assertTotalWatchCount(5); + + assertMonitoringWatchHasBeenOverWritten(watchId); + } + + private void assertMonitoringWatchHasBeenOverWritten(String watchId) throws Exception { + ObjectPath path = ObjectPath.createFromResponse(client().performRequest("GET", "_xpack/watcher/watch/" + watchId)); + String interval = path.evaluate("watch.trigger.schedule.interval"); + assertThat(interval, is("1m")); + } + + private void assertTotalWatchCount(int expectedWatches) throws Exception { + assertBusy(() -> { + assertOK(client().performRequest("POST", ".watches/_refresh")); + ObjectPath path = ObjectPath.createFromResponse(client().performRequest("POST", ".watches/_count")); + int count = path.evaluate("count"); + assertThat(count, is(expectedWatches)); + }); + } + + private String createMonitoringWatch() throws Exception { + String clusterUUID = getClusterUUID(); + String watchId = clusterUUID + "_kibana_version_mismatch"; + String sampleWatch = WatchSourceBuilders.watchBuilder() + .trigger(TriggerBuilders.schedule(new IntervalSchedule(new 
IntervalSchedule.Interval(1000, MINUTES)))) + .input(simpleInput()) + .addAction("logme", ActionBuilders.loggingAction("foo")) + .buildAsBytes(XContentType.JSON).utf8ToString(); + client().performRequest("PUT", "_xpack/watcher/watch/" + watchId, Collections.emptyMap(), + new StringEntity(sampleWatch, ContentType.APPLICATION_JSON)); + return watchId; + } + + private String getClusterUUID() throws Exception { + Response response = client().performRequest("GET", "_cluster/state/metadata", Collections.emptyMap()); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + String clusterUUID = objectPath.evaluate("metadata.cluster_uuid"); + return clusterUUID; + } + + public String getHttpHost() throws IOException { + ObjectPath path = ObjectPath.createFromResponse(client().performRequest("GET", "_cluster/state", Collections.emptyMap())); + String masterNodeId = path.evaluate("master_node"); + + ObjectPath nodesPath = ObjectPath.createFromResponse(client().performRequest("GET", "_nodes", Collections.emptyMap())); + String httpHost = nodesPath.evaluate("nodes." 
+ masterNodeId + ".http.publish_address"); + return httpHost; + } +} diff --git a/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/10_local_exporter.yml b/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/10_local_exporter.yml deleted file mode 100644 index d350dd40753..00000000000 --- a/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/10_local_exporter.yml +++ /dev/null @@ -1,80 +0,0 @@ ---- -teardown: - - do: - cluster.put_settings: - body: - transient: - xpack.monitoring.exporters.*: null - - # delete all watcher indices, so we will start clean again - - do: - indices.delete: - index: .watch* - ---- -"Watches are installed on startup with local exporter": - - - do: - cluster.state: - metric: [ metadata ] - - set: { metadata.cluster_uuid : cluster_uuid } - - - do: - xpack.watcher.put_watch: - id: ${cluster_uuid}_kibana_version_mismatch - body: > - { - "trigger" : { - "schedule": { - "interval" : "10m" - } - }, - "input" : { - "simple" : {} - }, - "actions" : { - "logme" : { - "logging" : { - "text" : "{{ctx}}" - } - } - } - } - - - do: - cluster.put_settings: - body: - transient: - xpack.monitoring.exporters.my_local_exporter.type: "local" - xpack.monitoring.exporters.my_local_exporter.cluster_alerts.management.enabled: true - flat_settings: true - - - match: {transient: { - "xpack.monitoring.exporters.my_local_exporter.type": "local", - "xpack.monitoring.exporters.my_local_exporter.cluster_alerts.management.enabled": "true" - }} - - # sleep - - do: - catch: request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 10s - - match: { "timed_out": true } - - - do: - indices.refresh: - index: [ ".watches" ] - - - do: - search: - index: .watches - - - match: { hits.total: 5 } - - - do: - xpack.watcher.get_watch: - id: ${cluster_uuid}_kibana_version_mismatch - - # different 
interval than above means the watch was correctly replaced - - match: { watch.trigger.schedule.interval: "1m" } diff --git a/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/20_http_exporter.yml b/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/20_http_exporter.yml deleted file mode 100644 index ae1dde98c64..00000000000 --- a/qa/smoke-test-monitoring-with-watcher/src/test/resources/rest-api-spec/test/smoke_test_monitoring_with_watcher/20_http_exporter.yml +++ /dev/null @@ -1,87 +0,0 @@ ---- -teardown: - - do: - cluster.put_settings: - body: - transient: - xpack.monitoring.exporters.*: null - - # delete all watcher indices, so we will start clean again - - do: - indices.delete: - index: .watch* - ---- -"Watches are installed on startup with http exporter": - - - do: - cluster.state: {} - - set: { metadata.cluster_uuid : cluster_uuid } - - set: { master_node: master } - - - do: - nodes.info: {} - - set: { nodes.$master.http.publish_address: http_host } - - # install a watch that is going to be overwritten - - do: - xpack.watcher.put_watch: - id: ${cluster_uuid}_elasticsearch_cluster_status - body: > - { - "trigger" : { - "schedule": { - "interval" : "10m" - } - }, - "input" : { - "simple" : {} - }, - "actions" : { - "logme" : { - "logging" : { - "text" : "{{ctx}}" - } - } - } - } - - - do: - cluster.put_settings: - body: - transient: - xpack.monitoring.exporters.my_http_exporter.type: "http" - xpack.monitoring.exporters.my_http_exporter.host: $http_host - xpack.monitoring.exporters.my_http_exporter.cluster_alerts.management.enabled: true - flat_settings: true - - - match: {transient: { - "xpack.monitoring.exporters.my_http_exporter.type": "http", - "xpack.monitoring.exporters.my_http_exporter.host": "$http_host", - "xpack.monitoring.exporters.my_http_exporter.cluster_alerts.management.enabled": "true" - }} - - # sleep - - do: - catch: 
request_timeout - cluster.health: - wait_for_nodes: 99 - timeout: 10s - - match: { "timed_out": true } - - - do: - indices.refresh: - index: [ ".watches" ] - - - do: - search: - index: .watches - - - match: { hits.total: 5 } - - - do: - xpack.watcher.get_watch: - id: ${cluster_uuid}_elasticsearch_cluster_status - - # different interval than above means the watch was correctly replaced - - match: { watch.trigger.schedule.interval: "1m" }