From 6c56fe11109b64711886a36bf3b003a089d1bf35 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 22 May 2013 11:12:09 +0200 Subject: [PATCH 01/39] Initial working ExporingService, plugin + an ESExport implementation Original commit: elastic/x-pack-elasticsearch@982144e30817a64430aabf26afb7bea687c89385 --- NOTICE.txt | 5 + README.md | 0 pom.xml | 141 ++++++++++++++++++ .../java/com/elasticsearch/dash/Exporter.java | 16 ++ .../elasticsearch/dash/ExportersService.java | 101 +++++++++++++ .../java/com/elasticsearch/dash/Plugin.java | 53 +++++++ .../dash/exporters/ESExporter.java | 125 ++++++++++++++++ src/main/resources/es-plugin.properties | 1 + 8 files changed, 442 insertions(+) create mode 100644 NOTICE.txt create mode 100644 README.md create mode 100644 pom.xml create mode 100644 src/main/java/com/elasticsearch/dash/Exporter.java create mode 100644 src/main/java/com/elasticsearch/dash/ExportersService.java create mode 100644 src/main/java/com/elasticsearch/dash/Plugin.java create mode 100644 src/main/java/com/elasticsearch/dash/exporters/ESExporter.java create mode 100644 src/main/resources/es-plugin.properties diff --git a/NOTICE.txt b/NOTICE.txt new file mode 100644 index 00000000000..86016475acd --- /dev/null +++ b/NOTICE.txt @@ -0,0 +1,5 @@ +ElasticSearch +Copyright 2009-2013 ElasticSearch + +This product includes software developed by The Apache Software +Foundation (http://www.apache.org/). 
diff --git a/README.md b/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/pom.xml b/pom.xml new file mode 100644 index 00000000000..be513e39fbd --- /dev/null +++ b/pom.xml @@ -0,0 +1,141 @@ + + + elasticsearch-dash + 4.0.0 + com.elasticsearch + elasticsearch-dash + 0.1.0-SNAPSHOT + jar + Management & monitoring plugin for ElasticSearch + 2013 + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + scm:git:git@github.com:elasticsearch/elasticsearch-m-m.git + scm:git:git@github.com:elasticsearch/elasticsearch-m-m.git + + http://github.com/elasticsearch/elasticsearch-m-m + + + + org.sonatype.oss + oss-parent + 7 + + + + 1.0.0.Beta1-SNAPSHOT + + + + + sonatype + http://oss.sonatype.org/content/repositories/releases/ + + + + + + org.elasticsearch + elasticsearch + ${elasticsearch.version} + compile + + + + log4j + log4j + 1.2.17 + compile + true + + + + org.testng + testng + 6.8 + test + + + org.hamcrest + hamcrest-core + + + junit + junit + + + + + + org.hamcrest + hamcrest-all + 1.3 + test + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.6 + 1.6 + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.11 + + + **/*Tests.java + + + + + org.apache.maven.plugins + maven-source-plugin + 2.1.2 + + + attach-sources + + jar + + + + + + maven-assembly-plugin + 2.3 + + false + ${project.build.directory}/releases/ + + ${basedir}/src/main/assemblies/plugin.xml + + + + + package + + single + + + + + + + \ No newline at end of file diff --git a/src/main/java/com/elasticsearch/dash/Exporter.java b/src/main/java/com/elasticsearch/dash/Exporter.java new file mode 100644 index 00000000000..7da504080e3 --- /dev/null +++ b/src/main/java/com/elasticsearch/dash/Exporter.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package com.elasticsearch.dash; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.common.component.CloseableComponent; +import org.elasticsearch.common.component.LifecycleComponent; + +public interface Exporter extends LifecycleComponent { + + String name(); + + void exportNodeStats(NodeStats nodeStats); +} diff --git a/src/main/java/com/elasticsearch/dash/ExportersService.java b/src/main/java/com/elasticsearch/dash/ExportersService.java new file mode 100644 index 00000000000..b01013826c7 --- /dev/null +++ b/src/main/java/com/elasticsearch/dash/ExportersService.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package com.elasticsearch.dash;/* +import com.elasticsearch.dash.exporters.ESExporter; +import org.elasticsearch.ElasticSearchException; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.cluster.ClusterName; +import com.google.common.collect.ImmutableSet; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.service.NodeService; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Collection; + +public class ExportersService extends AbstractLifecycleComponent { + + private final IndicesService indicesService; + private final NodeService nodeService; + + private volatile ExportingWorker exp; + private volatile Thread 
thread; + private final TimeValue interval; + + private Collection exporters; + + @Inject + public ExportersService(Settings settings, IndicesService indicesService,ClusterName clusterName, NodeService nodeService) { + super(settings); + this.indicesService = indicesService; + this.nodeService = nodeService; + this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(5)); + + Exporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), clusterName ); + this.exporters = ImmutableSet.of(esExporter); + } + + @Override + protected void doStart() throws ElasticSearchException { + for (Exporter e: exporters) + e.start(); + + this.exp = new ExportingWorker(); + this.thread = new Thread(exp, EsExecutors.threadName(settings, "dash")); + this.thread.setDaemon(true); + this.thread.start(); + } + + @Override + protected void doStop() throws ElasticSearchException { + this.exp.closed = true; + this.thread.interrupt(); + for (Exporter e: exporters) + e.stop(); + } + + @Override + protected void doClose() throws ElasticSearchException { + for (Exporter e: exporters) + e.close(); + } + + class ExportingWorker implements Runnable { + + volatile boolean closed; + + @Override + public void run() { + while (!closed) { + // do the actual export..., go over the actual exporters list and... + try { + logger.debug("Collecting node stats"); + NodeStats nodeStats = nodeService.stats(); + + logger.debug("Exporting node stats"); + for (Exporter e: exporters) { + try { + e.exportNodeStats(nodeStats); + } + catch (Throwable t){ + logger.error("Exporter {} has thrown an exception:", t, e.name()); + } + } + Thread.sleep(interval.millis()); + } catch (InterruptedException e) { + // ignore, if closed, good.... 
+ } catch (Throwable t) { + logger.error("Background thread had an uncaught exception:", t); + } + + } + } + } +} diff --git a/src/main/java/com/elasticsearch/dash/Plugin.java b/src/main/java/com/elasticsearch/dash/Plugin.java new file mode 100644 index 00000000000..83c7450a0cd --- /dev/null +++ b/src/main/java/com/elasticsearch/dash/Plugin.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package com.elasticsearch.dash; + +import com.google.common.collect.ImmutableList; +import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.AbstractPlugin; + +import java.util.ArrayList; +import java.util.Collection; + +public class Plugin extends AbstractPlugin { + + public Plugin() { + } + + @Override + public String name() { + return "Dash"; + } + + @Override + public String description() { + return "Monitoring with an elastic sauce"; + } + + @Override + public Collection modules(Settings settings) { + Module m = new AbstractModule() { + + @Override + protected void configure() { + bind(ExportersService.class).asEagerSingleton(); + } + }; + return ImmutableList.of(m); + } + + @Override + public Collection> services() { + Collection> l = new ArrayList>(); + l.add(ExportersService.class); + return l; + } +} diff --git a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java new file mode 100644 index 00000000000..639109ba38d --- /dev/null +++ 
b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package com.elasticsearch.dash.exporters; +import com.elasticsearch.dash.Exporter; +import org.elasticsearch.ElasticSearchException; +import org.elasticsearch.ElasticSearchIllegalArgumentException; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.io.OutputStream; +import java.io.UnsupportedEncodingException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLEncoder; +import java.util.Date; + +public class ESExporter extends AbstractLifecycleComponent implements Exporter { + + final String targetHost; + final int targetPort; + + final String targetPathPrefix; + final ClusterName clusterName; + + final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); + + public ESExporter(Settings settings, ClusterName clusterName) { + super(settings); + + this.clusterName = clusterName; + + // TODO: move to a single settings. 
+ targetHost = settings.get("target.host", "localhost"); + targetPort = settings.getAsInt("target.post", 9200); + String targetIndexPrefix = settings.get("target.index.prefix", "dash"); + + try { + targetPathPrefix = String.format("/%s_%s_", + URLEncoder.encode(targetIndexPrefix,"UTF-8"), + URLEncoder.encode(clusterName.value(),"UTF-8")); + + } catch (UnsupportedEncodingException e) { + throw new ElasticSearchException("Can't encode target url", e); + } + + + logger.info("ESExporter initialized. Target: {}:{} Index prefix set to {}", targetHost, targetPort, targetIndexPrefix ); + // explode early on broken settings + getTargetURL("test"); + + } + + private URL getTargetURL(String type) { + try { + String path = String.format("%1$s%2$tY.%2$tm.%2$td/%3$s", targetPathPrefix, new Date(), type); + return new URL("http", targetHost, targetPort, path); + } catch (MalformedURLException e) { + throw new ElasticSearchIllegalArgumentException("Target settings result in a malformed url"); + } + } + + @Override + public String name() { + return "ESExporter"; + } + + @Override + public void exportNodeStats(NodeStats nodeStats) { + URL url = getTargetURL("nodestats"); + logger.debug("Exporting node stats to {}", url); + HttpURLConnection conn; + try { + conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("POST"); + conn.setDoOutput(true); + conn.setRequestProperty("Content-Type", XContentType.SMILE.restContentType()); + OutputStream os = conn.getOutputStream(); + XContentBuilder builder = XContentFactory.smileBuilder(os); + + builder.startObject(); + nodeStats.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + builder.close(); + + if (conn.getResponseCode() != 201) { + logger.error("Remote target didn't respond with 201 Created"); + } + + } catch (IOException e) { + logger.error("Error connecting to target", e); + return; + } + + + } + + + @Override + protected void doStart() throws ElasticSearchException { + } + + + @Override + 
protected void doStop() throws ElasticSearchException { + } + + @Override + protected void doClose() throws ElasticSearchException { + } + +} diff --git a/src/main/resources/es-plugin.properties b/src/main/resources/es-plugin.properties new file mode 100644 index 00000000000..98245f4d48c --- /dev/null +++ b/src/main/resources/es-plugin.properties @@ -0,0 +1 @@ +plugin=com.elasticsearch.dash.Plugin \ No newline at end of file From e6c3646d9ae58de1ad52ea0d7bb20598debee89f Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 22 May 2013 13:47:24 +0200 Subject: [PATCH 02/39] Setting human_readable to false while exporting node stats. Original commit: elastic/x-pack-elasticsearch@c0c3f0bab568678c5cc445f472a69c7859515061 --- .../com/elasticsearch/dash/exporters/ESExporter.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java index 639109ba38d..b7becdc3b8a 100644 --- a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java +++ b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java @@ -5,6 +5,7 @@ */ package com.elasticsearch.dash.exporters; import com.elasticsearch.dash.Exporter; +import com.google.common.collect.ImmutableMap; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -25,7 +26,7 @@ import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.net.URLEncoder; -import java.util.Date; +import java.util.*; public class ESExporter extends AbstractLifecycleComponent implements Exporter { @@ -37,6 +38,9 @@ public class ESExporter extends AbstractLifecycleComponent implement final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); + final ToXContent.Params xContentParams; + + public 
ESExporter(Settings settings, ClusterName clusterName) { super(settings); @@ -57,6 +61,9 @@ public class ESExporter extends AbstractLifecycleComponent implement } + xContentParams = new ToXContent.MapParams(ImmutableMap.of("human_readable","false")); + + logger.info("ESExporter initialized. Target: {}:{} Index prefix set to {}", targetHost, targetPort, targetIndexPrefix ); // explode early on broken settings getTargetURL("test"); @@ -77,6 +84,7 @@ public class ESExporter extends AbstractLifecycleComponent implement return "ESExporter"; } + @Override public void exportNodeStats(NodeStats nodeStats) { URL url = getTargetURL("nodestats"); @@ -91,7 +99,7 @@ public class ESExporter extends AbstractLifecycleComponent implement XContentBuilder builder = XContentFactory.smileBuilder(os); builder.startObject(); - nodeStats.toXContent(builder, ToXContent.EMPTY_PARAMS); + nodeStats.toXContent(builder, xContentParams); builder.endObject(); builder.close(); From 9fcfa712f4fb33297a1a3510b958cc0b8f8d85ef Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 23 May 2013 13:50:55 +0200 Subject: [PATCH 03/39] Some renaming to put stats in the name of things + added shard stats export Original commit: elastic/x-pack-elasticsearch@b1f284f912c431a20009a31fce4e51acfbe23a3b --- .../java/com/elasticsearch/dash/Plugin.java | 6 +-- .../{Exporter.java => StatsExporter.java} | 5 +- ...ervice.java => StatsExportersService.java} | 52 +++++++++++++------ .../dash/exporters/ESExporter.java | 22 +++++--- 4 files changed, 56 insertions(+), 29 deletions(-) rename src/main/java/com/elasticsearch/dash/{Exporter.java => StatsExporter.java} (74%) rename src/main/java/com/elasticsearch/dash/{ExportersService.java => StatsExportersService.java} (62%) diff --git a/src/main/java/com/elasticsearch/dash/Plugin.java b/src/main/java/com/elasticsearch/dash/Plugin.java index 83c7450a0cd..f6936515d80 100644 --- a/src/main/java/com/elasticsearch/dash/Plugin.java +++ 
b/src/main/java/com/elasticsearch/dash/Plugin.java @@ -9,8 +9,6 @@ import com.google.common.collect.ImmutableList; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.AbstractPlugin; @@ -38,7 +36,7 @@ public class Plugin extends AbstractPlugin { @Override protected void configure() { - bind(ExportersService.class).asEagerSingleton(); + bind(StatsExportersService.class).asEagerSingleton(); } }; return ImmutableList.of(m); @@ -47,7 +45,7 @@ public class Plugin extends AbstractPlugin { @Override public Collection> services() { Collection> l = new ArrayList>(); - l.add(ExportersService.class); + l.add(StatsExportersService.class); return l; } } diff --git a/src/main/java/com/elasticsearch/dash/Exporter.java b/src/main/java/com/elasticsearch/dash/StatsExporter.java similarity index 74% rename from src/main/java/com/elasticsearch/dash/Exporter.java rename to src/main/java/com/elasticsearch/dash/StatsExporter.java index 7da504080e3..eb99c59220a 100644 --- a/src/main/java/com/elasticsearch/dash/Exporter.java +++ b/src/main/java/com/elasticsearch/dash/StatsExporter.java @@ -5,12 +5,15 @@ */ package com.elasticsearch.dash; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.common.component.CloseableComponent; import org.elasticsearch.common.component.LifecycleComponent; -public interface Exporter extends LifecycleComponent { +public interface StatsExporter extends LifecycleComponent { String name(); void exportNodeStats(NodeStats nodeStats); + + void exportShardStats(ShardStats shardStats); } diff --git a/src/main/java/com/elasticsearch/dash/ExportersService.java 
b/src/main/java/com/elasticsearch/dash/StatsExportersService.java similarity index 62% rename from src/main/java/com/elasticsearch/dash/ExportersService.java rename to src/main/java/com/elasticsearch/dash/StatsExportersService.java index b01013826c7..dcb15969b78 100644 --- a/src/main/java/com/elasticsearch/dash/ExportersService.java +++ b/src/main/java/com/elasticsearch/dash/StatsExportersService.java @@ -5,46 +5,49 @@ */ package com.elasticsearch.dash;/* import com.elasticsearch.dash.exporters.ESExporter; +import com.google.common.collect.ImmutableSet; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterName; -import com.google.common.collect.ImmutableSet; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.indices.InternalIndicesService; import org.elasticsearch.node.service.NodeService; -import org.elasticsearch.threadpool.ThreadPool; import java.util.Collection; +import java.util.List; -public class ExportersService extends AbstractLifecycleComponent { +public class StatsExportersService extends AbstractLifecycleComponent { - private final IndicesService indicesService; + private final InternalIndicesService indicesService; private final NodeService nodeService; private volatile ExportingWorker exp; private volatile Thread thread; private final TimeValue interval; - private Collection exporters; + private Collection exporters; @Inject - public ExportersService(Settings settings, IndicesService indicesService,ClusterName 
clusterName, NodeService nodeService) { + public StatsExportersService(Settings settings, IndicesService indicesService, ClusterName clusterName, NodeService nodeService) { super(settings); - this.indicesService = indicesService; + this.indicesService = (InternalIndicesService) indicesService; this.nodeService = nodeService; this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(5)); - Exporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), clusterName ); + StatsExporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), clusterName); this.exporters = ImmutableSet.of(esExporter); } @Override protected void doStart() throws ElasticSearchException { - for (Exporter e: exporters) + for (StatsExporter e : exporters) e.start(); this.exp = new ExportingWorker(); @@ -57,13 +60,13 @@ public class ExportersService extends AbstractLifecycleComponent shardStatsList = indicesService.shardLevelStats(CommonStatsFlags.ALL); + + logger.debug("Exporting shards stats"); + for (StatsExporter e : exporters) { + try { + for (ShardStats shardStats : shardStatsList) + e.exportShardStats(shardStats); + } catch (Throwable t) { + logger.error("StatsExporter {} has thrown an exception:", t, e.name()); + } + } + } catch (Throwable t) { + logger.error("Background thread had an uncaught exception:", t); + } + + try { Thread.sleep(interval.millis()); } catch (InterruptedException e) { // ignore, if closed, good.... - } catch (Throwable t) { - logger.error("Background thread had an uncaught exception:", t); } } diff --git a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java index b7becdc3b8a..e7e22e26e54 100644 --- a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java +++ b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java @@ -4,11 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ package com.elasticsearch.dash.exporters; -import com.elasticsearch.dash.Exporter; +import com.elasticsearch.dash.StatsExporter; import com.google.common.collect.ImmutableMap; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.logging.ESLogger; @@ -28,7 +29,7 @@ import java.net.URL; import java.net.URLEncoder; import java.util.*; -public class ESExporter extends AbstractLifecycleComponent implements Exporter { +public class ESExporter extends AbstractLifecycleComponent implements StatsExporter { final String targetHost; final int targetPort; @@ -87,8 +88,17 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportNodeStats(NodeStats nodeStats) { - URL url = getTargetURL("nodestats"); - logger.debug("Exporting node stats to {}", url); + exportXContent("nodestats", nodeStats); + } + + @Override + public void exportShardStats(ShardStats shardStats) { + exportXContent("shardstats", shardStats); + } + + private void exportXContent(String type,ToXContent xContent) { + URL url = getTargetURL(type); + logger.debug("Exporting {} to {}", type, url); HttpURLConnection conn; try { conn = (HttpURLConnection) url.openConnection(); @@ -99,7 +109,7 @@ public class ESExporter extends AbstractLifecycleComponent implement XContentBuilder builder = XContentFactory.smileBuilder(os); builder.startObject(); - nodeStats.toXContent(builder, xContentParams); + xContent.toXContent(builder, xContentParams); builder.endObject(); builder.close(); @@ -113,10 +123,8 @@ public class ESExporter extends AbstractLifecycleComponent implement return; } - } - @Override protected void doStart() throws 
ElasticSearchException { } From 14ae5fab3a2d4184a6894554cac09fe21c6abf8b Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 28 May 2013 12:39:54 +0200 Subject: [PATCH 04/39] explicitly closing URLConnection inputstream as required by documentation (since we don't read it). Original commit: elastic/x-pack-elasticsearch@51d7bef23fb6ebed08c6038559425030d8785ae4 --- src/main/java/com/elasticsearch/dash/exporters/ESExporter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java index e7e22e26e54..0f327353718 100644 --- a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java +++ b/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java @@ -117,6 +117,7 @@ public class ESExporter extends AbstractLifecycleComponent implement if (conn.getResponseCode() != 201) { logger.error("Remote target didn't respond with 201 Created"); } + conn.getInputStream().close(); // close and release to connection pool. } catch (IOException e) { logger.error("Error connecting to target", e); From c5c2462f12c6cd643c51533a92f5d2c64b21be25 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 21 Aug 2013 11:48:30 +0200 Subject: [PATCH 05/39] Create CONTRIBUTING.md Original commit: elastic/x-pack-elasticsearch@cfd501658b970dd1ddbc811fca736835d82121e7 --- CONTRIBUTING.md | 98 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..56a9dafbf09 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,98 @@ +Contributing to elasticsearch +============================= + +Elasticsearch is an open source project and we love to receive contributions from our community — you! 
There are many ways to contribute, from writing tutorials or blog posts, improving the documentation, submitting bug reports and feature requests or writing code which can be incorporated into Elasticsearch itself. + +Bug reports +----------- + +If you think you have found a bug in Elasticsearch, first make sure that you are testing against the [latest version of Elasticsearch](http://www.elasticsearch.org/download/) - your issue may already have been fixed. If not, search our [issues list](https://github.com/elasticsearch/elasticsearch/issues) on GitHub in case a similar issue has already been opened. + +It is very helpful if you can prepare a reproduction of the bug. In other words, provide a small test case which we can run to confirm your bug. It makes it easier to find the problem and to fix it. Test cases should be provided as `curl` commands which we can copy and paste into a terminal to run it locally, for example: + +```sh +# delete the index +curl -XDELETE localhost:9200/test + +# insert a document +curl -XPUT localhost:9200/test/test/1 -d '{ + "title": "test document" +}' + +# this should return XXXX but instead returns YYY +curl .... +``` + +Provide as much information as you can. You may think that the problem lies with your query, when actually it depends on how your data is indexed. The easier it is for us to recreate your problem, the faster it is likely to be fixed. + +Feature requests +---------------- + +If you find yourself wishing for a feature that doesn't exist in Elasticsearch, you are probably not alone. There are bound to be others out there with similar needs. Many of the features that Elasticsearch has today have been added because our users saw the need. +Open an issue on our [issues list](https://github.com/elasticsearch/elasticsearch/issues) on GitHub which describes the feature you would like to see, why you need it, and how it should work. 
+ +Contributing code and documentation changes +------------------------------------------- + +If you have a bugfix or new feature that you would like to contribute to Elasticsearch, please find or open an issue about it first. Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change. + +We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code. + +The process for contributing to any of the [Elasticsearch repositories](https://github.com/elasticsearch/) is similar. Details for individual projects can be found below. + +### Fork and clone the repository + +You will need to fork the main Elasticsearch code or documentation repository and clone it to your local machine. See +[github help page](https://help.github.com/articles/fork-a-repo) for help. + +Further instructions for specific projects are given below. + +### Submitting your changes + +Once your changes and tests are ready to submit for review: + +1. Test your changes +Run the test suite to make sure that nothing is broken. + +2. Sign the Contributor License Agreement +Please make sure you have signed our [Contributor License Agreement](http://www.elasticsearch.org/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once. + +3. Rebase your changes +Update your local repository with the most recent code from the main Elasticsearch repository, and rebase your branch on top of the latest master branch. We prefer your changes to be squashed into a single commit. + +4. 
Submit a pull request +Push your local changes to your forked copy of the repository and [submit a pull request](https://help.github.com/articles/using-pull-requests). In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, eg "Closes #123". + +Then sit back and wait. There will probably be discussion about the pull request and, if any changes are needed, we would love to work with you to get your pull request merged into Elasticsearch. + + +Contributing to the Elasticsearch plugin +---------------------------------------- + +**Repository:** [https://github.com/elasticsearch/elasticsearch-dash](https://github.com/elasticsearch/elasticsearch-dash) + +Make sure you have [Maven](http://maven.apache.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE by running `mvn eclipse:eclipse` and then importing the project into their workspace: `File > Import > Existing project into workspace`. + +Please follow these formatting guidelines: + +* Java indent is 4 spaces +* Line width is 140 characters +* The rest is left to Java coding standards +* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. + +To create a distribution from the source, simply run: + +```sh +cd elasticsearch-dash/ +mvn clean package -DskipTests +``` + +You will find the newly built packages under: `./target/releases/`. 
+ +Before submitting your changes, run the test suite to make sure that nothing is broken, with: + +```sh +mvn clean test +``` + +Source: [Contributing to elasticsearch](http://www.elasticsearch.org/contributing-to-elasticsearch/) From 72357446aa7c7cdc74d2a35bf5b38eadec6d7836 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 27 Sep 2013 14:50:03 +0200 Subject: [PATCH 06/39] moved to elasticsearch-enterprise Original commit: elastic/x-pack-elasticsearch@55251278704c7988c23fa0297e343839a9a0fa76 --- pom.xml | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/pom.xml b/pom.xml index be513e39fbd..0b5171ac2dc 100644 --- a/pom.xml +++ b/pom.xml @@ -2,13 +2,13 @@ - elasticsearch-dash + elasticsearch-enterprise 4.0.0 - com.elasticsearch - elasticsearch-dash + org.elasticsearch + enterprise 0.1.0-SNAPSHOT jar - Management & monitoring plugin for ElasticSearch + Enterprise Elasticsearch 2013 @@ -17,12 +17,6 @@ repo - - scm:git:git@github.com:elasticsearch/elasticsearch-m-m.git - scm:git:git@github.com:elasticsearch/elasticsearch-m-m.git - - http://github.com/elasticsearch/elasticsearch-m-m - org.sonatype.oss @@ -31,7 +25,7 @@ - 1.0.0.Beta1-SNAPSHOT + 0.90.5 From 318b0e7b8849d2da7a9952aa0630b0d8984b13a3 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 27 Sep 2013 16:55:18 +0200 Subject: [PATCH 07/39] moved to elasticsearch-enterprise - part 2 Original commit: elastic/x-pack-elasticsearch@e7a7b2fa60f5515c7ef6fda8d0470d4c20f86b7a --- pom.xml | 2 +- .../enterprise/monitor}/Plugin.java | 4 +-- .../monitor}/StatsExportersService.java | 31 ++++++++++--------- .../monitor/exporter}/ESExporter.java | 16 +++++----- .../monitor/exporter}/StatsExporter.java | 3 +- src/main/resources/es-plugin.properties | 2 +- 6 files changed, 28 insertions(+), 30 deletions(-) rename src/main/java/{com/elasticsearch/dash => org/elasticsearch/enterprise/monitor}/Plugin.java (93%) rename src/main/java/{com/elasticsearch/dash => 
org/elasticsearch/enterprise/monitor}/StatsExportersService.java (80%) rename src/main/java/{com/elasticsearch/dash/exporters => org/elasticsearch/enterprise/monitor/exporter}/ESExporter.java (91%) rename src/main/java/{com/elasticsearch/dash => org/elasticsearch/enterprise/monitor/exporter}/StatsExporter.java (86%) diff --git a/pom.xml b/pom.xml index 0b5171ac2dc..c868e5a15bd 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ - 0.90.5 + 0.90.6-SNAPSHOT diff --git a/src/main/java/com/elasticsearch/dash/Plugin.java b/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java similarity index 93% rename from src/main/java/com/elasticsearch/dash/Plugin.java rename to src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java index f6936515d80..90c30d36f39 100644 --- a/src/main/java/com/elasticsearch/dash/Plugin.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java @@ -3,9 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package com.elasticsearch.dash; +package org.elasticsearch.enterprise.monitor; -import com.google.common.collect.ImmutableList; +import org.elasticsearch.common.collect.ImmutableList; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; diff --git a/src/main/java/com/elasticsearch/dash/StatsExportersService.java b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java similarity index 80% rename from src/main/java/com/elasticsearch/dash/StatsExportersService.java rename to src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java index dcb15969b78..8d2b32363f8 100644 --- a/src/main/java/com/elasticsearch/dash/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java @@ -3,9 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package com.elasticsearch.dash;/* -import com.elasticsearch.dash.exporters.ESExporter; -import com.google.common.collect.ImmutableSet; +package org.elasticsearch.enterprise.monitor; +import org.elasticsearch.common.collect.ImmutableSet; +import org.elasticsearch.enterprise.monitor.exporter.ESExporter; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; @@ -16,6 +16,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.enterprise.monitor.exporter.StatsExporter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesService; import org.elasticsearch.node.service.NodeService; @@ -91,18 +92,18 @@ public class StatsExportersService extends AbstractLifecycleComponent shardStatsList = indicesService.shardLevelStats(CommonStatsFlags.ALL); - - logger.debug("Exporting shards stats"); - for (StatsExporter e : exporters) { - try { - for (ShardStats shardStats : shardStatsList) - e.exportShardStats(shardStats); - } catch (Throwable t) { - logger.error("StatsExporter {} has thrown an exception:", t, e.name()); - } - } +// logger.warn("Collecting shard stats"); +// List shardStatsList = indicesService.shardLevelStats(CommonStatsFlags.ALL); +// +// logger.debug("Exporting shards stats"); +// for (StatsExporter e : exporters) { +// try { +// for (ShardStats shardStats : shardStatsList) +// e.exportShardStats(shardStats); +// } catch (Throwable t) { +// logger.error("StatsExporter {} has thrown an exception:", t, e.name()); +// } +// } } catch (Throwable t) { logger.error("Background thread had an uncaught exception:", t); } diff --git a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java 
b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java similarity index 91% rename from src/main/java/com/elasticsearch/dash/exporters/ESExporter.java rename to src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java index 0f327353718..b537ac2beed 100644 --- a/src/main/java/com/elasticsearch/dash/exporters/ESExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java @@ -3,9 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package com.elasticsearch.dash.exporters; -import com.elasticsearch.dash.StatsExporter; -import com.google.common.collect.ImmutableMap; +package org.elasticsearch.enterprise.monitor.exporter; +import org.elasticsearch.common.collect.ImmutableMap; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -50,19 +49,18 @@ public class ESExporter extends AbstractLifecycleComponent implement // TODO: move to a single settings. 
targetHost = settings.get("target.host", "localhost"); targetPort = settings.getAsInt("target.post", 9200); - String targetIndexPrefix = settings.get("target.index.prefix", "dash"); + String targetIndexPrefix = settings.get("target.index.prefix", ""); try { - targetPathPrefix = String.format("/%s_%s_", - URLEncoder.encode(targetIndexPrefix,"UTF-8"), - URLEncoder.encode(clusterName.value(),"UTF-8")); + if (!targetIndexPrefix.isEmpty()) targetIndexPrefix += targetIndexPrefix + "_"; + targetPathPrefix = "/"+ URLEncoder.encode(targetIndexPrefix,"UTF-8") + URLEncoder.encode(clusterName.value(),"UTF-8"); } catch (UnsupportedEncodingException e) { throw new ElasticSearchException("Can't encode target url", e); } - xContentParams = new ToXContent.MapParams(ImmutableMap.of("human_readable","false")); + xContentParams = new ToXContent.MapParams(ImmutableMap.of("human_readable", "false")); logger.info("ESExporter initialized. Target: {}:{} Index prefix set to {}", targetHost, targetPort, targetIndexPrefix ); @@ -93,7 +91,7 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportShardStats(ShardStats shardStats) { - exportXContent("shardstats", shardStats); + //exportXContent("shardstats", shardStats); } private void exportXContent(String type,ToXContent xContent) { diff --git a/src/main/java/com/elasticsearch/dash/StatsExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java similarity index 86% rename from src/main/java/com/elasticsearch/dash/StatsExporter.java rename to src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java index eb99c59220a..9e841848c60 100644 --- a/src/main/java/com/elasticsearch/dash/StatsExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java @@ -3,10 +3,9 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package com.elasticsearch.dash; +package org.elasticsearch.enterprise.monitor.exporter; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.common.component.CloseableComponent; import org.elasticsearch.common.component.LifecycleComponent; public interface StatsExporter extends LifecycleComponent { diff --git a/src/main/resources/es-plugin.properties b/src/main/resources/es-plugin.properties index 98245f4d48c..609ef586d11 100644 --- a/src/main/resources/es-plugin.properties +++ b/src/main/resources/es-plugin.properties @@ -1 +1 @@ -plugin=com.elasticsearch.dash.Plugin \ No newline at end of file +plugin=org.elasticsearch.enterprise.monitor.Plugin \ No newline at end of file From 41c7e045a732b6b1b1cef4c1d296d80a9e45062b Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 30 Sep 2013 17:01:09 +0200 Subject: [PATCH 08/39] Extended ESExporter infrastructure to use index templates and multiple hosts Original commit: elastic/x-pack-elasticsearch@d0f4837110a01edf720c93a03b088adf3f331e0c --- .../enterprise/monitor/Plugin.java | 2 +- .../monitor/StatsExportersService.java | 30 ++- .../monitor/exporter/ESExporter.java | 224 +++++++++++++----- .../monitor/exporter/StatsExporter.java | 2 +- 4 files changed, 183 insertions(+), 75 deletions(-) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java b/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java index 90c30d36f39..e4a7371cac0 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java @@ -22,7 +22,7 @@ public class Plugin extends AbstractPlugin { @Override public String name() { - return "Dash"; + return "Elasticsearch enterprise - monitor"; } @Override diff --git 
a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java index 8d2b32363f8..342ee6e5db0 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java @@ -4,25 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.enterprise.monitor; -import org.elasticsearch.common.collect.ImmutableSet; -import org.elasticsearch.enterprise.monitor.exporter.ESExporter; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.collect.ImmutableSet; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.enterprise.monitor.exporter.ESExporter; import org.elasticsearch.enterprise.monitor.exporter.StatsExporter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesService; import org.elasticsearch.node.service.NodeService; import java.util.Collection; -import java.util.List; public class StatsExportersService extends AbstractLifecycleComponent { @@ -52,7 +51,7 @@ public class StatsExportersService extends AbstractLifecycleComponent shardStatsList = indicesService.shardLevelStats(CommonStatsFlags.ALL); -// -// logger.debug("Exporting shards stats"); -// for (StatsExporter e : exporters) { -// try { -// for (ShardStats shardStats : shardStatsList) -// 
e.exportShardStats(shardStats); -// } catch (Throwable t) { -// logger.error("StatsExporter {} has thrown an exception:", t, e.name()); -// } -// } + logger.debug("Collecting shard stats"); + ShardStats[] shardStatsArray = indicesService.shardStats(CommonStatsFlags.ALL); + + logger.debug("Exporting shards stats"); + for (StatsExporter e : exporters) { + try { + e.exportShardStats(shardStatsArray); + } catch (Throwable t) { + logger.error("StatsExporter {} has thrown an exception:", t, e.name()); + } + } } catch (Throwable t) { logger.error("Background thread had an uncaught exception:", t); } diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java index b537ac2beed..65d41fae289 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java @@ -4,78 +4,68 @@ * you may not use this file except in compliance with the Elastic License. 
*/ package org.elasticsearch.enterprise.monitor.exporter; -import org.elasticsearch.common.collect.ImmutableMap; import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.collect.ImmutableMap; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.joda.time.DateTimeZone; +import org.elasticsearch.common.joda.time.format.DateTimeFormat; +import org.elasticsearch.common.joda.time.format.DateTimeFormatter; +import org.elasticsearch.common.joda.time.format.ISODateTimeFormat; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.smile.SmileXContent; import java.io.IOException; +import java.io.InputStream; import java.io.OutputStream; -import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; -import java.net.MalformedURLException; import java.net.URL; -import java.net.URLEncoder; -import java.util.*; public class ESExporter extends AbstractLifecycleComponent implements StatsExporter { - final String targetHost; - final int targetPort; + final String[] hosts; + final String indexPrefix; + final DateTimeFormatter indexTimeFormatter; + final int timeout; - final String targetPathPrefix; final ClusterName clusterName; final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); final ToXContent.Params 
xContentParams; + public final static DateTimeFormatter defaultDatePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); + + boolean checkedForIndexTemplate = false; + public ESExporter(Settings settings, ClusterName clusterName) { super(settings); this.clusterName = clusterName; - // TODO: move to a single settings. - targetHost = settings.get("target.host", "localhost"); - targetPort = settings.getAsInt("target.post", 9200); - String targetIndexPrefix = settings.get("target.index.prefix", ""); + hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"}); + indexPrefix = settings.get("index.prefix", "es_monitor"); + String indexTimeFormat = settings.get("index.timeformat", "YYYY.MM.dd"); + indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat); - try { - if (!targetIndexPrefix.isEmpty()) targetIndexPrefix += targetIndexPrefix + "_"; - targetPathPrefix = "/"+ URLEncoder.encode(targetIndexPrefix,"UTF-8") + URLEncoder.encode(clusterName.value(),"UTF-8"); + timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).seconds(); - } catch (UnsupportedEncodingException e) { - throw new ElasticSearchException("Can't encode target url", e); - } + xContentParams = new ToXContent.MapParams( + ImmutableMap.of("load_average_format", "hash")); - xContentParams = new ToXContent.MapParams(ImmutableMap.of("human_readable", "false")); + logger.info("ESExporter initialized. Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); - - logger.info("ESExporter initialized. 
Target: {}:{} Index prefix set to {}", targetHost, targetPort, targetIndexPrefix ); - // explode early on broken settings - getTargetURL("test"); - - } - - private URL getTargetURL(String type) { - try { - String path = String.format("%1$s%2$tY.%2$tm.%2$td/%3$s", targetPathPrefix, new Date(), type); - return new URL("http", targetHost, targetPort, path); - } catch (MalformedURLException e) { - throw new ElasticSearchIllegalArgumentException("Target settings result in a malformed url"); - } } @Override @@ -86,36 +76,61 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportNodeStats(NodeStats nodeStats) { - exportXContent("nodestats", nodeStats); + exportXContent("nodestats", new ToXContent[]{nodeStats}, nodeStats.getTimestamp()); } @Override - public void exportShardStats(ShardStats shardStats) { - //exportXContent("shardstats", shardStats); + public void exportShardStats(ShardStats[] shardStatsArray) { + exportXContent("shardstats", shardStatsArray, System.currentTimeMillis()); } - private void exportXContent(String type,ToXContent xContent) { - URL url = getTargetURL(type); - logger.debug("Exporting {} to {}", type, url); - HttpURLConnection conn; - try { - conn = (HttpURLConnection) url.openConnection(); - conn.setRequestMethod("POST"); - conn.setDoOutput(true); - conn.setRequestProperty("Content-Type", XContentType.SMILE.restContentType()); - OutputStream os = conn.getOutputStream(); - XContentBuilder builder = XContentFactory.smileBuilder(os); + private void exportXContent(String type, ToXContent[] xContentArray, long collectionTimestamp) { + if (xContentArray == null || xContentArray.length == 0) { + return; + } - builder.startObject(); - xContent.toXContent(builder, xContentParams); - builder.endObject(); - - builder.close(); - - if (conn.getResponseCode() != 201) { - logger.error("Remote target didn't respond with 201 Created"); + if (!checkedForIndexTemplate) { + if (!checkForIndexTemplate()) { + 
logger.debug("no template defined yet. skipping"); + return; } - conn.getInputStream().close(); // close and release to connection pool. + ; + } + + logger.debug("Exporting {}", type); + HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType()); + if (conn == null) { + logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts); + return; + } + try { + OutputStream os = conn.getOutputStream(); + // TODO: find a way to disable builder's substream flushing. + for (ToXContent xContent : xContentArray) { + XContentBuilder builder = XContentFactory.smileBuilder(os); + builder.startObject().startObject("index") + .field("_index", getIndexName()).field("_type", type).endObject().endObject(); + builder.flush(); + os.write(SmileXContent.smileXContent.streamSeparator()); + + builder = XContentFactory.smileBuilder(os); + builder.humanReadable(false); + builder.startObject(); + builder.field("@timestamp", defaultDatePrinter.print(collectionTimestamp)); + xContent.toXContent(builder, xContentParams); + builder.endObject(); + builder.flush(); + os.write(SmileXContent.smileXContent.streamSeparator()); + + } + os.close(); + + if (conn.getResponseCode() != 200) { + logConnectionError("remote target didn't respond with 200 OK", conn); + } else { + conn.getInputStream().close(); // close and release to connection pool. 
+ } + } catch (IOException e) { logger.error("Error connecting to target", e); @@ -137,4 +152,99 @@ public class ESExporter extends AbstractLifecycleComponent implement protected void doClose() throws ElasticSearchException { } + + private String getIndexName() { + return indexPrefix + "-" + indexTimeFormatter.print(System.currentTimeMillis()); + + } + + + private HttpURLConnection openConnection(String method, String uri) { + return openConnection(method, uri, null); + } + + private HttpURLConnection openConnection(String method, String uri, String contentType) { + for (String host : hosts) { + try { + URL templateUrl = new URL("http://" + host + uri); + HttpURLConnection conn = (HttpURLConnection) templateUrl.openConnection(); + conn.setRequestMethod(method); + conn.setConnectTimeout(timeout); + if (contentType != null) { + conn.setRequestProperty("Content-Type", XContentType.SMILE.restContentType()); + } + conn.setUseCaches(false); + if (method.equalsIgnoreCase("POST") || method.equalsIgnoreCase("PUT")) { + conn.setDoOutput(true); + } + conn.connect(); + + return conn; + } catch (IOException e) { + logger.error("error connecting to [{}]: {}", host, e); + } + } + + return null; + } + + private boolean checkForIndexTemplate() { + try { + + + String templateName = "enterprise.monitor." + indexPrefix; + + logger.debug("checking of target has template [{}]", templateName); + // DO HEAD REQUEST, when elasticsearch supports it + HttpURLConnection conn = openConnection("GET", "/_template/" + templateName); + if (conn == null) { + logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts); + return false; + } + + boolean hasTemplate = conn.getResponseCode() == 200; + + // nothing there, lets create it + if (!hasTemplate) { + logger.debug("no template found in elasticsearch for [{}]. 
Adding...", templateName); + conn = openConnection("PUT", "/_template/" + templateName, XContentType.SMILE.restContentType()); + OutputStream os = conn.getOutputStream(); + XContentBuilder builder = XContentFactory.smileBuilder(os); + builder.startObject(); + builder.field("template", indexPrefix + "*"); + builder.startObject("mappings").startObject("_default_"); + builder.startArray("dynamic_templates").startObject().startObject("string_fields") + .field("match", "*") + .field("match_mapping_type", "string") + .startObject("mapping").field("index", "not_analyzed").endObject() + .endObject().endObject().endArray(); + builder.endObject().endObject(); // mapping + root object. + builder.close(); + os.close(); + + if (conn.getResponseCode() != 200) { + logConnectionError("error adding index template to elasticsearch", conn); + } + conn.getInputStream().close(); // close and release to connection pool. + + } + checkedForIndexTemplate = true; + } catch (IOException e) { + logger.error("Error when checking/adding metrics template to elasticsearch", e); + return false; + } + return true; + } + + private void logConnectionError(String msg, HttpURLConnection conn) { + InputStream inputStream = conn.getErrorStream(); + java.util.Scanner s = new java.util.Scanner(inputStream, "UTF-8").useDelimiter("\\A"); + String err = s.hasNext() ? s.next() : ""; + try { + logger.error("{} response code [{} {}]. content: {}", msg, conn.getResponseCode(), conn.getResponseMessage(), err); + } catch (IOException e) { + logger.error("connection had an error while reporting the error. 
tough life."); + } + } } + diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java index 9e841848c60..93875ef978b 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java @@ -14,5 +14,5 @@ public interface StatsExporter extends LifecycleComponent { void exportNodeStats(NodeStats nodeStats); - void exportShardStats(ShardStats shardStats); + void exportShardStats(ShardStats[] shardStatsArray); } From f4e41585d93c54b4c22804d75b3af7f8b76ec94c Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 30 Sep 2013 20:13:58 +0200 Subject: [PATCH 09/39] added routing_format=full to default toXContent params Original commit: elastic/x-pack-elasticsearch@362429d9b75ed74428ba4790b22e82180b3f52b0 --- .../enterprise/monitor/exporter/ESExporter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java index 65d41fae289..2d82a7a0cd1 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java @@ -61,7 +61,7 @@ public class ESExporter extends AbstractLifecycleComponent implement timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).seconds(); xContentParams = new ToXContent.MapParams( - ImmutableMap.of("load_average_format", "hash")); + ImmutableMap.of("load_average_format", "hash", "routing_format", "full")); logger.info("ESExporter initialized. 
Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); @@ -97,15 +97,15 @@ public class ESExporter extends AbstractLifecycleComponent implement ; } - logger.debug("Exporting {}", type); + logger.debug("exporting {}", type); HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType()); if (conn == null) { - logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts); + logger.error("could not connect to any configured elasticsearch instances: [{}]", hosts); return; } try { OutputStream os = conn.getOutputStream(); - // TODO: find a way to disable builder's substream flushing. + // TODO: find a way to disable builder's substream flushing or something neat solution for (ToXContent xContent : xContentArray) { XContentBuilder builder = XContentFactory.smileBuilder(os); builder.startObject().startObject("index") @@ -133,7 +133,7 @@ public class ESExporter extends AbstractLifecycleComponent implement } catch (IOException e) { - logger.error("Error connecting to target", e); + logger.error("error sending data", e); return; } @@ -181,7 +181,7 @@ public class ESExporter extends AbstractLifecycleComponent implement return conn; } catch (IOException e) { - logger.error("error connecting to [{}]: {}", host, e); + logger.error("error connecting to [{}]", e, host); } } @@ -230,7 +230,7 @@ public class ESExporter extends AbstractLifecycleComponent implement } checkedForIndexTemplate = true; } catch (IOException e) { - logger.error("Error when checking/adding metrics template to elasticsearch", e); + logger.error("error when checking/adding template to elasticsearch", e); return false; } return true; From 63d0e9791007b42da07d3d5cf507c91784abfb09 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 1 Oct 2013 11:18:35 +0200 Subject: [PATCH 10/39] Added global node info to output Original commit: elastic/x-pack-elasticsearch@d81b88d2c4427daaffb809ca745fbde1b4f28fba --- 
.../monitor/StatsExportersService.java | 19 +-- .../monitor/exporter/ESExporter.java | 133 +++++++++++++++--- 2 files changed, 126 insertions(+), 26 deletions(-) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java index 342ee6e5db0..4349c7083e1 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.enterprise.monitor.exporter.ESExporter; import org.elasticsearch.enterprise.monitor.exporter.StatsExporter; import org.elasticsearch.indices.IndicesService; @@ -35,13 +36,14 @@ public class StatsExportersService extends AbstractLifecycleComponent exporters; @Inject - public StatsExportersService(Settings settings, IndicesService indicesService, ClusterName clusterName, NodeService nodeService) { + public StatsExportersService(Settings settings, IndicesService indicesService, + NodeService nodeService, Discovery discovery) { super(settings); this.indicesService = (InternalIndicesService) indicesService; this.nodeService = nodeService; this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(5)); - StatsExporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), clusterName); + StatsExporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), discovery); this.exporters = ImmutableSet.of(esExporter); } @@ -77,6 +79,13 @@ public class StatsExportersService extends AbstractLifecycleComponent implements StatsExporter { @@ -37,21 +45,25 @@ public class ESExporter extends 
AbstractLifecycleComponent implement final DateTimeFormatter indexTimeFormatter; final int timeout; - final ClusterName clusterName; + final Discovery discovery; + final String hostname; final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); - final ToXContent.Params xContentParams; - public final static DateTimeFormatter defaultDatePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); boolean checkedForIndexTemplate = false; + final NodeStatsRenderer nodeStatsRenderer; + final ShardStatsRenderer shardStatsRenderer; - public ESExporter(Settings settings, ClusterName clusterName) { + public ESExporter(Settings settings, Discovery discovery) { super(settings); - this.clusterName = clusterName; + this.discovery = discovery; + InetAddress address = NetworkUtils.getLocalAddress(); + this.hostname = address == null ? null : address.getHostName(); + hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"}); indexPrefix = settings.get("index.prefix", "es_monitor"); @@ -60,12 +72,10 @@ public class ESExporter extends AbstractLifecycleComponent implement timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).seconds(); - xContentParams = new ToXContent.MapParams( - ImmutableMap.of("load_average_format", "hash", "routing_format", "full")); - + nodeStatsRenderer = new NodeStatsRenderer(); + shardStatsRenderer = new ShardStatsRenderer(); logger.info("ESExporter initialized. 
Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); - } @Override @@ -76,16 +86,19 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportNodeStats(NodeStats nodeStats) { - exportXContent("nodestats", new ToXContent[]{nodeStats}, nodeStats.getTimestamp()); + nodeStatsRenderer.reset(nodeStats); + exportXContent("nodestats", nodeStatsRenderer); } @Override public void exportShardStats(ShardStats[] shardStatsArray) { - exportXContent("shardstats", shardStatsArray, System.currentTimeMillis()); + shardStatsRenderer.reset(shardStatsArray); + exportXContent("shardstats", shardStatsRenderer); } - private void exportXContent(String type, ToXContent[] xContentArray, long collectionTimestamp) { - if (xContentArray == null || xContentArray.length == 0) { + + private void exportXContent(String type, MultiXContentRenderer xContentRenderer) { + if (xContentRenderer.length() == 0) { return; } @@ -106,7 +119,7 @@ public class ESExporter extends AbstractLifecycleComponent implement try { OutputStream os = conn.getOutputStream(); // TODO: find a way to disable builder's substream flushing or something neat solution - for (ToXContent xContent : xContentArray) { + for (int i = 0; i < xContentRenderer.length(); i++) { XContentBuilder builder = XContentFactory.smileBuilder(os); builder.startObject().startObject("index") .field("_index", getIndexName()).field("_type", type).endObject().endObject(); @@ -115,10 +128,7 @@ public class ESExporter extends AbstractLifecycleComponent implement builder = XContentFactory.smileBuilder(os); builder.humanReadable(false); - builder.startObject(); - builder.field("@timestamp", defaultDatePrinter.print(collectionTimestamp)); - xContent.toXContent(builder, xContentParams); - builder.endObject(); + xContentRenderer.render(i, builder); builder.flush(); os.write(SmileXContent.smileXContent.streamSeparator()); @@ -246,5 +256,92 @@ public class ESExporter extends 
AbstractLifecycleComponent implement logger.error("connection had an error while reporting the error. tough life."); } } + + interface MultiXContentRenderer { + + int length(); + + void render(int index, XContentBuilder builder) throws IOException; + } + + + private void addNodeInfo(XContentBuilder builder) throws IOException { + builder.startObject("node"); + DiscoveryNode node = discovery.localNode(); + builder.field("id", node.id()); + builder.field("name", node.name()); + builder.field("transport_address", node.address()); + + if (hostname != null) { + builder.field("hostname", hostname); + } + + if (!node.attributes().isEmpty()) { + builder.startObject("attributes"); + for (Map.Entry attr : node.attributes().entrySet()) { + builder.field(attr.getKey(), attr.getValue()); + } + builder.endObject(); + } + builder.endObject(); + } + + + class NodeStatsRenderer implements MultiXContentRenderer { + + NodeStats stats; + ToXContent.MapParams xContentParams = new ToXContent.MapParams( + ImmutableMap.of("node_info_format", "none", "load_average_format", "hash")); + + public void reset(NodeStats stats) { + this.stats = stats; + } + + @Override + public int length() { + return 1; + } + + @Override + public void render(int index, XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("@timestamp", defaultDatePrinter.print(stats.getTimestamp())); + addNodeInfo(builder); + stats.toXContent(builder, xContentParams); + builder.endObject(); + } + } + + class ShardStatsRenderer implements MultiXContentRenderer { + + ShardStats[] stats; + long collectionTime; + ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; + + public void reset(ShardStats[] stats) { + this.stats = stats; + collectionTime = System.currentTimeMillis(); + } + + @Override + public int length() { + return stats == null ? 
0 : stats.length; + } + + @Override + public void render(int index, XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); + ShardRouting shardRouting = stats[index].getShardRouting(); + builder.field("id", shardRouting.id()); + builder.field("index", shardRouting.index()); + builder.field("status", shardRouting.state()); + builder.field("primary", shardRouting.primary()); + addNodeInfo(builder); + stats[index].getStats().toXContent(builder, xContentParams); + builder.endObject(); + } + } + } From 0f3949b2ea2bc567341aadcc030453ba17d36ddc Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 2 Oct 2013 10:36:58 +0200 Subject: [PATCH 11/39] Added IP to node description Original commit: elastic/x-pack-elasticsearch@cfa42db1332b94687bf91c86121f57fa8fb366c8 --- .../enterprise/monitor/exporter/ESExporter.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java index a41b9f2a04d..2164da90545 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -272,6 +273,14 @@ public class ESExporter extends AbstractLifecycleComponent implement builder.field("name", node.name()); builder.field("transport_address", node.address()); + if (node.address().uniqueAddressTypeId() 
== 1) { // InetSocket + InetSocketTransportAddress address = (InetSocketTransportAddress) node.address(); + InetAddress inetAddress = address.address().getAddress(); + if (inetAddress != null) { + builder.field("ip", inetAddress.getHostAddress()); + } + } + if (hostname != null) { builder.field("hostname", hostname); } From 9f55d4328fcf7a73454b500caa30a9e4c1324ed9 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 2 Oct 2013 22:36:16 +0200 Subject: [PATCH 12/39] Added an assemblies/plugin.xml Original commit: elastic/x-pack-elasticsearch@af745c177492ef09ad15cde8a483d6c1698d0de5 --- pom.xml | 2 +- src/main/assemblies/plugin.xml | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 src/main/assemblies/plugin.xml diff --git a/pom.xml b/pom.xml index c868e5a15bd..8dc3caba1d9 100644 --- a/pom.xml +++ b/pom.xml @@ -47,7 +47,7 @@ log4j log4j 1.2.17 - compile + runtime true diff --git a/src/main/assemblies/plugin.xml b/src/main/assemblies/plugin.xml new file mode 100644 index 00000000000..2ccfc875e9c --- /dev/null +++ b/src/main/assemblies/plugin.xml @@ -0,0 +1,17 @@ + + plugin + + zip + + false + + + / + true + true + + org.elasticsearch:elasticsearch + + + + \ No newline at end of file From 2adb58159f089f51856720af3f8e49f371557664 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 7 Oct 2013 23:27:53 +0200 Subject: [PATCH 13/39] Renamed types to shard_stats and node_stats, rename shard_stats.id to shard_stats.shard_id and shard_stats.status to shard_stats.state. 
Original commit: elastic/x-pack-elasticsearch@3ca470937a3310f12c25c6a6ba8ac7bb97de5c53 --- .../enterprise/monitor/exporter/ESExporter.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java index 2164da90545..0f2ac1dd714 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java @@ -88,13 +88,13 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportNodeStats(NodeStats nodeStats) { nodeStatsRenderer.reset(nodeStats); - exportXContent("nodestats", nodeStatsRenderer); + exportXContent("node_stats", nodeStatsRenderer); } @Override public void exportShardStats(ShardStats[] shardStatsArray) { shardStatsRenderer.reset(shardStatsArray); - exportXContent("shardstats", shardStatsRenderer); + exportXContent("shard_stats", shardStatsRenderer); } @@ -342,9 +342,9 @@ public class ESExporter extends AbstractLifecycleComponent implement builder.startObject(); builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); ShardRouting shardRouting = stats[index].getShardRouting(); - builder.field("id", shardRouting.id()); builder.field("index", shardRouting.index()); - builder.field("status", shardRouting.state()); + builder.field("shard_id", shardRouting.id()); + builder.field("shard_state", shardRouting.state()); builder.field("primary", shardRouting.primary()); addNodeInfo(builder); stats[index].getStats().toXContent(builder, xContentParams); From 9604447d4dc2b0d0ce6ec0f6ed04d3df86b3d521 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 28 Oct 2013 13:51:43 +0100 Subject: [PATCH 14/39] Added index and indices aggregate stats exported. 
Added current version of dashboards Original commit: elastic/x-pack-elasticsearch@657fea032ebe7320d4b50eaa152faa63c77b3a58 --- .../monitor/StatsExportersService.java | 36 +++- .../monitor/exporter/ESExporter.java | 187 ++++++++++++++---- .../monitor/exporter/StatsExporter.java | 8 + 3 files changed, 187 insertions(+), 44 deletions(-) diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java index 4349c7083e1..b3e64634d22 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java @@ -7,8 +7,10 @@ package org.elasticsearch.enterprise.monitor; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.collect.ImmutableSet; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -28,6 +30,8 @@ public class StatsExportersService extends AbstractLifecycleComponent implement final NodeStatsRenderer nodeStatsRenderer; final ShardStatsRenderer shardStatsRenderer; + final IndexStatsRenderer indexStatsRenderer; + final IndicesStatsRenderer indicesStatsRenderer; public ESExporter(Settings settings, Discovery discovery) { super(settings); @@ -75,6 +77,8 @@ public class ESExporter extends AbstractLifecycleComponent implement nodeStatsRenderer = new NodeStatsRenderer(); shardStatsRenderer = new ShardStatsRenderer(); + indexStatsRenderer = new IndexStatsRenderer(); 
+ indicesStatsRenderer = new IndicesStatsRenderer(); logger.info("ESExporter initialized. Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); } @@ -97,52 +101,86 @@ public class ESExporter extends AbstractLifecycleComponent implement exportXContent("shard_stats", shardStatsRenderer); } + @Override + public void exportIndicesStats(IndicesStatsResponse indicesStats) { + Map perIndexStats = indicesStats.getIndices(); + indexStatsRenderer.reset(perIndexStats.values().toArray(new IndexStats[perIndexStats.size()])); + indicesStatsRenderer.reset(indicesStats.getTotal(), indicesStats.getPrimaries()); + logger.debug("exporting index_stats + indices_stats"); + HttpURLConnection conn = openExportingConnection(); + if (conn == null) { + return; + } + try { + addXContentRendererToConnection(conn, "index_stats", indexStatsRenderer); + addXContentRendererToConnection(conn, "indices_stats", indicesStatsRenderer); + sendCloseExportingConnection(conn); + } catch (IOException e) { + logger.error("error sending data", e); + return; + } + } + + private HttpURLConnection openExportingConnection() { + if (!checkedForIndexTemplate) { + if (!checkForIndexTemplate()) { + logger.debug("no template defined yet. 
skipping"); + return null; + } + } + + logger.trace("setting up an export connection"); + HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType()); + if (conn == null) { + logger.error("could not connect to any configured elasticsearch instances: [{}]", hosts); + } + return conn; + } + + private void addXContentRendererToConnection(HttpURLConnection conn, String type, + MultiXContentRenderer renderer) throws IOException { + OutputStream os = conn.getOutputStream(); + // TODO: find a way to disable builder's substream flushing or something neat solution + for (int i = 0; i < renderer.length(); i++) { + XContentBuilder builder = XContentFactory.smileBuilder(os); + builder.startObject().startObject("index") + .field("_index", getIndexName()).field("_type", type).endObject().endObject(); + builder.flush(); + os.write(SmileXContent.smileXContent.streamSeparator()); + + builder = XContentFactory.smileBuilder(os); + builder.humanReadable(false); + renderer.render(i, builder); + builder.flush(); + os.write(SmileXContent.smileXContent.streamSeparator()); + } + } + + private void sendCloseExportingConnection(HttpURLConnection conn) throws IOException { + logger.trace("sending exporting content"); + OutputStream os = conn.getOutputStream(); + os.close(); + + if (conn.getResponseCode() != 200) { + logConnectionError("remote target didn't respond with 200 OK", conn); + } else { + conn.getInputStream().close(); // close and release to connection pool. + } + } private void exportXContent(String type, MultiXContentRenderer xContentRenderer) { if (xContentRenderer.length() == 0) { return; } - if (!checkedForIndexTemplate) { - if (!checkForIndexTemplate()) { - logger.debug("no template defined yet. 
skipping"); - return; - } - ; - } - logger.debug("exporting {}", type); - HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType()); + HttpURLConnection conn = openExportingConnection(); if (conn == null) { - logger.error("could not connect to any configured elasticsearch instances: [{}]", hosts); return; } try { - OutputStream os = conn.getOutputStream(); - // TODO: find a way to disable builder's substream flushing or something neat solution - for (int i = 0; i < xContentRenderer.length(); i++) { - XContentBuilder builder = XContentFactory.smileBuilder(os); - builder.startObject().startObject("index") - .field("_index", getIndexName()).field("_type", type).endObject().endObject(); - builder.flush(); - os.write(SmileXContent.smileXContent.streamSeparator()); - - builder = XContentFactory.smileBuilder(os); - builder.humanReadable(false); - xContentRenderer.render(i, builder); - builder.flush(); - os.write(SmileXContent.smileXContent.streamSeparator()); - - } - os.close(); - - if (conn.getResponseCode() != 200) { - logConnectionError("remote target didn't respond with 200 OK", conn); - } else { - conn.getInputStream().close(); // close and release to connection pool. 
- } - - + addXContentRendererToConnection(conn, type, xContentRenderer); + sendCloseExportingConnection(conn); } catch (IOException e) { logger.error("error sending data", e); return; @@ -267,7 +305,11 @@ public class ESExporter extends AbstractLifecycleComponent implement private void addNodeInfo(XContentBuilder builder) throws IOException { - builder.startObject("node"); + addNodeInfo(builder, "node"); + } + + private void addNodeInfo(XContentBuilder builder, String fieldname) throws IOException { + builder.startObject(fieldname); DiscoveryNode node = discovery.localNode(); builder.field("id", node.id()); builder.field("name", node.name()); @@ -352,5 +394,72 @@ public class ESExporter extends AbstractLifecycleComponent implement } } + class IndexStatsRenderer implements MultiXContentRenderer { + + IndexStats[] stats; + long collectionTime; + ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; + + public void reset(IndexStats[] stats) { + this.stats = stats; + collectionTime = System.currentTimeMillis(); + } + + @Override + public int length() { + return stats == null ? 
0 : stats.length; + } + + @Override + public void render(int index, XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); + IndexStats indexStats = stats[index]; + builder.field("index", indexStats.getIndex()); + addNodeInfo(builder, "_source_node"); + builder.startObject("primaries"); + indexStats.getPrimaries().toXContent(builder, xContentParams); + builder.endObject(); + builder.startObject("total"); + indexStats.getTotal().toXContent(builder, xContentParams); + builder.endObject(); + builder.endObject(); + } + } + + class IndicesStatsRenderer implements MultiXContentRenderer { + + CommonStats totalStats; + CommonStats primariesStats; + long collectionTime; + ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; + + public void reset(CommonStats totalStats, CommonStats primariesStats) { + this.totalStats = totalStats; + this.primariesStats = primariesStats; + collectionTime = System.currentTimeMillis(); + } + + @Override + public int length() { + return totalStats == null ? 
0 : 1; + } + + @Override + public void render(int index, XContentBuilder builder) throws IOException { + assert index == 0; + builder.startObject(); + builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); + addNodeInfo(builder, "_source_node"); + builder.startObject("primaries"); + primariesStats.toXContent(builder, xContentParams); + builder.endObject(); + builder.startObject("total"); + totalStats.toXContent(builder, xContentParams); + builder.endObject(); + builder.endObject(); + } + } + } diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java index 93875ef978b..ba4a796bc01 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java +++ b/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java @@ -5,9 +5,14 @@ */ package org.elasticsearch.enterprise.monitor.exporter; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStats; +import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.common.component.LifecycleComponent; +import java.util.Map; + public interface StatsExporter extends LifecycleComponent { String name(); @@ -15,4 +20,7 @@ public interface StatsExporter extends LifecycleComponent { void exportNodeStats(NodeStats nodeStats); void exportShardStats(ShardStats[] shardStatsArray); + + void exportIndicesStats(IndicesStatsResponse indicesStats); + } From 06b392d581f427e37e00d5096d4241b42b322610 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 28 Oct 2013 17:07:08 +0100 Subject: [PATCH 15/39] renamed to marvel Original commit: elastic/x-pack-elasticsearch@c545942270ec28f7bba7a76ebec3050388d3db4c --- pom.xml | 6 +++--- 
.../{enterprise => marvel}/monitor/Plugin.java | 4 ++-- .../monitor/StatsExportersService.java | 6 +++--- .../{enterprise => marvel}/monitor/exporter/ESExporter.java | 4 ++-- .../monitor/exporter/StatsExporter.java | 2 +- src/main/resources/es-plugin.properties | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) rename src/main/java/org/elasticsearch/{enterprise => marvel}/monitor/Plugin.java (93%) rename src/main/java/org/elasticsearch/{enterprise => marvel}/monitor/StatsExportersService.java (96%) rename src/main/java/org/elasticsearch/{enterprise => marvel}/monitor/exporter/ESExporter.java (99%) rename src/main/java/org/elasticsearch/{enterprise => marvel}/monitor/exporter/StatsExporter.java (94%) diff --git a/pom.xml b/pom.xml index 8dc3caba1d9..3a30a6c15e1 100644 --- a/pom.xml +++ b/pom.xml @@ -2,13 +2,13 @@ - elasticsearch-enterprise + elasticsearch-marvel 4.0.0 org.elasticsearch - enterprise + marvel 0.1.0-SNAPSHOT jar - Enterprise Elasticsearch + Elasticsearch Marvel 2013 diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java b/src/main/java/org/elasticsearch/marvel/monitor/Plugin.java similarity index 93% rename from src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java rename to src/main/java/org/elasticsearch/marvel/monitor/Plugin.java index e4a7371cac0..171d609a90e 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/Plugin.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/Plugin.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.enterprise.monitor; +package org.elasticsearch.marvel.monitor; import org.elasticsearch.common.collect.ImmutableList; import org.elasticsearch.common.component.LifecycleComponent; @@ -22,7 +22,7 @@ public class Plugin extends AbstractPlugin { @Override public String name() { - return "Elasticsearch enterprise - monitor"; + return "Elasticsearch marvel - monitor"; } @Override diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java similarity index 96% rename from src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java rename to src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java index b3e64634d22..639fa20e838 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.enterprise.monitor; +package org.elasticsearch.marvel.monitor; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; @@ -18,8 +18,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.Discovery; -import org.elasticsearch.enterprise.monitor.exporter.ESExporter; -import org.elasticsearch.enterprise.monitor.exporter.StatsExporter; +import org.elasticsearch.marvel.monitor.exporter.ESExporter; +import org.elasticsearch.marvel.monitor.exporter.StatsExporter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesService; import org.elasticsearch.node.service.NodeService; diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java similarity index 99% rename from src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java rename to src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java index 895976f9c03..62abc42fb82 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.enterprise.monitor.exporter; +package org.elasticsearch.marvel.monitor.exporter; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; @@ -241,7 +241,7 @@ public class ESExporter extends AbstractLifecycleComponent implement try { - String templateName = "enterprise.monitor." + indexPrefix; + String templateName = "marvel.monitor." + indexPrefix; logger.debug("checking of target has template [{}]", templateName); // DO HEAD REQUEST, when elasticsearch supports it diff --git a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java similarity index 94% rename from src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java rename to src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java index ba4a796bc01..860d88dead2 100644 --- a/src/main/java/org/elasticsearch/enterprise/monitor/exporter/StatsExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.enterprise.monitor.exporter; +package org.elasticsearch.marvel.monitor.exporter; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.IndexStats; diff --git a/src/main/resources/es-plugin.properties b/src/main/resources/es-plugin.properties index 609ef586d11..72b2c0afd39 100644 --- a/src/main/resources/es-plugin.properties +++ b/src/main/resources/es-plugin.properties @@ -1 +1 @@ -plugin=org.elasticsearch.enterprise.monitor.Plugin \ No newline at end of file +plugin=org.elasticsearch.marvel.monitor.Plugin \ No newline at end of file From 80b85f79ce644df70323fc144d1cc247b29af408 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 31 Oct 2013 17:15:55 +0100 Subject: [PATCH 16/39] Added annotation infrastructure + a implementation for shard events Original commit: elastic/x-pack-elasticsearch@bfdf001a6f6e9c1cb16d9fc0d78b159a7b2a5f72 --- .../marvel/monitor/StatsExportersService.java | 153 +++++++++++++----- .../marvel/monitor/annotation/Annotation.java | 48 ++++++ .../annotation/ShardEventAnnotation.java | 57 +++++++ .../marvel/monitor/exporter/ESExporter.java | 85 ++++++++-- .../monitor/exporter/StatsExporter.java | 2 + 5 files changed, 297 insertions(+), 48 deletions(-) create mode 100644 src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java create mode 100644 src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java diff --git a/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java index 639fa20e838..0c3fdaeb030 100644 --- a/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java @@ -11,20 +11,30 @@ import 
org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableSet; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.service.IndexShard; +import org.elasticsearch.indices.IndicesLifecycle; +import org.elasticsearch.marvel.monitor.annotation.Annotation; +import org.elasticsearch.marvel.monitor.annotation.ShardEventAnnotation; import org.elasticsearch.marvel.monitor.exporter.ESExporter; import org.elasticsearch.marvel.monitor.exporter.StatsExporter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesService; import org.elasticsearch.node.service.NodeService; +import java.util.ArrayList; import java.util.Collection; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; public class StatsExportersService extends AbstractLifecycleComponent { @@ -33,12 +43,16 @@ public class StatsExportersService extends AbstractLifecycleComponent exporters; + private final BlockingQueue pendingAnnotationsQueue; + @Inject public StatsExportersService(Settings settings, IndicesService indicesService, NodeService nodeService, ClusterService clusterService, @@ -53,6 +67,9 @@ public class StatsExportersService extends AbstractLifecycleComponent annotationsList = new ArrayList(pendingAnnotationsQueue.size()); + 
pendingAnnotationsQueue.drainTo(annotationsList); + Annotation[] annotations = new Annotation[annotationsList.size()]; + annotationsList.toArray(annotations); + + for (StatsExporter e : exporters) { + try { + e.exportAnnotations(annotations); + } catch (Throwable t) { + logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); + } + } + } + + private void exportShardStats() { + logger.debug("Collecting shard stats"); + ShardStats[] shardStatsArray = indicesService.shardStats(CommonStatsFlags.ALL); + + logger.debug("Exporting shards stats"); + for (StatsExporter e : exporters) { + try { + e.exportShardStats(shardStatsArray); + } catch (Throwable t) { + logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); + } + } + } + + private void exportNodeStats() { + logger.debug("Collecting node stats"); + NodeStats nodeStats = nodeService.stats(); + + logger.debug("Exporting node stats"); + for (StatsExporter e : exporters) { + try { + e.exportNodeStats(nodeStats); + } catch (Throwable t) { + logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); + } + } + } + } + + + class IndicesLifeCycleListener extends IndicesLifecycle.Listener { + @Override + public void afterIndexShardStarted(IndexShard indexShard) { + pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.STARTED, + indexShard.shardId(), indexShard.routingEntry())); + + } + + @Override + public void beforeIndexShardCreated(ShardId shardId) { + pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CREATED, + shardId, null)); + } + + @Override + public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard) { + pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CLOSED, + indexShard.shardId(), indexShard.routingEntry())); + } } } diff --git 
a/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java b/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java new file mode 100644 index 00000000000..9d883db4ccc --- /dev/null +++ b/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.monitor.annotation; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.joda.time.format.DateTimeFormatter; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +public abstract class Annotation { + + public final static DateTimeFormatter datePrinter = Joda.forPattern("date_time").printer(); + + protected long timestamp; + + public Annotation(long timestamp) { + this.timestamp = timestamp; + } + + public long timestamp() { + return timestamp; + } + + /** + * @return annotation's type as a short string without spaces + */ + public abstract String type(); + + /** + * should return a short string based description of the annotation + */ + abstract String conciseDescription(); + + @Override + public String toString() { + return "[" + type() + "] annotation: [" + conciseDescription() + "]"; + } + + public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field("@timestamp", datePrinter.print(timestamp)); + builder.field("message", conciseDescription()); + return builder; + } +} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java b/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java new file mode 100644 index 
00000000000..a99ae5bfdbf --- /dev/null +++ b/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.monitor.annotation; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; + +public class ShardEventAnnotation extends Annotation { + + private final ShardRouting shardRouting; + private final ShardId shardId; + private EventType event; + + public enum EventType { + CREATED, + STARTED, + CLOSED + } + + + public ShardEventAnnotation(long timestamp, EventType event, ShardId shardId, ShardRouting shardRouting) { + super(timestamp); + this.event = event; + this.shardId = shardId; + this.shardRouting = shardRouting; + } + + @Override + public String type() { + return "shard_event"; + } + + @Override + String conciseDescription() { + return "[" + event + "]" + (shardRouting != null ? 
shardRouting : shardId); + } + + @Override + public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException { + super.addXContentBody(builder, params); + builder.field("event", event); + builder.field("index", shardId.index()); + builder.field("shard_id", shardId.id()); + if (shardRouting != null) { + builder.field("routing"); + shardRouting.toXContent(builder, params); + } + + return builder; + } +} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java index 62abc42fb82..65b9041527a 100644 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java @@ -14,10 +14,9 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.collect.ImmutableMap; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.joda.time.DateTimeZone; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.joda.time.format.DateTimeFormat; import org.elasticsearch.common.joda.time.format.DateTimeFormatter; -import org.elasticsearch.common.joda.time.format.ISODateTimeFormat; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkUtils; @@ -30,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.marvel.monitor.annotation.Annotation; import java.io.IOException; import java.io.InputStream; @@ -49,9 +49,10 @@ public class ESExporter extends AbstractLifecycleComponent implement final Discovery discovery; final 
String hostname; + // TODO: logger name is not good now. Figure out why. final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); - public final static DateTimeFormatter defaultDatePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); + public final static DateTimeFormatter defaultDatePrinter = Joda.forPattern("date_time").printer(); boolean checkedForIndexTemplate = false; @@ -59,6 +60,7 @@ public class ESExporter extends AbstractLifecycleComponent implement final ShardStatsRenderer shardStatsRenderer; final IndexStatsRenderer indexStatsRenderer; final IndicesStatsRenderer indicesStatsRenderer; + final AnnotationsRenderer annotationsRenderer; public ESExporter(Settings settings, Discovery discovery) { super(settings); @@ -79,6 +81,7 @@ public class ESExporter extends AbstractLifecycleComponent implement shardStatsRenderer = new ShardStatsRenderer(); indexStatsRenderer = new IndexStatsRenderer(); indicesStatsRenderer = new IndicesStatsRenderer(); + annotationsRenderer = new AnnotationsRenderer(); logger.info("ESExporter initialized. 
Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); } @@ -92,13 +95,13 @@ public class ESExporter extends AbstractLifecycleComponent implement @Override public void exportNodeStats(NodeStats nodeStats) { nodeStatsRenderer.reset(nodeStats); - exportXContent("node_stats", nodeStatsRenderer); + exportXContent(nodeStatsRenderer); } @Override public void exportShardStats(ShardStats[] shardStatsArray) { shardStatsRenderer.reset(shardStatsArray); - exportXContent("shard_stats", shardStatsRenderer); + exportXContent(shardStatsRenderer); } @Override @@ -112,8 +115,8 @@ public class ESExporter extends AbstractLifecycleComponent implement return; } try { - addXContentRendererToConnection(conn, "index_stats", indexStatsRenderer); - addXContentRendererToConnection(conn, "indices_stats", indicesStatsRenderer); + addXContentRendererToConnection(conn, indexStatsRenderer); + addXContentRendererToConnection(conn, indicesStatsRenderer); sendCloseExportingConnection(conn); } catch (IOException e) { logger.error("error sending data", e); @@ -121,6 +124,13 @@ public class ESExporter extends AbstractLifecycleComponent implement } } + @Override + public void exportAnnotations(Annotation[] annotations) { + annotationsRenderer.reset(annotations); + exportXContent(annotationsRenderer); + } + + private HttpURLConnection openExportingConnection() { if (!checkedForIndexTemplate) { if (!checkForIndexTemplate()) { @@ -137,14 +147,14 @@ public class ESExporter extends AbstractLifecycleComponent implement return conn; } - private void addXContentRendererToConnection(HttpURLConnection conn, String type, + private void addXContentRendererToConnection(HttpURLConnection conn, MultiXContentRenderer renderer) throws IOException { OutputStream os = conn.getOutputStream(); // TODO: find a way to disable builder's substream flushing or something neat solution for (int i = 0; i < renderer.length(); i++) { XContentBuilder builder = XContentFactory.smileBuilder(os); 
builder.startObject().startObject("index") - .field("_index", getIndexName()).field("_type", type).endObject().endObject(); + .field("_index", getIndexName()).field("_type", renderer.type(i)).endObject().endObject(); builder.flush(); os.write(SmileXContent.smileXContent.streamSeparator()); @@ -168,18 +178,17 @@ public class ESExporter extends AbstractLifecycleComponent implement } } - private void exportXContent(String type, MultiXContentRenderer xContentRenderer) { + private void exportXContent(MultiXContentRenderer xContentRenderer) { if (xContentRenderer.length() == 0) { return; } - logger.debug("exporting {}", type); HttpURLConnection conn = openExportingConnection(); if (conn == null) { return; } try { - addXContentRendererToConnection(conn, type, xContentRenderer); + addXContentRendererToConnection(conn, xContentRenderer); sendCloseExportingConnection(conn); } catch (IOException e) { logger.error("error sending data", e); @@ -300,6 +309,8 @@ public class ESExporter extends AbstractLifecycleComponent implement int length(); + String type(int i); + void render(int index, XContentBuilder builder) throws IOException; } @@ -353,6 +364,11 @@ public class ESExporter extends AbstractLifecycleComponent implement return 1; } + @Override + public String type(int i) { + return "node_stats"; + } + @Override public void render(int index, XContentBuilder builder) throws IOException { builder.startObject(); @@ -379,6 +395,11 @@ public class ESExporter extends AbstractLifecycleComponent implement return stats == null ? 0 : stats.length; } + @Override + public String type(int i) { + return "shard_stats"; + } + @Override public void render(int index, XContentBuilder builder) throws IOException { builder.startObject(); @@ -410,6 +431,11 @@ public class ESExporter extends AbstractLifecycleComponent implement return stats == null ? 
0 : stats.length; } + @Override + public String type(int i) { + return "index_stats"; + } + @Override public void render(int index, XContentBuilder builder) throws IOException { builder.startObject(); @@ -445,6 +471,11 @@ public class ESExporter extends AbstractLifecycleComponent implement return totalStats == null ? 0 : 1; } + @Override + public String type(int i) { + return "indices_stats"; + } + @Override public void render(int index, XContentBuilder builder) throws IOException { assert index == 0; @@ -461,5 +492,35 @@ public class ESExporter extends AbstractLifecycleComponent implement } } + + class AnnotationsRenderer implements MultiXContentRenderer { + + Annotation[] annotations; + ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; + + public void reset(Annotation[] annotations) { + this.annotations = annotations; + } + + @Override + public int length() { + return annotations == null ? 0 : annotations.length; + } + + @Override + public String type(int i) { + return annotations[i].type(); + } + + + @Override + public void render(int index, XContentBuilder builder) throws IOException { + builder.startObject(); + addNodeInfo(builder, "node"); + annotations[index].addXContentBody(builder, xContentParams); + builder.endObject(); + } + } + } diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java index 860d88dead2..ffec9d567f9 100644 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.marvel.monitor.annotation.Annotation; import 
java.util.Map; @@ -23,4 +24,5 @@ public interface StatsExporter extends LifecycleComponent { void exportIndicesStats(IndicesStatsResponse indicesStats); + void exportAnnotations(Annotation[] annotations); } From 4dbfcc62cef4255b00a4c0949b0d37873ff3762e Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 5 Nov 2013 19:48:00 +0100 Subject: [PATCH 17/39] Move to standard ES v0.90.6 Original commit: elastic/x-pack-elasticsearch@544319733f8303b4f3856c21df969dd57483f6f7 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 3a30a6c15e1..d82dc2eae0e 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ - 0.90.6-SNAPSHOT + 0.90.6 From 4f001a9981de2ba753f3972a0f8cb51b32fb6a16 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 6 Nov 2013 18:18:54 +0100 Subject: [PATCH 18/39] Changed default index prefix to marvel- Original commit: elastic/x-pack-elasticsearch@45706d31a81ca290f2d57f40c7c29efccae323df --- .../org/elasticsearch/marvel/monitor/exporter/ESExporter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java index 65b9041527a..6dee939983f 100644 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java @@ -71,7 +71,7 @@ public class ESExporter extends AbstractLifecycleComponent implement hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"}); - indexPrefix = settings.get("index.prefix", "es_monitor"); + indexPrefix = settings.get("index.prefix", "marvel"); String indexTimeFormat = settings.get("index.timeformat", "YYYY.MM.dd"); indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat); From 4e9c7357376e501fac6977e2a873de3f68c40c70 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 8 Nov 2013 00:24:40 +0100 Subject: [PATCH 19/39] index naming 
was not using utc time zone. Original commit: elastic/x-pack-elasticsearch@aca5d5bda67933ba06576425fb9eeae10b1a5b0a --- .../org/elasticsearch/marvel/monitor/exporter/ESExporter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java index 6dee939983f..3233f4a389e 100644 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java +++ b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java @@ -73,7 +73,7 @@ public class ESExporter extends AbstractLifecycleComponent implement hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"}); indexPrefix = settings.get("index.prefix", "marvel"); String indexTimeFormat = settings.get("index.timeformat", "YYYY.MM.dd"); - indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat); + indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat).withZoneUTC(); timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).seconds(); From 7a026e2f32ee1b88623825526395aa96993aed7e Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 13 Nov 2013 13:43:04 +0100 Subject: [PATCH 20/39] Metrics now consistently use the field as an id. Added the option to edit field names. Added the option to add a custom metric. Also the node_stats panel now opens in the same page.
Original commit: elastic/x-pack-elasticsearch@b8eac389db4813f6148da7da2100ece7e7c0cd1d --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d82dc2eae0e..bb0a2ebb7d6 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ - 0.90.6 + 0.90.7-SNAPSHOT From 04876eff6cfe6d71e72f2e2db187b192e5c9271f Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 18 Nov 2013 21:36:21 +0100 Subject: [PATCH 21/39] Moved plugin to a subfolder Tweaked build system to allow serving both marvel and kibana content without building and without symlinks Original commit: elastic/x-pack-elasticsearch@837ad48e7f81e70bef880de31df40e71d874a001 --- .jshintrc | 34 ++ README.md | 44 ++ pom.xml | 135 ----- src/main/assemblies/plugin.xml | 17 - .../elasticsearch/marvel/monitor/Plugin.java | 51 -- .../marvel/monitor/StatsExportersService.java | 228 -------- .../marvel/monitor/annotation/Annotation.java | 48 -- .../annotation/ShardEventAnnotation.java | 57 -- .../marvel/monitor/exporter/ESExporter.java | 526 ------------------ .../monitor/exporter/StatsExporter.java | 28 - src/main/resources/es-plugin.properties | 1 - 11 files changed, 78 insertions(+), 1091 deletions(-) create mode 100644 .jshintrc delete mode 100644 pom.xml delete mode 100644 src/main/assemblies/plugin.xml delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/Plugin.java delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java delete mode 100644 src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java delete mode 100644 src/main/resources/es-plugin.properties diff --git a/.jshintrc b/.jshintrc new file mode 100644 index 
00000000000..54051651ff6 --- /dev/null +++ b/.jshintrc @@ -0,0 +1,34 @@ +{ + "browser": true, + + "bitwise":false, + "curly": true, + "eqnull": true, + "globalstrict": true, + "devel": true, + "eqeqeq": true, + "forin": false, + "immed": true, + "supernew": true, + "expr": true, + "indent": 2, + "latedef": true, + "newcap": true, + "noarg": true, + "noempty": true, + "undef": true, + "boss": true, + "trailing": false, + "laxbreak": true, + "laxcomma": true, + "sub": true, + "unused": true, + + "maxlen": 140, + + "globals": { + "define": true, + "require": true, + "Chromath": false + } +} \ No newline at end of file diff --git a/README.md b/README.md index e69de29bb2d..520cc63dc16 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,44 @@ + + +## Grunt build system +This grunt-based build system handles Kibana development environment setup for Marvel as well as building, packaging and distribution of the Marvel plugin. Note that you **must** run *grunt setup* before any other tasks as this build system reuses parts of the Kibana build system that must be fetched + +### Installing +You will need node.js+npm and grunt. Node is available via brew, install grunt with the command below. Once grunt is installed you may run grunt tasks to setup your environment and build Marvel + +```npm install -g grunt``` +```npm install``` + +### Tasks + +```grunt setup``` + +**Run this first.** It will download the right Kibana version to ./vendor/kibana, copies the appropriate config.js to the right place and make any symlinks needed for a proper marvel/kibana environment + +```grunt server``` + +Starts a web server on http://127.0.0.1:5601 pointing at the kibana directory, while also serving custom marvel panels + +```grunt jshint``` + +Lints code without building + +```grunt build``` + +Merges kibana and marvel code, builds Kibana and the plugin (via mvn) and puts them in ./build. + +```grunt package``` + +Zips and tar+gzips the build in ./packages. 
Includes grunt build + +```grunt release``` + +Uploads archives to download.elasticsearch.org/elasticsearch/marvel/marvel-VERSION.extention. Includes grunt build and grunt package. You will need S3 credentials in .aws-config.json. Format as so: + +``` +{ + "key":"MY_KEY_HERE", + "secret":"your/long/secret/string" +} + +``` \ No newline at end of file diff --git a/pom.xml b/pom.xml deleted file mode 100644 index bb0a2ebb7d6..00000000000 --- a/pom.xml +++ /dev/null @@ -1,135 +0,0 @@ - - - elasticsearch-marvel - 4.0.0 - org.elasticsearch - marvel - 0.1.0-SNAPSHOT - jar - Elasticsearch Marvel - 2013 - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - - org.sonatype.oss - oss-parent - 7 - - - - 0.90.7-SNAPSHOT - - - - - sonatype - http://oss.sonatype.org/content/repositories/releases/ - - - - - - org.elasticsearch - elasticsearch - ${elasticsearch.version} - compile - - - - log4j - log4j - 1.2.17 - runtime - true - - - - org.testng - testng - 6.8 - test - - - org.hamcrest - hamcrest-core - - - junit - junit - - - - - - org.hamcrest - hamcrest-all - 1.3 - test - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 2.3.2 - - 1.6 - 1.6 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.11 - - - **/*Tests.java - - - - - org.apache.maven.plugins - maven-source-plugin - 2.1.2 - - - attach-sources - - jar - - - - - - maven-assembly-plugin - 2.3 - - false - ${project.build.directory}/releases/ - - ${basedir}/src/main/assemblies/plugin.xml - - - - - package - - single - - - - - - - \ No newline at end of file diff --git a/src/main/assemblies/plugin.xml b/src/main/assemblies/plugin.xml deleted file mode 100644 index 2ccfc875e9c..00000000000 --- a/src/main/assemblies/plugin.xml +++ /dev/null @@ -1,17 +0,0 @@ - - plugin - - zip - - false - - - / - true - true - - org.elasticsearch:elasticsearch - - - - \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/marvel/monitor/Plugin.java 
b/src/main/java/org/elasticsearch/marvel/monitor/Plugin.java deleted file mode 100644 index 171d609a90e..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/Plugin.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.marvel.monitor; - -import org.elasticsearch.common.collect.ImmutableList; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.AbstractPlugin; - -import java.util.ArrayList; -import java.util.Collection; - -public class Plugin extends AbstractPlugin { - - public Plugin() { - } - - @Override - public String name() { - return "Elasticsearch marvel - monitor"; - } - - @Override - public String description() { - return "Monitoring with an elastic sauce"; - } - - @Override - public Collection modules(Settings settings) { - Module m = new AbstractModule() { - - @Override - protected void configure() { - bind(StatsExportersService.class).asEagerSingleton(); - } - }; - return ImmutableList.of(m); - } - - @Override - public Collection> services() { - Collection> l = new ArrayList>(); - l.add(StatsExportersService.class); - return l; - } -} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java b/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java deleted file mode 100644 index 0c3fdaeb030..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/StatsExportersService.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.marvel.monitor; -import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.ImmutableSet; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.discovery.Discovery; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.service.IndexShard; -import org.elasticsearch.indices.IndicesLifecycle; -import org.elasticsearch.marvel.monitor.annotation.Annotation; -import org.elasticsearch.marvel.monitor.annotation.ShardEventAnnotation; -import org.elasticsearch.marvel.monitor.exporter.ESExporter; -import org.elasticsearch.marvel.monitor.exporter.StatsExporter; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.indices.InternalIndicesService; -import org.elasticsearch.node.service.NodeService; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingDeque; - -public class StatsExportersService extends AbstractLifecycleComponent { - - private final InternalIndicesService indicesService; - private final NodeService nodeService; - 
private final ClusterService clusterService; - private final Client client; - - private final IndicesLifecycle.Listener indicesLifeCycleListener; - - private volatile ExportingWorker exp; - private volatile Thread thread; - private final TimeValue interval; - - private Collection exporters; - - private final BlockingQueue pendingAnnotationsQueue; - - @Inject - public StatsExportersService(Settings settings, IndicesService indicesService, - NodeService nodeService, ClusterService clusterService, - Client client, - Discovery discovery) { - super(settings); - this.indicesService = (InternalIndicesService) indicesService; - this.clusterService = clusterService; - this.nodeService = nodeService; - this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(5)); - this.client = client; - - StatsExporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), discovery); - this.exporters = ImmutableSet.of(esExporter); - - indicesLifeCycleListener = new IndicesLifeCycleListener(); - pendingAnnotationsQueue = ConcurrentCollections.newBlockingQueue(); - } - - @Override - protected void doStart() throws ElasticSearchException { - for (StatsExporter e : exporters) - e.start(); - - this.exp = new ExportingWorker(); - this.thread = new Thread(exp, EsExecutors.threadName(settings, "monitor")); - this.thread.setDaemon(true); - this.thread.start(); - - indicesService.indicesLifecycle().addListener(indicesLifeCycleListener); - } - - @Override - protected void doStop() throws ElasticSearchException { - this.exp.closed = true; - this.thread.interrupt(); - try { - this.thread.join(60000); - } catch (InterruptedException e) { - // we don't care... 
- } - for (StatsExporter e : exporters) - e.stop(); - - indicesService.indicesLifecycle().removeListener(indicesLifeCycleListener); - - } - - @Override - protected void doClose() throws ElasticSearchException { - for (StatsExporter e : exporters) - e.close(); - } - - class ExportingWorker implements Runnable { - - volatile boolean closed; - - @Override - public void run() { - while (!closed) { - // sleep first to allow node to complete initialization before collecting the first start - try { - Thread.sleep(interval.millis()); - } catch (InterruptedException e) { - // ignore, if closed, good.... - } - - // do the actual export..., go over the actual exporters list and... - try { - exportNodeStats(); - - exportShardStats(); - - exportAnnotations(); - - if (clusterService.state().nodes().localNodeMaster()) { - exportIndicesStats(); - } - } catch (Throwable t) { - logger.error("Background thread had an uncaught exception:", t); - } - - } - - logger.debug("shutting down worker, exporting pending annotation"); - exportAnnotations(); - - logger.debug("worker shutdown"); - } - - private void exportIndicesStats() { - logger.debug("local node is master, exporting aggregated stats"); - IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats().all().get(); - for (StatsExporter e : exporters) { - try { - e.exportIndicesStats(indicesStatsResponse); - } catch (Throwable t) { - logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); - } - - - } - } - - private void exportAnnotations() { - logger.debug("Exporting annotations"); - ArrayList annotationsList = new ArrayList(pendingAnnotationsQueue.size()); - pendingAnnotationsQueue.drainTo(annotationsList); - Annotation[] annotations = new Annotation[annotationsList.size()]; - annotationsList.toArray(annotations); - - for (StatsExporter e : exporters) { - try { - e.exportAnnotations(annotations); - } catch (Throwable t) { - logger.error("StatsExporter [{}] has thrown an exception:", t, 
e.name()); - } - } - } - - private void exportShardStats() { - logger.debug("Collecting shard stats"); - ShardStats[] shardStatsArray = indicesService.shardStats(CommonStatsFlags.ALL); - - logger.debug("Exporting shards stats"); - for (StatsExporter e : exporters) { - try { - e.exportShardStats(shardStatsArray); - } catch (Throwable t) { - logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); - } - } - } - - private void exportNodeStats() { - logger.debug("Collecting node stats"); - NodeStats nodeStats = nodeService.stats(); - - logger.debug("Exporting node stats"); - for (StatsExporter e : exporters) { - try { - e.exportNodeStats(nodeStats); - } catch (Throwable t) { - logger.error("StatsExporter [{}] has thrown an exception:", t, e.name()); - } - } - } - } - - - class IndicesLifeCycleListener extends IndicesLifecycle.Listener { - @Override - public void afterIndexShardStarted(IndexShard indexShard) { - pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.STARTED, - indexShard.shardId(), indexShard.routingEntry())); - - } - - @Override - public void beforeIndexShardCreated(ShardId shardId) { - pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CREATED, - shardId, null)); - } - - @Override - public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard) { - pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CLOSED, - indexShard.shardId(), indexShard.routingEntry())); - - } - } -} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java b/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java deleted file mode 100644 index 9d883db4ccc..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/annotation/Annotation.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.marvel.monitor.annotation; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.joda.time.format.DateTimeFormatter; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -public abstract class Annotation { - - public final static DateTimeFormatter datePrinter = Joda.forPattern("date_time").printer(); - - protected long timestamp; - - public Annotation(long timestamp) { - this.timestamp = timestamp; - } - - public long timestamp() { - return timestamp; - } - - /** - * @return annotation's type as a short string without spaces - */ - public abstract String type(); - - /** - * should return a short string based description of the annotation - */ - abstract String conciseDescription(); - - @Override - public String toString() { - return "[" + type() + "] annotation: [" + conciseDescription() + "]"; - } - - public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.field("@timestamp", datePrinter.print(timestamp)); - builder.field("message", conciseDescription()); - return builder; - } -} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java b/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java deleted file mode 100644 index a99ae5bfdbf..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/annotation/ShardEventAnnotation.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.marvel.monitor.annotation; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -public class ShardEventAnnotation extends Annotation { - - private final ShardRouting shardRouting; - private final ShardId shardId; - private EventType event; - - public enum EventType { - CREATED, - STARTED, - CLOSED - } - - - public ShardEventAnnotation(long timestamp, EventType event, ShardId shardId, ShardRouting shardRouting) { - super(timestamp); - this.event = event; - this.shardId = shardId; - this.shardRouting = shardRouting; - } - - @Override - public String type() { - return "shard_event"; - } - - @Override - String conciseDescription() { - return "[" + event + "]" + (shardRouting != null ? shardRouting : shardId); - } - - @Override - public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException { - super.addXContentBody(builder, params); - builder.field("event", event); - builder.field("index", shardId.index()); - builder.field("shard_id", shardId.id()); - if (shardRouting != null) { - builder.field("routing"); - shardRouting.toXContent(builder, params); - } - - return builder; - } -} diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java deleted file mode 100644 index 3233f4a389e..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/ESExporter.java +++ /dev/null @@ -1,526 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.marvel.monitor.exporter; -import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.IndexStats; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.collect.ImmutableMap; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.joda.time.format.DateTimeFormat; -import org.elasticsearch.common.joda.time.format.DateTimeFormatter; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.common.network.NetworkUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.smile.SmileXContent; -import org.elasticsearch.discovery.Discovery; -import org.elasticsearch.marvel.monitor.annotation.Annotation; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.HttpURLConnection; -import java.net.InetAddress; -import java.net.URL; -import java.util.Map; - -public class ESExporter extends AbstractLifecycleComponent implements 
StatsExporter { - - final String[] hosts; - final String indexPrefix; - final DateTimeFormatter indexTimeFormatter; - final int timeout; - - final Discovery discovery; - final String hostname; - - // TODO: logger name is not good now. Figure out why. - final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName()); - - public final static DateTimeFormatter defaultDatePrinter = Joda.forPattern("date_time").printer(); - - boolean checkedForIndexTemplate = false; - - final NodeStatsRenderer nodeStatsRenderer; - final ShardStatsRenderer shardStatsRenderer; - final IndexStatsRenderer indexStatsRenderer; - final IndicesStatsRenderer indicesStatsRenderer; - final AnnotationsRenderer annotationsRenderer; - - public ESExporter(Settings settings, Discovery discovery) { - super(settings); - - this.discovery = discovery; - InetAddress address = NetworkUtils.getLocalAddress(); - this.hostname = address == null ? null : address.getHostName(); - - - hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"}); - indexPrefix = settings.get("index.prefix", "marvel"); - String indexTimeFormat = settings.get("index.timeformat", "YYYY.MM.dd"); - indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat).withZoneUTC(); - - timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).seconds(); - - nodeStatsRenderer = new NodeStatsRenderer(); - shardStatsRenderer = new ShardStatsRenderer(); - indexStatsRenderer = new IndexStatsRenderer(); - indicesStatsRenderer = new IndicesStatsRenderer(); - annotationsRenderer = new AnnotationsRenderer(); - - logger.info("ESExporter initialized. 
Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat); - } - - @Override - public String name() { - return "ESExporter"; - } - - - @Override - public void exportNodeStats(NodeStats nodeStats) { - nodeStatsRenderer.reset(nodeStats); - exportXContent(nodeStatsRenderer); - } - - @Override - public void exportShardStats(ShardStats[] shardStatsArray) { - shardStatsRenderer.reset(shardStatsArray); - exportXContent(shardStatsRenderer); - } - - @Override - public void exportIndicesStats(IndicesStatsResponse indicesStats) { - Map perIndexStats = indicesStats.getIndices(); - indexStatsRenderer.reset(perIndexStats.values().toArray(new IndexStats[perIndexStats.size()])); - indicesStatsRenderer.reset(indicesStats.getTotal(), indicesStats.getPrimaries()); - logger.debug("exporting index_stats + indices_stats"); - HttpURLConnection conn = openExportingConnection(); - if (conn == null) { - return; - } - try { - addXContentRendererToConnection(conn, indexStatsRenderer); - addXContentRendererToConnection(conn, indicesStatsRenderer); - sendCloseExportingConnection(conn); - } catch (IOException e) { - logger.error("error sending data", e); - return; - } - } - - @Override - public void exportAnnotations(Annotation[] annotations) { - annotationsRenderer.reset(annotations); - exportXContent(annotationsRenderer); - } - - - private HttpURLConnection openExportingConnection() { - if (!checkedForIndexTemplate) { - if (!checkForIndexTemplate()) { - logger.debug("no template defined yet. 
skipping"); - return null; - } - } - - logger.trace("setting up an export connection"); - HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType()); - if (conn == null) { - logger.error("could not connect to any configured elasticsearch instances: [{}]", hosts); - } - return conn; - } - - private void addXContentRendererToConnection(HttpURLConnection conn, - MultiXContentRenderer renderer) throws IOException { - OutputStream os = conn.getOutputStream(); - // TODO: find a way to disable builder's substream flushing or something neat solution - for (int i = 0; i < renderer.length(); i++) { - XContentBuilder builder = XContentFactory.smileBuilder(os); - builder.startObject().startObject("index") - .field("_index", getIndexName()).field("_type", renderer.type(i)).endObject().endObject(); - builder.flush(); - os.write(SmileXContent.smileXContent.streamSeparator()); - - builder = XContentFactory.smileBuilder(os); - builder.humanReadable(false); - renderer.render(i, builder); - builder.flush(); - os.write(SmileXContent.smileXContent.streamSeparator()); - } - } - - private void sendCloseExportingConnection(HttpURLConnection conn) throws IOException { - logger.trace("sending exporting content"); - OutputStream os = conn.getOutputStream(); - os.close(); - - if (conn.getResponseCode() != 200) { - logConnectionError("remote target didn't respond with 200 OK", conn); - } else { - conn.getInputStream().close(); // close and release to connection pool. 
- } - } - - private void exportXContent(MultiXContentRenderer xContentRenderer) { - if (xContentRenderer.length() == 0) { - return; - } - - HttpURLConnection conn = openExportingConnection(); - if (conn == null) { - return; - } - try { - addXContentRendererToConnection(conn, xContentRenderer); - sendCloseExportingConnection(conn); - } catch (IOException e) { - logger.error("error sending data", e); - return; - } - - } - - @Override - protected void doStart() throws ElasticSearchException { - } - - - @Override - protected void doStop() throws ElasticSearchException { - } - - @Override - protected void doClose() throws ElasticSearchException { - } - - - private String getIndexName() { - return indexPrefix + "-" + indexTimeFormatter.print(System.currentTimeMillis()); - - } - - - private HttpURLConnection openConnection(String method, String uri) { - return openConnection(method, uri, null); - } - - private HttpURLConnection openConnection(String method, String uri, String contentType) { - for (String host : hosts) { - try { - URL templateUrl = new URL("http://" + host + uri); - HttpURLConnection conn = (HttpURLConnection) templateUrl.openConnection(); - conn.setRequestMethod(method); - conn.setConnectTimeout(timeout); - if (contentType != null) { - conn.setRequestProperty("Content-Type", XContentType.SMILE.restContentType()); - } - conn.setUseCaches(false); - if (method.equalsIgnoreCase("POST") || method.equalsIgnoreCase("PUT")) { - conn.setDoOutput(true); - } - conn.connect(); - - return conn; - } catch (IOException e) { - logger.error("error connecting to [{}]", e, host); - } - } - - return null; - } - - private boolean checkForIndexTemplate() { - try { - - - String templateName = "marvel.monitor." 
+ indexPrefix; - - logger.debug("checking of target has template [{}]", templateName); - // DO HEAD REQUEST, when elasticsearch supports it - HttpURLConnection conn = openConnection("GET", "/_template/" + templateName); - if (conn == null) { - logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts); - return false; - } - - boolean hasTemplate = conn.getResponseCode() == 200; - - // nothing there, lets create it - if (!hasTemplate) { - logger.debug("no template found in elasticsearch for [{}]. Adding...", templateName); - conn = openConnection("PUT", "/_template/" + templateName, XContentType.SMILE.restContentType()); - OutputStream os = conn.getOutputStream(); - XContentBuilder builder = XContentFactory.smileBuilder(os); - builder.startObject(); - builder.field("template", indexPrefix + "*"); - builder.startObject("mappings").startObject("_default_"); - builder.startArray("dynamic_templates").startObject().startObject("string_fields") - .field("match", "*") - .field("match_mapping_type", "string") - .startObject("mapping").field("index", "not_analyzed").endObject() - .endObject().endObject().endArray(); - builder.endObject().endObject(); // mapping + root object. - builder.close(); - os.close(); - - if (conn.getResponseCode() != 200) { - logConnectionError("error adding index template to elasticsearch", conn); - } - conn.getInputStream().close(); // close and release to connection pool. - - } - checkedForIndexTemplate = true; - } catch (IOException e) { - logger.error("error when checking/adding template to elasticsearch", e); - return false; - } - return true; - } - - private void logConnectionError(String msg, HttpURLConnection conn) { - InputStream inputStream = conn.getErrorStream(); - java.util.Scanner s = new java.util.Scanner(inputStream, "UTF-8").useDelimiter("\\A"); - String err = s.hasNext() ? s.next() : ""; - try { - logger.error("{} response code [{} {}]. 
content: {}", msg, conn.getResponseCode(), conn.getResponseMessage(), err); - } catch (IOException e) { - logger.error("connection had an error while reporting the error. tough life."); - } - } - - interface MultiXContentRenderer { - - int length(); - - String type(int i); - - void render(int index, XContentBuilder builder) throws IOException; - } - - - private void addNodeInfo(XContentBuilder builder) throws IOException { - addNodeInfo(builder, "node"); - } - - private void addNodeInfo(XContentBuilder builder, String fieldname) throws IOException { - builder.startObject(fieldname); - DiscoveryNode node = discovery.localNode(); - builder.field("id", node.id()); - builder.field("name", node.name()); - builder.field("transport_address", node.address()); - - if (node.address().uniqueAddressTypeId() == 1) { // InetSocket - InetSocketTransportAddress address = (InetSocketTransportAddress) node.address(); - InetAddress inetAddress = address.address().getAddress(); - if (inetAddress != null) { - builder.field("ip", inetAddress.getHostAddress()); - } - } - - if (hostname != null) { - builder.field("hostname", hostname); - } - - if (!node.attributes().isEmpty()) { - builder.startObject("attributes"); - for (Map.Entry attr : node.attributes().entrySet()) { - builder.field(attr.getKey(), attr.getValue()); - } - builder.endObject(); - } - builder.endObject(); - } - - - class NodeStatsRenderer implements MultiXContentRenderer { - - NodeStats stats; - ToXContent.MapParams xContentParams = new ToXContent.MapParams( - ImmutableMap.of("node_info_format", "none", "load_average_format", "hash")); - - public void reset(NodeStats stats) { - this.stats = stats; - } - - @Override - public int length() { - return 1; - } - - @Override - public String type(int i) { - return "node_stats"; - } - - @Override - public void render(int index, XContentBuilder builder) throws IOException { - builder.startObject(); - builder.field("@timestamp", defaultDatePrinter.print(stats.getTimestamp())); - 
addNodeInfo(builder); - stats.toXContent(builder, xContentParams); - builder.endObject(); - } - } - - class ShardStatsRenderer implements MultiXContentRenderer { - - ShardStats[] stats; - long collectionTime; - ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; - - public void reset(ShardStats[] stats) { - this.stats = stats; - collectionTime = System.currentTimeMillis(); - } - - @Override - public int length() { - return stats == null ? 0 : stats.length; - } - - @Override - public String type(int i) { - return "shard_stats"; - } - - @Override - public void render(int index, XContentBuilder builder) throws IOException { - builder.startObject(); - builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); - ShardRouting shardRouting = stats[index].getShardRouting(); - builder.field("index", shardRouting.index()); - builder.field("shard_id", shardRouting.id()); - builder.field("shard_state", shardRouting.state()); - builder.field("primary", shardRouting.primary()); - addNodeInfo(builder); - stats[index].getStats().toXContent(builder, xContentParams); - builder.endObject(); - } - } - - class IndexStatsRenderer implements MultiXContentRenderer { - - IndexStats[] stats; - long collectionTime; - ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; - - public void reset(IndexStats[] stats) { - this.stats = stats; - collectionTime = System.currentTimeMillis(); - } - - @Override - public int length() { - return stats == null ? 
0 : stats.length; - } - - @Override - public String type(int i) { - return "index_stats"; - } - - @Override - public void render(int index, XContentBuilder builder) throws IOException { - builder.startObject(); - builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); - IndexStats indexStats = stats[index]; - builder.field("index", indexStats.getIndex()); - addNodeInfo(builder, "_source_node"); - builder.startObject("primaries"); - indexStats.getPrimaries().toXContent(builder, xContentParams); - builder.endObject(); - builder.startObject("total"); - indexStats.getTotal().toXContent(builder, xContentParams); - builder.endObject(); - builder.endObject(); - } - } - - class IndicesStatsRenderer implements MultiXContentRenderer { - - CommonStats totalStats; - CommonStats primariesStats; - long collectionTime; - ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; - - public void reset(CommonStats totalStats, CommonStats primariesStats) { - this.totalStats = totalStats; - this.primariesStats = primariesStats; - collectionTime = System.currentTimeMillis(); - } - - @Override - public int length() { - return totalStats == null ? 
0 : 1; - } - - @Override - public String type(int i) { - return "indices_stats"; - } - - @Override - public void render(int index, XContentBuilder builder) throws IOException { - assert index == 0; - builder.startObject(); - builder.field("@timestamp", defaultDatePrinter.print(collectionTime)); - addNodeInfo(builder, "_source_node"); - builder.startObject("primaries"); - primariesStats.toXContent(builder, xContentParams); - builder.endObject(); - builder.startObject("total"); - totalStats.toXContent(builder, xContentParams); - builder.endObject(); - builder.endObject(); - } - } - - - class AnnotationsRenderer implements MultiXContentRenderer { - - Annotation[] annotations; - ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS; - - public void reset(Annotation[] annotations) { - this.annotations = annotations; - } - - @Override - public int length() { - return annotations == null ? 0 : annotations.length; - } - - @Override - public String type(int i) { - return annotations[i].type(); - } - - - @Override - public void render(int index, XContentBuilder builder) throws IOException { - builder.startObject(); - addNodeInfo(builder, "node"); - annotations[index].addXContentBody(builder, xContentParams); - builder.endObject(); - } - } - -} - diff --git a/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java b/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java deleted file mode 100644 index ffec9d567f9..00000000000 --- a/src/main/java/org/elasticsearch/marvel/monitor/exporter/StatsExporter.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.marvel.monitor.exporter; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.IndexStats; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.marvel.monitor.annotation.Annotation; - -import java.util.Map; - -public interface StatsExporter extends LifecycleComponent { - - String name(); - - void exportNodeStats(NodeStats nodeStats); - - void exportShardStats(ShardStats[] shardStatsArray); - - void exportIndicesStats(IndicesStatsResponse indicesStats); - - void exportAnnotations(Annotation[] annotations); -} diff --git a/src/main/resources/es-plugin.properties b/src/main/resources/es-plugin.properties deleted file mode 100644 index 72b2c0afd39..00000000000 --- a/src/main/resources/es-plugin.properties +++ /dev/null @@ -1 +0,0 @@ -plugin=org.elasticsearch.marvel.monitor.Plugin \ No newline at end of file From 8dd7be641f2a0544211cc5bb780da163a20d2e46 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 21 Nov 2013 15:03:20 +0100 Subject: [PATCH 22/39] Added --es_port and --port options to the server command. Original commit: elastic/x-pack-elasticsearch@e83a9de8c98bac2fdd6fb1d6fbedf34314c8d5d6 --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 520cc63dc16..ee4070ea449 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,9 @@ You will need node.js+npm and grunt. Node is available via brew, install grunt w ```grunt server``` -Starts a web server on http://127.0.0.1:5601 pointing at the kibana directory, while also serving custom marvel panels +Starts a web server on http://127.0.0.1:5601 pointing at the kibana directory, while also serving custom marvel panels. 
+ +You can use `grunt server --port=5601 --es_host=9200` to control the ports used for kibana and the elasticsearch port used. ```grunt jshint``` From 38f9ff6064342e79b1f6ead9d67f8c21d2ba1eb2 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 21 Nov 2013 22:55:01 +0100 Subject: [PATCH 23/39] Release command doesn't repackage but release already built (and tested) packages Original commit: elastic/x-pack-elasticsearch@df6e333050dab7f3cb6ed2eacf63710aeb022404 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ee4070ea449..9395c8d7b51 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Zips and tar+gzips the build in ./packages. Includes grunt build ```grunt release``` -Uploads archives to download.elasticsearch.org/elasticsearch/marvel/marvel-VERSION.extention. Includes grunt build and grunt package. You will need S3 credentials in .aws-config.json. Format as so: +Uploads created archives to download.elasticsearch.org/elasticsearch/marvel/marvel-VERSION.extention. You will need S3 credentials in .aws-config.json. Format as so: ``` { From 52801c99b356180bff3ad308d0598ca4392bf832 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 7 Jan 2014 23:29:57 +0100 Subject: [PATCH 24/39] Added plugin installation instructions Original commit: elastic/x-pack-elasticsearch@55b7cd933149d5118ef1212949ab3ab32f49bdb2 --- README.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9395c8d7b51..4ceb7c55b30 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,16 @@ +# Installing the last release of Marvel + +The easiest way to play/get to know Marvel is to install the latest release version of it. 
To do so, just run the following command on every node on your cluster (restart node for it to have effect): + +``` +./bin/plugin -i elasticsearch/marvel/latest +``` + +Once done, open up the following url (assuming standard ES config): http://localhost:9200/_plugin/marvel . This will take you to the Overview Dashboard. Use Kibana's Load dashboard menu to navigate to the Cluster Pulse dashboard + -## Grunt build system +## Grunt build system (for running the UI from a code checkout) This grunt-based build system handles Kibana development environment setup for Marvel as well as building, packaging and distribution of the Marvel plugin. Note that you **must** run *grunt setup* before any other tasks as this build system reuses parts of the Kibana build system that must be fetched ### Installing @@ -43,4 +53,4 @@ Uploads created archives to download.elasticsearch.org/elasticsearch/marvel/marv "secret":"your/long/secret/string" } -``` \ No newline at end of file +``` From e23982fb2b51880541ee6dccedf1416d0d4d3c06 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 21 Jan 2014 14:44:40 +0100 Subject: [PATCH 25/39] Moved kibana related code to a kibana folder Original commit: elastic/x-pack-elasticsearch@a0e103e1e37ee5f1e8ee76a96557d58e268c97c4 --- .jshintrc | 34 ---------------------------------- 1 file changed, 34 deletions(-) delete mode 100644 .jshintrc diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index 54051651ff6..00000000000 --- a/.jshintrc +++ /dev/null @@ -1,34 +0,0 @@ -{ - "browser": true, - - "bitwise":false, - "curly": true, - "eqnull": true, - "globalstrict": true, - "devel": true, - "eqeqeq": true, - "forin": false, - "immed": true, - "supernew": true, - "expr": true, - "indent": 2, - "latedef": true, - "newcap": true, - "noarg": true, - "noempty": true, - "undef": true, - "boss": true, - "trailing": false, - "laxbreak": true, - "laxcomma": true, - "sub": true, - "unused": true, - - "maxlen": 140, - - "globals": { - "define": true, - 
"require": true, - "Chromath": false - } -} \ No newline at end of file From ab4b11caf8166052c2c9682b0f49d5cb63666098 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 27 Jan 2014 21:30:23 +0100 Subject: [PATCH 26/39] Removed CONTRIBUTING file and updated license Original commit: elastic/x-pack-elasticsearch@70a8079d3b71f9d4f97e8f2b8a5f7da20def25d9 --- CONTRIBUTING.md | 98 ------------------------------------------------- 1 file changed, 98 deletions(-) delete mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 56a9dafbf09..00000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,98 +0,0 @@ -Contributing to elasticsearch -============================= - -Elasticsearch is an open source project and we love to receive contributions from our community — you! There are many ways to contribute, from writing tutorials or blog posts, improving the documentation, submitting bug reports and feature requests or writing code which can be incorporated into Elasticsearch itself. - -Bug reports ------------ - -If you think you have found a bug in Elasticsearch, first make sure that you are testing against the [latest version of Elasticsearch](http://www.elasticsearch.org/download/) - your issue may already have been fixed. If not, search our [issues list](https://github.com/elasticsearch/elasticsearch/issues) on GitHub in case a similar issue has already been opened. - -It is very helpful if you can prepare a reproduction of the bug. In other words, provide a small test case which we can run to confirm your bug. It makes it easier to find the problem and to fix it. Test cases should be provided as `curl` commands which we can copy and paste into a terminal to run it locally, for example: - -```sh -# delete the index -curl -XDELETE localhost:9200/test - -# insert a document -curl -XPUT localhost:9200/test/test/1 -d '{ - "title": "test document" -}' - -# this should return XXXX but instead returns YYY -curl .... 
-``` - -Provide as much information as you can. You may think that the problem lies with your query, when actually it depends on how your data is indexed. The easier it is for us to recreate your problem, the faster it is likely to be fixed. - -Feature requests ----------------- - -If you find yourself wishing for a feature that doesn't exist in Elasticsearch, you are probably not alone. There are bound to be others out there with similar needs. Many of the features that Elasticsearch has today have been added because our users saw the need. -Open an issue on our [issues list](https://github.com/elasticsearch/elasticsearch/issues) on GitHub which describes the feature you would like to see, why you need it, and how it should work. - -Contributing code and documentation changes -------------------------------------------- - -If you have a bugfix or new feature that you would like to contribute to Elasticsearch, please find or open an issue about it first. Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change. - -We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code. - -The process for contributing to any of the [Elasticsearch repositories](https://github.com/elasticsearch/) is similar. Details for individual projects can be found below. - -### Fork and clone the repository - -You will need to fork the main Elasticsearch code or documentation repository and clone it to your local machine. See -[github help page](https://help.github.com/articles/fork-a-repo) for help. - -Further instructions for specific projects are given below. - -### Submitting your changes - -Once your changes and tests are ready to submit for review: - -1. Test your changes -Run the test suite to make sure that nothing is broken. - -2. 
Sign the Contributor License Agreement -Please make sure you have signed our [Contributor License Agreement](http://www.elasticsearch.org/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once. - -3. Rebase your changes -Update your local repository with the most recent code from the main Elasticsearch repository, and rebase your branch on top of the latest master branch. We prefer your changes to be squashed into a single commit. - -4. Submit a pull request -Push your local changes to your forked copy of the repository and [submit a pull request](https://help.github.com/articles/using-pull-requests). In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, eg "Closes #123". - -Then sit back and wait. There will probably be discussion about the pull request and, if any changes are needed, we would love to work with you to get your pull request merged into Elasticsearch. - - -Contributing to the Elasticsearch plugin ----------------------------------------- - -**Repository:** [https://github.com/elasticsearch/elasticsearch-dash](https://github.com/elasticsearch/elasticsearch-dash) - -Make sure you have [Maven](http://maven.apache.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE by running `mvn eclipse:eclipse` and then importing the project into their workspace: `File > Import > Existing project into workspace`. - -Please follow these formatting guidelines: - -* Java indent is 4 spaces -* Line width is 140 characters -* The rest is left to Java coding standards -* Disable “auto-format on save” to prevent unnecessary format changes. 
This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. - -To create a distribution from the source, simply run: - -```sh -cd elasticsearch-dash/ -mvn clean package -DskipTests -``` - -You will find the newly built packages under: `./target/releases/`. - -Before submitting your changes, run the test suite to make sure that nothing is broken, with: - -```sh -mvn clean test -``` - -Source: [Contributing to elasticsearch](http://www.elasticsearch.org/contributing-to-elasticsearch/) From 0d34d6cbef51373193578f4d1d8f9dc024bc7c24 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 6 Feb 2014 11:07:36 +0100 Subject: [PATCH 27/39] Updated readme file Original commit: elastic/x-pack-elasticsearch@e1879dbc24585542134cbb99771621fdb0a343aa --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4ceb7c55b30..f3b51151660 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ - # Installing the last release of Marvel The easiest way to play/get to know Marvel is to install the latest release version of it. To do so, just run the following command on every node on your cluster (restart node for it to have effect): @@ -41,7 +40,7 @@ Merges kibana and marvel code, builds Kibana and the plugin (via mvn) and puts t ```grunt package``` -Zips and tar+gzips the build in ./packages. Includes grunt build +Zips and tar+gzips the build in build/packages. 
Includes grunt build ```grunt release``` @@ -54,3 +53,7 @@ Uploads created archives to download.elasticsearch.org/elasticsearch/marvel/marv } ``` + +To upload the current archive as the "latest" release, use: + +```grunt release --latest``` From c4c757801a545ead1b005f3caa90c0cc37c47a5d Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 11 Mar 2014 21:01:54 +0100 Subject: [PATCH 28/39] Updated readme file to include sense usage Original commit: elastic/x-pack-elasticsearch@e9ec1f9a7887362945e86b0d44ae913c206e133d --- README.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f3b51151660..884a5b6516b 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,22 @@ The easiest way to play/get to know Marvel is to install the latest release vers Once done, open up the following url (assuming standard ES config): http://localhost:9200/_plugin/marvel . This will take you to the Overview Dashboard. Use Kibana's Load dashboard menu to navigate to the Cluster Pulse dashboard +## I just want to run Sense + +To run Sense, checkout a copy of this repo. You can now double click `sense/index.html` and it will open in your default browser. + +For a cleaner experience, we recommend using something like http://anvilformac.com and point it at the repo checkout. Once done you can access Sense via `http://webserver/sense/`, for example http://elasticsearch-marvel.dev/sense if using Anvil. + +Note: to run the Kibana side of Marvel, you'd need to install grunt as described bellow. + ## Grunt build system (for running the UI from a code checkout) This grunt-based build system handles Kibana development environment setup for Marvel as well as building, packaging and distribution of the Marvel plugin. Note that you **must** run *grunt setup* before any other tasks as this build system reuses parts of the Kibana build system that must be fetched ### Installing You will need node.js+npm and grunt. 
Node is available via brew, install grunt with the command below. Once grunt is installed you may run grunt tasks to setup your environment and build Marvel -```npm install -g grunt``` +```npm install -g grunt-cli``` + ```npm install``` ### Tasks From 99f06b31a9f2c441660b88ebd0f67af879130edf Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Tue, 18 Mar 2014 15:41:50 -0700 Subject: [PATCH 29/39] Save Dashboards to Elasticsearch - Added "show_home" option to loader - Added custom "Save" panel to navigation panels - Changed "Marvel Dashboards" to "Dashboards" - Added merge code to Navigation to merge saved dashboards into static dashboards - Splitting up functions into discrete files - Removing findDashboardByLink() method - Changed Experimental to Stable for all Marvel panels Original commit: elastic/x-pack-elasticsearch@933f031bebf83b5d84e428badb1f8ae0e7ac08cd --- .jshintignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 .jshintignore diff --git a/.jshintignore b/.jshintignore new file mode 100644 index 00000000000..22d0d82f809 --- /dev/null +++ b/.jshintignore @@ -0,0 +1 @@ +vendor From 9116a2180ed2a5d375eaeb264f3be8e0e97da414 Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Wed, 28 May 2014 10:27:00 -0700 Subject: [PATCH 30/39] Ignoring the Gruntfile.js for JSHint Original commit: elastic/x-pack-elasticsearch@d180e0048ea1708732ccadfdee9cd170fc55120b --- .jshintignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.jshintignore b/.jshintignore index 22d0d82f809..a7d5694758a 100644 --- a/.jshintignore +++ b/.jshintignore @@ -1 +1,2 @@ vendor +Gruntfile.js From 50324e231a89e2ae9de4d689582ec497e9fec3ef Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Mon, 18 May 2015 10:16:18 -0700 Subject: [PATCH 31/39] Adding watchers and testing framework Original commit: elastic/x-pack-elasticsearch@88b1619192f805f6f85729fa84be24e171ee7b7b --- watcher/lib/client.js | 4 + watcher/lib/create_template.js | 11 + watcher/lib/execute_watcher.js | 40 +++ 
watcher/lib/find_port.js | 13 + watcher/lib/inject_stats.js | 74 +++++ watcher/lib/load_watcher.js | 26 ++ watcher/lib/setup_es.js | 71 +++++ watcher/lib/setup_smtp_server.js | 56 ++++ watcher/lib/template.json | 447 ++++++++++++++++++++++++++++ watcher/lib/test_no_execute.js | 28 ++ watcher/package.json | 22 ++ watcher/test/cluster_status.js | 95 ++++++ watcher/test/cpu_usage.js | 81 +++++ watcher/test/heap_used.js | 81 +++++ watcher/test/mocha.opts | 2 + watcher/watches/cluster_status.json | 92 ++++++ watcher/watches/cpu_usage.json | 73 +++++ watcher/watches/heap_used.json | 73 +++++ 18 files changed, 1289 insertions(+) create mode 100644 watcher/lib/client.js create mode 100644 watcher/lib/create_template.js create mode 100644 watcher/lib/execute_watcher.js create mode 100644 watcher/lib/find_port.js create mode 100644 watcher/lib/inject_stats.js create mode 100644 watcher/lib/load_watcher.js create mode 100644 watcher/lib/setup_es.js create mode 100644 watcher/lib/setup_smtp_server.js create mode 100644 watcher/lib/template.json create mode 100644 watcher/lib/test_no_execute.js create mode 100644 watcher/package.json create mode 100644 watcher/test/cluster_status.js create mode 100644 watcher/test/cpu_usage.js create mode 100644 watcher/test/heap_used.js create mode 100644 watcher/test/mocha.opts create mode 100644 watcher/watches/cluster_status.json create mode 100644 watcher/watches/cpu_usage.json create mode 100644 watcher/watches/heap_used.json diff --git a/watcher/lib/client.js b/watcher/lib/client.js new file mode 100644 index 00000000000..7118f52e4fc --- /dev/null +++ b/watcher/lib/client.js @@ -0,0 +1,4 @@ +var Client = require('elasticsearch').Client; +module.exports = new Client({ + host: 'http://localhost:9800' +}); diff --git a/watcher/lib/create_template.js b/watcher/lib/create_template.js new file mode 100644 index 00000000000..bf0506c3daf --- /dev/null +++ b/watcher/lib/create_template.js @@ -0,0 +1,11 @@ +var client = require('./client'); +var 
template = require('./template.json'); + +module.exports = function () { + return client.indices.putTemplate({ + body: template, + name: 'marvel' + }); +}; + + diff --git a/watcher/lib/execute_watcher.js b/watcher/lib/execute_watcher.js new file mode 100644 index 00000000000..2247ff83112 --- /dev/null +++ b/watcher/lib/execute_watcher.js @@ -0,0 +1,40 @@ +var Promise = require('bluebird'); +var request = require('request'); +var injectStats = require('./inject_stats'); +var loadWatcher = require('./load_watcher'); + +module.exports = function (watcher, fixture) { + + return injectStats(fixture).then(function () { + return loadWatcher(watcher); + }).then(function () { + return new Promise(function (resolve, reject) { + var options = { + method: 'POST', + url: 'http://localhost:9800/_watcher/watch/' + watcher + '/_execute', + json: true, + body: { + trigger_event: { + schedule: { + scheduled_time: 'now', + triggered_time: 'now' + } + } + } + }; + + request(options, function (err, resp, body) { + if (err) return reject(err); + if (resp.statusCode === 200) return resolve(body); + var message = options.url + ' responed with ' + resp.statusCode; + var error = new Error(body.error || message); + error.body = body; + error.resp = resp; + error.code = resp.statusCode; + error.options = options; + reject(error); + }); + }); + }); + +}; diff --git a/watcher/lib/find_port.js b/watcher/lib/find_port.js new file mode 100644 index 00000000000..93313bb095e --- /dev/null +++ b/watcher/lib/find_port.js @@ -0,0 +1,13 @@ +var Promise = require('bluebird'); +var portscanner = require('portscanner'); +module.exports = function findPort(start, end, host) { + host = host || 'localhost'; + return new Promise(function (resolve, reject) { + portscanner.findAPortNotInUse(start, end, host, function (err, port) { + if (err) return reject(err); + resolve(port); + }); + }); +}; + + diff --git a/watcher/lib/inject_stats.js b/watcher/lib/inject_stats.js new file mode 100644 index 
00000000000..e14ec726915 --- /dev/null +++ b/watcher/lib/inject_stats.js @@ -0,0 +1,74 @@ +var _ = require('lodash'); +_.mixin(require('lodash-deep')); +var moment = require('moment'); +var client = require('./client'); + +module.exports = function (fixture) { + var indexPattern = fixture.indexPattern; + var type = fixture.type; + var data = fixture.data; + var rawData = fixture.rawData; + var startDate = fixture.startDate; + var duration = fixture.duration; + var dateField = fixture.dateField; + var workingDate = moment.utc(); + var indices = []; + var body = []; + var fields; + + + function createEntries(row) { + var index = workingDate.format(indexPattern); + var entry = { '@timestamp': workingDate.toISOString() }; + row.forEach(function (val, index) { + _.deepSet(entry, fields[index], val); + }); + + indices.push(index); + + body.push({ + index: { + _index: index, + _type: type + } + }); + body.push(entry); + } + + if (rawData) { + rawData.forEach(function (row) { + var index = moment.utc(row[dateField]).format(indexPattern); + var entry = {}; + _.each(row, function (val, key) { + _.deepSet(entry, key, val); + }); + indices.push(index); + body.push({ + index: { + _index: index, + _type: type + } + }); + body.push(entry); + }); + } else { + fields = data.shift(); + while(startDate <= workingDate) { + data.forEach(createEntries); + workingDate.subtract(duration); + } + } + + return client.deleteByQuery({ + index: _.unique(indices), + ignoreUnavailable: true, + allowNoIndices: true, + q: '*' + }) + .then(function (arg) { + return client.bulk({ + body: body, + refresh: true + }); + }); +}; diff --git a/watcher/lib/load_watcher.js b/watcher/lib/load_watcher.js new file mode 100644 index 00000000000..6fc03a7fa08 --- /dev/null +++ b/watcher/lib/load_watcher.js @@ -0,0 +1,26 @@ +var Promise = require('bluebird'); +var request = require('request'); +var path = require('path'); + +module.exports = function (name) { + var watch = require(path.join(__dirname, '..', 
'watches', name + '.json')); + var options = { + method: 'PUT', + url: 'http://localhost:9800/_watcher/watch/' + name, + json: true, + body: watch + }; + return new Promise(function (resolve, reject) { + request(options, function (err, resp, body) { + if (err) return reject(err); + if (resp.statusCode <= 201) resolve({ resp: resp, body: body }); + var message = options.url + ' responded with ' + resp.statusCode; + var error = new Error(body.error || message); + error.body = body; + error.resp = resp; + error.code = resp.statusCode; + error.options = options; + reject(error); + }); + }); +}; diff --git a/watcher/lib/setup_es.js b/watcher/lib/setup_es.js new file mode 100644 index 00000000000..436f37df02c --- /dev/null +++ b/watcher/lib/setup_es.js @@ -0,0 +1,71 @@ +var path = require('path'); +var Promise = require('bluebird'); +var libesvm = require('libesvm'); +var createTemplate = require('./create_template'); + +function startEs() { + var options = { + version: '1.5.2', + directory: path.join(__dirname, '..', 'esvm'), + purge: true, + plugins: [ + 'elasticsearch/watcher/1.0.0-Beta1-5', + 'elasticsearch/license/latest' + ], + config: { + 'script.groovy.sandbox.enabled': true, + 'cluster.name': 'test', + 'network.host': '127.0.0.1', + 'http.port': 9800, + 'watcher.actions.email.service.account': { + 'local': { + 'email_defaults.from': 'admin@example.com', + 'smtp': { + 'host': 'localhost', + 'user': 'test', + 'password': 'test', + 'port': 5555 + } + } + } + + } + }; + var cluster = libesvm.createCluster(options); + cluster.on('log', function (log) { + if (log.type === 'progress') return; + if (process.env.DEBUG) console.log('%s %s %s %s', log.level, log.node, log.type, log.message); + }); + return cluster.install() + .then(function () { + return cluster.installPlugins(); + }) + .then(function () { + return cluster.start(); + }) + .then(function () { + after(function () { + this.timeout(60000); + return cluster.shutdown(); + }); + return cluster; + }); +} + 
+before(function () { + var self = this; + this.timeout(60000); + return new Promise(function (resolve, reject) { + startEs().then(function (cluster) { + self.cluster = cluster; + cluster.on('log', function (log) { + if (/watch service has started/.test(log.message)) { + createTemplate().then(function () { + resolve(cluster); + }); + } + }); + }).catch(reject); + }); +}); + diff --git a/watcher/lib/setup_smtp_server.js b/watcher/lib/setup_smtp_server.js new file mode 100644 index 00000000000..12e4f7ec922 --- /dev/null +++ b/watcher/lib/setup_smtp_server.js @@ -0,0 +1,56 @@ +var Promise = require('bluebird'); +var SMTPServer = require('smtp-server').SMTPServer; +var MailParser = require('mailparser').MailParser; +var acceptAny = function (address, session, done) { + return done(); +}; + +var mailbox = []; + +function startSMTP() { + return new Promise(function (resolve, reject) { + var server = new SMTPServer({ + logger: false, + disabledCommands: ['STARTTLS'], + onAuth: function (auth, session, done) { + done(null, { user: 1 }); + }, + onMailFrom: acceptAny, + onRcptTo: acceptAny, + onData: function (stream, session, done) { + var mailparser = new MailParser(); + mailparser.on('end', function (mailObj) { + mailbox.push(mailObj); + done(); + }); + stream.pipe(mailparser); + } + }); + + server.listen(5555, function (err) { + if (err) return reject(err); + after(function (done) { + server.close(done); + }); + resolve(server); + }); + }); +} + +before(function () { + var self = this; + return startSMTP().then(function (server) { + this.smtp = server; + return server; + }); +}); + +beforeEach(function () { + this.mailbox = mailbox; +}); + +afterEach(function () { + mailbox = []; +}); + +module.exports = mailbox; diff --git a/watcher/lib/template.json b/watcher/lib/template.json new file mode 100644 index 00000000000..362fc0d2c38 --- /dev/null +++ b/watcher/lib/template.json @@ -0,0 +1,447 @@ +{ + "template": ".marvel*", + "settings": { + "number_of_shards": 1, + 
"number_of_replicas": 1, + "analysis": { + "analyzer": { + "default": { + "type": "standard", + "stopwords": "_none_" + } + } + }, + "mapper.dynamic": true, + "marvel.index_format": 6 + }, + "mappings": { + "_default_": { + "dynamic_templates": [ + { + "string_fields": { + "match": "*", + "match_mapping_type": "string", + "mapping": { + "type": "multi_field", + "fields": { + "{name}": { + "type": "string", + "index": "analyzed", + "omit_norms": true + }, + "raw": { + "type": "string", + "index": "not_analyzed", + "ignore_above": 256 + } + } + } + } + } + ] + }, + "node_stats": { + "properties": { + "breakers": { + "properties": { + "fielddata": { + "properties": { + "estimated_size_in_bytes": { + "type": "long" + }, + "tripped": { + "type": "long" + }, + "limit_size_in_bytes": { + "type": "long" + } + } + }, + "request": { + "properties": { + "estimated_size_in_bytes": { + "type": "long" + }, + "tripped": { + "type": "long" + }, + "limit_size_in_bytes": { + "type": "long" + } + } + }, + "parent": { + "properties": { + "estimated_size_in_bytes": { + "type": "long" + }, + "tripped": { + "type": "long" + }, + "limit_size_in_bytes": { + "type": "long" + } + } + } + } + }, + "fs": { + "properties": { + "total": { + "properties": { + "disk_io_op": { + "type": "long" + }, + "disk_reads": { + "type": "long" + }, + "disk_writes": { + "type": "long" + }, + "disk_io_size_in_bytes": { + "type": "long" + }, + "disk_read_size_in_bytes": { + "type": "long" + }, + "disk_write_size_in_bytes": { + "type": "long" + } + } + } + } + }, + "jvm": { + "properties": { + "buffer_pools": { + "properties": { + "direct": { + "properties": { + "used_in_bytes": { + "type": "long" + } + } + }, + "mapped": { + "properties": { + "used_in_bytes": { + "type": "long" + } + } + } + } + }, + "gc": { + "properties": { + "collectors": { + "properties": { + "young": { + "properties": { + "collection_count": { + "type": "long" + }, + "collection_time_in_millis": { + "type": "long" + } + } + }, + "old": { + 
"properties": { + "collection_count": { + "type": "long" + }, + "collection_time_in_millis": { + "type": "long" + } + } + } + } + } + } + } + } + }, + "indices": { + "properties": { + "indexing": { + "properties": { + "throttle_time_in_millis": { + "type": "long" + } + } + }, + "percolate": { + "properties": { + "total": { + "type": "long" + }, + "time_in_millis": { + "type": "long" + }, + "queries": { + "type": "long" + }, + "memory_size_in_bytes": { + "type": "long" + } + } + }, + "segments": { + "properties": { + "index_writer_memory_in_bytes": { + "type": "long" + }, + "version_map_memory_in_bytes": { + "type": "long" + }, + "index_writer_max_memory_in_bytes": { + "type": "long" + } + } + }, + "query_cache": { + "properties": { + "memory_size_in_bytes": { + "type": "long" + }, + "evictions": { + "type": "long" + }, + "hit_count": { + "type": "long" + }, + "miss_count": { + "type": "long" + } + } + } + } + }, + "os": { + "properties": { + "load_average": { + "properties": { + "1m": { + "type": "float" + }, + "5m": { + "type": "float" + }, + "15m": { + "type": "float" + } + } + } + } + }, + "thread_pool": { + "properties": { + "listener": { + "properties": { + "threads": { + "type": "long" + }, + "rejected": { + "type": "long" + }, + "completed": { + "type": "long" + }, + "queue": { + "type": "long" + }, + "largest": { + "type": "long" + } + } + } + } + } + } + }, + "index_stats": { + "properties": { + "index": { + "type": "multi_field", + "fields": { + "index": { + "type": "string", + "norms": { + "enabled": false + } + }, + "raw": { + "type": "string", + "index": "not_analyzed", + "norms": { + "enabled": false + }, + "index_options": "docs", + "include_in_all": false, + "ignore_above": 256 + } + } + }, + "total": { + "properties": { + "fielddata": { + "properties": { + "memory_size_in_bytes": { + "type": "long" + } + } + }, + "indexing": { + "properties": { + "throttle_time_in_millis": { + "type": "long" + } + } + }, + "merges": { + "properties": { + 
"total_size_in_bytes": { + "type": "long" + } + } + }, + "percolate": { + "properties": { + "total": { + "type": "long" + }, + "time_in_millis": { + "type": "long" + }, + "queries": { + "type": "long" + }, + "memory_size_in_bytes": { + "type": "long" + } + } + }, + "search": { + "properties": { + "query": { + "properties": { + "query_total": { + "type": "long" + } + } + } + } + }, + "segments": { + "properties": { + "index_writer_memory_in_bytes": { + "type": "long" + }, + "version_map_memory_in_bytes": { + "type": "long" + }, + "index_writer_max_memory_in_bytes": { + "type": "long" + } + } + }, + "query_cache": { + "properties": { + "memory_size_in_bytes": { + "type": "long" + }, + "evictions": { + "type": "long" + }, + "hit_count": { + "type": "long" + }, + "miss_count": { + "type": "long" + } + } + } + } + }, + "primaries": { + "properties": { + "docs": { + "properties": { + "count": { + "type": "long" + } + } + }, + "indexing": { + "properties": { + "index_total": { + "type": "long" + } + } + } + } + } + } + }, + "cluster_event": {}, + "shard_event": {}, + "indices_stats": { + "properties": { + "primaries": { + "properties": { + "indexing": { + "properties": { + "index_total": { + "type": "long" + } + } + }, + "docs": { + "properties": { + "count": { + "type": "long" + } + } + } + } + }, + "total": { + "properties": { + "search": { + "properties": { + "query_total": { + "type": "long" + } + } + } + } + } + } + }, + "cluster_stats": {}, + "index_event": {}, + "node_event": {}, + "routing_event": {}, + "cluster_state": { + "properties": { + "blocks": { + "type": "object", + "enabled": false + }, + "nodes": { + "type": "object", + "enabled": false + }, + "routing_nodes": { + "type": "object", + "enabled": false + }, + "routing_table": { + "type": "object", + "enabled": false + } + } + } + } +} + diff --git a/watcher/lib/test_no_execute.js b/watcher/lib/test_no_execute.js new file mode 100644 index 00000000000..7c94d531487 --- /dev/null +++ 
b/watcher/lib/test_no_execute.js @@ -0,0 +1,28 @@ +var lib = require('requirefrom')('lib'); +var executeWatcher = lib('execute_watcher'); +var expect = require('expect.js'); +module.exports = function testNoExecute(options, message, generateRawData) { + describe(message, function () { + var response; + beforeEach(function () { + this.timeout(5000); + var rawData = generateRawData(); + var fixture = { + indexPattern: options.indexPattern, + type: options.type, + dateField: '@timestamp', + rawData: rawData + }; + return executeWatcher(options.watcher, fixture).then(function (resp) { + response = resp; + if (process.env.DEBUG) console.log(JSON.stringify(resp, null, ' ')); + return resp; + }); + }); + it('should not meet the script condition', function () { + expect(response.state).to.be('execution_not_needed'); + expect(response.execution_result.condition.script.met).to.be(false); + }); + }); +}; + diff --git a/watcher/package.json b/watcher/package.json new file mode 100644 index 00000000000..e72ff1aee9c --- /dev/null +++ b/watcher/package.json @@ -0,0 +1,22 @@ +{ + "name": "marvel-watchers", + "version": "1.0.0", + "description": "", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "libesvm": "0.0.12", + "lodash": "^3.8.0", + "lodash-deep": "^1.6.0", + "mailparser": "^0.5.1", + "portscanner": "^1.0.0", + "requirefrom": "^0.2.0" + } +} diff --git a/watcher/test/cluster_status.js b/watcher/test/cluster_status.js new file mode 100644 index 00000000000..91eeb774c92 --- /dev/null +++ b/watcher/test/cluster_status.js @@ -0,0 +1,95 @@ +var _ = require('lodash'); +var lib = require('requirefrom')('lib'); +var expect = require('expect.js'); +var moment = require('moment'); +var executeWatcher = lib('execute_watcher'); +var options = { + indexPattern: '[.marvel-]YYYY.MM.DD', + type: 'cluster_stats', + watcher: 'cluster_status' +}; 
+var testNoExecute = lib('test_no_execute').bind(null, options); +var client = lib('client'); +lib('setup_es'); +lib('setup_smtp_server'); + +describe('Marvel Watchers', function () { + describe('Cluster Status', function () { + + describe('Red for 60 seconds', function () { + var response; + beforeEach(function () { + this.timeout(5000); + var workingDate = moment.utc(); + var rawData = _.times(12, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; + }); + var fixture = { + indexPattern: '[.marvel-]YYYY.MM.DD', + type: 'cluster_stats', + dateField: '@timestamp', + rawData: rawData + }; + return executeWatcher('cluster_status', fixture).then(function (resp) { + response = resp; + if (process.env.DEBUG) console.log(JSON.stringify(resp, null, ' ')); + return resp; + }); + }); + + it('should meet the script condition', function () { + expect(response.state).to.be('executed'); + expect(response.execution_result.condition.script.met).to.be(true); + }); + + it('should send an email', function () { + expect(this.mailbox).to.have.length(1); + var message = this.mailbox[0]; + expect(message.subject).to.contain('Watcher Notification - Cluster has been RED for the last 60 seconds'); + expect(message.text).to.contain('Your cluster has been red for the last 60 seconds.'); + }); + + }); + + testNoExecute('Red for 55 then Yellow for 60 seconds', function () { + var workingDate = moment.utc(); + var rawData = _.times(11, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; + }); + rawData.concat(_.times(12, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; + })); + return rawData; + }); + + testNoExecute('Red for 30 then Yellow for 60 seconds', function () { + var workingDate = moment.utc(); + var rawData = _.times(6, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; + }); + rawData.concat(_.times(12, 
function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; + })); + return rawData; + }); + + testNoExecute('Red for 5 Yellow for 10 Red for 10 Green for 60', function () { + var workingDate = moment.utc(); + var rawData = _.times(1, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; + }); + rawData.concat(_.times(2, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; + })); + rawData.concat(_.times(2, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; + })); + rawData.concat(_.times(12, function () { + return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'green' }; + })); + return rawData; + }); + + }); +}); + diff --git a/watcher/test/cpu_usage.js b/watcher/test/cpu_usage.js new file mode 100644 index 00000000000..2920d7698af --- /dev/null +++ b/watcher/test/cpu_usage.js @@ -0,0 +1,81 @@ +var lib = require('requirefrom')('lib'); +var expect = require('expect.js'); +var moment = require('moment'); +var executeWatcher = lib('execute_watcher'); +var client = lib('client'); +var indexPattern = '[.marvel-]YYYY.MM.DD'; +lib('setup_es'); +lib('setup_smtp_server'); + +describe('Marvel Watchers', function () { + describe('CPU Usage', function () { + + describe('above 75%', function () { + var response; + beforeEach(function () { + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'os.cpu.user'], + ['node-01', 75], + ['node-02', 85], + ['node-03', 60] + ] + }; + return executeWatcher('cpu_usage', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should meet the script condition', function () { + expect(response.state).to.be('executed'); + expect(response.execution_result.condition.script.met).to.be(true); + 
}); + + it('should send an email with multiple hosts', function () { + expect(this.mailbox).to.have.length(1); + var message = this.mailbox[0]; + expect(message.text).to.contain('"node-01" - CPU Usage is at 75.0%'); + expect(message.text).to.contain('"node-02" - CPU Usage is at 85.0%'); + }); + + }); + + describe('below 75%', function () { + var response; + beforeEach(function () { + var self = this; + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'os.cpu.user'], + ['node-01', 35], + ['node-02', 25], + ['node-03', 10] + ] + }; + return executeWatcher('cpu_usage', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should not send an email', function () { + expect(response.state).to.be('execution_not_needed'); + expect(response.execution_result.condition.script.met).to.be(false); + expect(this.mailbox).to.have.length(0); + }); + + }); + + }); +}); diff --git a/watcher/test/heap_used.js b/watcher/test/heap_used.js new file mode 100644 index 00000000000..ee5b444088d --- /dev/null +++ b/watcher/test/heap_used.js @@ -0,0 +1,81 @@ +var lib = require('requirefrom')('lib'); +var expect = require('expect.js'); +var moment = require('moment'); +var executeWatcher = lib('execute_watcher'); +var client = lib('client'); +var indexPattern = '[.marvel-]YYYY.MM.DD'; +lib('setup_es'); +lib('setup_smtp_server'); + +describe('Marvel Watchers', function () { + describe('Memory Usage', function () { + + describe('above 75%', function () { + var response; + beforeEach(function () { + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'jvm.mem.heap_used_percent'], + ['node-01', 75], + ['node-02', 85], + ['node-03', 60] + ] + }; + return 
executeWatcher('heap_used', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should meet the script condition', function () { + expect(response.state).to.be('executed'); + expect(response.execution_result.condition.script.met).to.be(true); + }); + + it('should send an email with multiple hosts', function () { + expect(this.mailbox).to.have.length(1); + var message = this.mailbox[0]; + expect(message.text).to.contain('"node-01" - Memory Usage is at 75.0%'); + expect(message.text).to.contain('"node-02" - Memory Usage is at 85.0%'); + }); + + }); + + describe('below 75%', function () { + var response; + beforeEach(function () { + var self = this; + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'jvm.mem.heap_used_percent'], + ['node-01', 35], + ['node-02', 25], + ['node-03', 10] + ] + }; + return executeWatcher('heap_used', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should not send an email', function () { + expect(response.state).to.be('execution_not_needed'); + expect(response.execution_result.condition.script.met).to.be(false); + expect(this.mailbox).to.have.length(0); + }); + + }); + + }); +}); diff --git a/watcher/test/mocha.opts b/watcher/test/mocha.opts new file mode 100644 index 00000000000..ec71a5114b0 --- /dev/null +++ b/watcher/test/mocha.opts @@ -0,0 +1,2 @@ +--require expect.js +--reporter spec diff --git a/watcher/watches/cluster_status.json b/watcher/watches/cluster_status.json new file mode 100644 index 00000000000..52556996e73 --- /dev/null +++ b/watcher/watches/cluster_status.json @@ -0,0 +1,92 @@ +{ + "trigger": { + "schedule": { + "interval": "1m" + } + }, + "input": { + "search": { + "request": { + "indices": ".marvel-*", + "types": "cluster_stats", + "body": { + "query": { + "filtered": { + "filter": { + "bool": { + 
"must": [ + { + "range": { + "@timestamp": { + "gte": "now-2m", + "lte": "now" + } + } + } + ], + "should": [ + { + "term": { + "status.raw": "red" + } + }, + { + "term": { + "status.raw": "green" + } + }, + { + "term": { + "status.raw": "yellow" + } + } + ] + } + } + } + }, + "fields": ["@timestamp","status"], + "sort": [ + { + "@timestamp": { + "order": "desc" + } + } + ], + "size": 1, + "aggs": { + "minutes": { + "date_histogram": { + "field": "@timestamp", + "interval": "5s" + }, + "aggs": { + "status": { + "terms": { + "field": "status.raw", + "size": 3 + } + } + } + } + } + } + } + } + }, + "throttle_period": "30m", + "condition": { + "script": { + "inline": "if (ctx.payload.hits.total < 1) return false; def rows = ctx.payload.hits.hits; if (rows[0].fields.status[0] != 'red') return false; if (ctx.payload.aggregations.minutes.buckets.size() < 12) return false; def last60Seconds = ctx.payload.aggregations.minutes.buckets[-12..-1]; return last60Seconds.every { it.status.buckets.every { s -> s.key == 'red' } }" + } + }, + "actions": { + "send_email": { + "email": { + "to": "user@example.com", + "subject": "Watcher Notification - Cluster has been RED for the last 60 seconds", + "body": "Your cluster has been red for the last 60 seconds." 
+ } + } + } +} diff --git a/watcher/watches/cpu_usage.json b/watcher/watches/cpu_usage.json new file mode 100644 index 00000000000..edda8624f64 --- /dev/null +++ b/watcher/watches/cpu_usage.json @@ -0,0 +1,73 @@ +{ + "trigger": { + "schedule": { + "interval": "1m" + } + }, + "input": { + "search": { + "request": { + "indices": [ + ".marvel-*" + ], + "search_type": "count", + "body": { + "query": { + "filtered": { + "filter": { + "range": { + "@timestamp": { + "gte": "now-2m", + "lte": "now" + } + } + } + } + }, + "aggs": { + "minutes": { + "date_histogram": { + "field": "@timestamp", + "interval": "minute" + }, + "aggs": { + "nodes": { + "terms": { + "field": "node.name.raw", + "size": 10, + "order": { + "cpu": "desc" + } + }, + "aggs": { + "cpu": { + "avg": { + "field": "os.cpu.user" + } + } + } + } + } + } + } + } + } + } + }, + "throttle_period": "30m", + "condition": { + "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.cpu && node.cpu.value >= 75;" + }, + "actions": { + "send_email": { + "transform": { + "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll { return it.cpu && it.cpu.value >= 75 };" + }, + "email": { + "to": "user@example.com", + "subject": "Watcher Notification - HIGH CPU USAGE", + "body": "Nodes with HIGH CPU Usage (above 75%):\n\n{{#ctx.payload._value}}\"{{key}}\" - CPU Usage is at {{cpu.value}}%\n{{/ctx.payload._value}}" + } + } + } +} diff --git a/watcher/watches/heap_used.json b/watcher/watches/heap_used.json new file mode 100644 index 00000000000..3f33719a9e8 --- /dev/null +++ b/watcher/watches/heap_used.json @@ -0,0 +1,73 @@ +{ + "trigger": { + "schedule": { + "interval": "1m" + } + }, + "input": { + "search": { + "request": { + "indices": [ + ".marvel-*" + ], + "search_type": "count", + "body": { + "query": { + "filtered": { + "filter": { 
+ "range": { + "@timestamp": { + "gte": "now-2m", + "lte": "now" + } + } + } + } + }, + "aggs": { + "minutes": { + "date_histogram": { + "field": "@timestamp", + "interval": "minute" + }, + "aggs": { + "nodes": { + "terms": { + "field": "node.name.raw", + "size": 10, + "order": { + "memory": "desc" + } + }, + "aggs": { + "memory": { + "avg": { + "field": "jvm.mem.heap_used_percent" + } + } + } + } + } + } + } + } + } + } + }, + "throttle_period": "30m", + "condition": { + "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.memory && node.memory.value >= 75;" + }, + "actions": { + "send_email": { + "transform": { + "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll { return it.memory && it.memory.value >= 75 };" + }, + "email": { + "to": "user@example.com", + "subject": "Watcher Notification - HIGH MEMORY USAGE", + "body": "Nodes with HIGH MEMORY Usage (above 75%):\n\n{{#ctx.payload._value}}\"{{key}}\" - Memory Usage is at {{memory.value}}%\n{{/ctx.payload._value}}" + } + } + } +} From 835c92f32e71dbcdf3cbfceb65a8eddfb82e5cfd Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Mon, 18 May 2015 10:54:40 -0700 Subject: [PATCH 32/39] Adding dependencies Original commit: elastic/x-pack-elasticsearch@d24f14bedb794d736450a14eaebc9198b713497c --- watcher/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/watcher/package.json b/watcher/package.json index e72ff1aee9c..ab38bb94d83 100644 --- a/watcher/package.json +++ b/watcher/package.json @@ -16,6 +16,7 @@ "lodash": "^3.8.0", "lodash-deep": "^1.6.0", "mailparser": "^0.5.1", + "moment": "^2.10.3", "portscanner": "^1.0.0", "requirefrom": "^0.2.0" } From ca08fb731fa342cee29625f38a67b6ef4a48a110 Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Mon, 18 May 2015 11:16:09 -0700 Subject: [PATCH 33/39] Adding readme to 
watchers Original commit: elastic/x-pack-elasticsearch@67177df52055358f6e8fba0d30b186445e00605e --- watcher/README.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 watcher/README.md diff --git a/watcher/README.md b/watcher/README.md new file mode 100644 index 00000000000..3611ac8e063 --- /dev/null +++ b/watcher/README.md @@ -0,0 +1,4 @@ +# Marvel Watchers + +More to come + From 848b90a720819447333c4b8a61f5912a41fa1491 Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Tue, 19 May 2015 14:22:10 -0700 Subject: [PATCH 34/39] Adding file descriptors to watcher examples Original commit: elastic/x-pack-elasticsearch@9d5ee156e80e5754c73f2dc693d27386d48555e8 --- watcher/test/file_descriptors.js | 82 +++++++++++++++++++++++++++ watcher/watches/file_descriptors.json | 79 ++++++++++++++++++++++++++ 2 files changed, 161 insertions(+) create mode 100644 watcher/test/file_descriptors.js create mode 100644 watcher/watches/file_descriptors.json diff --git a/watcher/test/file_descriptors.js b/watcher/test/file_descriptors.js new file mode 100644 index 00000000000..6de8a188f80 --- /dev/null +++ b/watcher/test/file_descriptors.js @@ -0,0 +1,82 @@ +var lib = require('requirefrom')('lib'); +var expect = require('expect.js'); +var moment = require('moment'); +var executeWatcher = lib('execute_watcher'); +var client = lib('client'); +var indexPattern = '[.marvel-]YYYY.MM.DD'; +lib('setup_es'); +lib('setup_smtp_server'); + +describe('Marvel Watchers', function () { + describe('File Descriptors', function () { + + describe('above 80%', function () { + var response; + beforeEach(function () { + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'process.open_file_descriptors'], + ['node-01', Math.round(65535*0.75)], + ['node-02', Math.round(65535*0.81)], + ['node-03', Math.round(65535*0.93)] + ] + }; + return 
executeWatcher('file_descriptors', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should meet the script condition', function () { + expect(response.state).to.be('executed'); + expect(response.execution_result.condition.script.met).to.be(true); + }); + + it('should send an email with multiple hosts', function () { + expect(this.mailbox).to.have.length(1); + var message = this.mailbox[0]; + expect(message.text).to.contain('"node-02" - File Descriptors is at ' + Math.round(65535*0.81) + '.0 ('+ Math.round(((65535*0.81)/65535)*100) + '%)'); + expect(message.text).to.contain('"node-03" - File Descriptors is at ' + Math.round(65535*0.93) + '.0 ('+ Math.round(((65535*0.93)/65535)*100) + '%)'); + }); + + }); + + describe('below 80%', function () { + var response; + beforeEach(function () { + var self = this; + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'process.open_file_descriptors'], + ['node-01', Math.round(65535*0.05)], + ['node-02', Math.round(65535*0.30)], + ['node-03', Math.round(65535*0.23)] + ] + }; + return executeWatcher('file_descriptors', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should not send an email', function () { + expect(response.state).to.be('execution_not_needed'); + expect(response.execution_result.condition.script.met).to.be(false); + expect(this.mailbox).to.have.length(0); + }); + + }); + + }); +}); + diff --git a/watcher/watches/file_descriptors.json b/watcher/watches/file_descriptors.json new file mode 100644 index 00000000000..83fbe122aca --- /dev/null +++ b/watcher/watches/file_descriptors.json @@ -0,0 +1,79 @@ +{ + "metadata": { + "system_fd": 65535, + "threshold": 0.8 + }, + "trigger": { + "schedule": { + "interval": "1m" + } + }, + "input": { + "search": { + "request": { + "indices": [ + ".marvel-*" + ], + 
"types": "node_stats", + "search_type": "count", + "body": { + "query": { + "filtered": { + "filter": { + "range": { + "@timestamp": { + "gte": "now-1m", + "lte": "now" + } + } + } + } + }, + "aggs": { + "minutes": { + "date_histogram": { + "field": "@timestamp", + "interval": "5s" + }, + "aggs": { + "nodes": { + "terms": { + "field": "node.name.raw", + "size": 10, + "order": { + "fd": "desc" + } + }, + "aggs": { + "fd": { + "avg": { + "field": "process.open_file_descriptors" + } + } + } + } + } + } + } + } + } + } + }, + "throttle_period": "30m", + "condition": { + "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.fd && node.fd.value >= (ctx.metadata.system_fd * ctx.metadata.threshold);" + }, + "actions": { + "send_email": { + "transform": { + "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll({ return it.fd && it.fd.value >= (ctx.metadata.system_fd * ctx.metadata.threshold) }).collect({ it.fd.percent = Math.round((it.fd.value/ctx.metadata.system_fd)*100); it });" + }, + "email": { + "to": "user@example.com", + "subject": "Watcher Notification - NODES WITH 80% FILE DESCRIPTORS USED", + "body": "Nodes with 80% FILE DESCRIPTORS USED (above 80%):\n\n{{#ctx.payload._value}}\"{{key}}\" - File Descriptors is at {{fd.value}} ({{fd.percent}}%)\n{{/ctx.payload._value}}" + } + } + } +} + From 1c92b3976b561d0b79566cf2912162c9bed20678 Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Wed, 20 May 2015 08:57:42 -0700 Subject: [PATCH 35/39] Adding fielddata utilization Original commit: elastic/x-pack-elasticsearch@f92ef0e38d66242e0df68c4dae07cff707072ef9 --- watcher/test/fielddata.js | 82 ++++++++++++++++++++++++++++++++++ watcher/watches/fielddata.json | 79 ++++++++++++++++++++++++++++++++ 2 files changed, 161 insertions(+) create mode 100644 watcher/test/fielddata.js create mode 
100644 watcher/watches/fielddata.json diff --git a/watcher/test/fielddata.js b/watcher/test/fielddata.js new file mode 100644 index 00000000000..0601afdf38e --- /dev/null +++ b/watcher/test/fielddata.js @@ -0,0 +1,82 @@ +var lib = require('requirefrom')('lib'); +var expect = require('expect.js'); +var moment = require('moment'); +var executeWatcher = lib('execute_watcher'); +var client = lib('client'); +var indexPattern = '[.marvel-]YYYY.MM.DD'; +lib('setup_es'); +lib('setup_smtp_server'); + +describe('Marvel Watchers', function () { + describe('File Descriptors', function () { + + describe('above 80%', function () { + var response; + beforeEach(function () { + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'indices.fielddata.memory_size_in_bytes'], + ['node-01', 81000], + ['node-02', 70000], + ['node-03', 90000] + ] + }; + return executeWatcher('fielddata', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should meet the script condition', function () { + expect(response.state).to.be('executed'); + expect(response.execution_result.condition.script.met).to.be(true); + }); + + it('should send an email with multiple hosts', function () { + expect(this.mailbox).to.have.length(1); + var message = this.mailbox[0]; + expect(message.text).to.contain('"node-01" - Fielddata utilization is at 81000.0 bytes (81%)'); + expect(message.text).to.contain('"node-03" - Fielddata utilization is at 90000.0 bytes (90%)'); + }); + + }); + + describe('below 80%', function () { + var response; + beforeEach(function () { + var self = this; + this.timeout(5000); + var fixture = { + indexPattern: indexPattern, + type: 'node_stats', + duration: moment.duration(5, 's'), + startDate: moment.utc().subtract(5, 'm'), + data: [ + ['node.name', 'indices.fielddata.memory_size_in_bytes'], + ['node-01', 12039], + 
['node-02', 54393], + ['node-03', 20302] + ] + }; + return executeWatcher('fielddata', fixture).then(function (resp) { + response = resp; + return resp; + }); + }); + + it('should not send an email', function () { + expect(response.state).to.be('execution_not_needed'); + expect(response.execution_result.condition.script.met).to.be(false); + expect(this.mailbox).to.have.length(0); + }); + + }); + + }); +}); + diff --git a/watcher/watches/fielddata.json b/watcher/watches/fielddata.json new file mode 100644 index 00000000000..08072f35ebf --- /dev/null +++ b/watcher/watches/fielddata.json @@ -0,0 +1,79 @@ +{ + "metadata": { + "fielddata_cache_size": 100000, + "threshold": 0.8 + }, + "trigger": { + "schedule": { + "interval": "1m" + } + }, + "input": { + "search": { + "request": { + "indices": [ + ".marvel-*" + ], + "types": "node_stats", + "search_type": "count", + "body": { + "query": { + "filtered": { + "filter": { + "range": { + "@timestamp": { + "gte": "now-1m", + "lte": "now" + } + } + } + } + }, + "aggs": { + "minutes": { + "date_histogram": { + "field": "@timestamp", + "interval": "5s" + }, + "aggs": { + "nodes": { + "terms": { + "field": "node.name.raw", + "size": 10, + "order": { + "fielddata": "desc" + } + }, + "aggs": { + "fielddata": { + "avg": { + "field": "indices.fielddata.memory_size_in_bytes" + } + } + } + } + } + } + } + } + } + } + }, + "throttle_period": "30m", + "condition": { + "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.fielddata && node.fielddata.value >= (ctx.metadata.fielddata_cache_size * ctx.metadata.threshold);" + }, + "actions": { + "send_email": { + "transform": { + "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll({ return it.fielddata && it.fielddata.value >= (ctx.metadata.fielddata_cache_size * ctx.metadata.threshold) 
}).collect({ it.fielddata.percent = Math.round((it.fielddata.value/ctx.metadata.fielddata_cache_size)*100); it });" + }, + "email": { + "to": "user@example.com", + "subject": "Watcher Notification - NODES WITH 80% FIELDDATA UTILIZATION", + "body": "Nodes with 80% FIELDDATA UTILIZATION (above 80%):\n\n{{#ctx.payload._value}}\"{{key}}\" - Fielddata utilization is at {{fielddata.value}} bytes ({{fielddata.percent}}%)\n{{/ctx.payload._value}}" + } + } + } +} + From e3538c1105fd69c5485b68c88b108bbabf6993b3 Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Mon, 1 Jun 2015 14:04:43 -0700 Subject: [PATCH 36/39] Adding some instructions to the readme Original commit: elastic/x-pack-elasticsearch@379b0a76c8531e2a2ffe5a3f4f79e591a6c99c90 --- watcher/README.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/watcher/README.md b/watcher/README.md index 3611ac8e063..b27def2bb47 100644 --- a/watcher/README.md +++ b/watcher/README.md @@ -1,4 +1,13 @@ # Marvel Watchers -More to come +In this directory you will find example Watchers for Marvel that were distributed with the Watcher Beta documentation. The `watches` directory contains the example watches and the `test` directory contains integration test for each watcher. +The testing framework is written in Node.js and use ESVM to create an Elasticsearch instance for running the tests against + +### Setup and Running Tests + +- Run `npm install` in this directory +- Ensure you have `mocha` installed globally +- Run `mocha` in this directory + +Once the tests are completed you wan delete the `esvm` directory. 
From 1559bdfc26a3325ba6041511eccbb9793e94bb4a Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Mon, 1 Jun 2015 14:05:43 -0700 Subject: [PATCH 37/39] removing stuff from README Original commit: elastic/x-pack-elasticsearch@801c5d8eb41016d3c049b4f75c730054013cb022 --- watcher/README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/watcher/README.md b/watcher/README.md index b27def2bb47..0d29c7e0225 100644 --- a/watcher/README.md +++ b/watcher/README.md @@ -9,5 +9,3 @@ The testing framework is written in Node.js and use ESVM to create an Elasticsea - Run `npm install` in this directory - Ensure you have `mocha` installed globally - Run `mocha` in this directory - -Once the tests are completed you wan delete the `esvm` directory. From 03380b74e070d41cc849075921636b3521537a4b Mon Sep 17 00:00:00 2001 From: Chris Cowan Date: Tue, 23 Jun 2015 13:45:52 -0700 Subject: [PATCH 38/39] Refactor plugin layout; add settings; start issues work Original commit: elastic/x-pack-elasticsearch@dae4d50d495d018cf489c3f8da4a8aebe8cf62c9 --- .bowerrc | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .bowerrc diff --git a/.bowerrc b/.bowerrc new file mode 100644 index 00000000000..1d69e778685 --- /dev/null +++ b/.bowerrc @@ -0,0 +1,4 @@ +{ + "directory": "./public/bower_components" +} + From 37c0b627e75f10eb844677c77529a6997f21e4fa Mon Sep 17 00:00:00 2001 From: uboness Date: Fri, 17 Jul 2015 13:03:23 +0200 Subject: [PATCH 39/39] migration of marvel agent 2.0 Original commit: elastic/x-pack-elasticsearch@6cd0a7d25c2960f31558381c2cf7550e4c492def --- .bowerrc | 4 - .jshintignore | 2 - NOTICE.txt | 5 - README.md | 68 -- .../elasticsearch_license_header.txt | 14 + .../dev-tools/license_header_definition.xml | 13 + marvel/pom.xml | 42 ++ marvel/src/main/assemblies/plugin.xml | 17 + .../elasticsearch/marvel/MarvelModule.java | 29 + .../elasticsearch/marvel/MarvelPlugin.java | 78 +++ .../elasticsearch/marvel/MarvelVersion.java | 204 ++++++ .../marvel/agent/AgentService.java | 215 
++++++ .../agent/collector/AbstractCollector.java | 92 +++ .../marvel/agent/collector/Collector.java | 18 + .../agent/collector/CollectorModule.java | 36 + .../collector/indices/IndexCollector.java | 71 ++ .../collector/indices/IndexMarvelDoc.java | 172 +++++ .../agent/exporter/AbstractExporter.java | 88 +++ .../marvel/agent/exporter/Exporter.java | 17 + .../marvel/agent/exporter/ExporterModule.java | 35 + .../marvel/agent/exporter/HttpESExporter.java | 652 ++++++++++++++++++ .../marvel/agent/exporter/MarvelDoc.java | 54 ++ .../marvel/agent/support/AgentUtils.java | 146 ++++ .../marvel/license/LicenseModule.java | 37 + .../marvel/license/LicenseService.java | 128 ++++ .../org/elasticsearch/marvel/mode/Mode.java | 65 ++ .../src/main/resources/es-plugin.properties | 2 + .../main/resources/marvel_index_template.json | 3 +- .../marvel/MarvelPluginClientTests.java | 45 ++ .../marvel/MarvelPluginTests.java | 92 +++ .../marvel/MarvelVersionTests.java | 24 + .../marvel/agent/AgentUtilsTests.java | 168 +++++ .../indices/IndexCollectorTests.java | 112 +++ .../indices/IndexMarvelDocTests.java | 42 ++ .../agent/exporter/HttpESExporterTests.java | 230 ++++++ .../license/LicenseIntegrationTests.java | 147 ++++ marvel/src/test/resources/log4j.properties | 11 + watcher/README.md | 11 - watcher/lib/client.js | 4 - watcher/lib/create_template.js | 11 - watcher/lib/execute_watcher.js | 40 -- watcher/lib/find_port.js | 13 - watcher/lib/inject_stats.js | 74 -- watcher/lib/load_watcher.js | 26 - watcher/lib/setup_es.js | 71 -- watcher/lib/setup_smtp_server.js | 56 -- watcher/lib/test_no_execute.js | 28 - watcher/package.json | 23 - watcher/test/cluster_status.js | 95 --- watcher/test/cpu_usage.js | 81 --- watcher/test/fielddata.js | 82 --- watcher/test/file_descriptors.js | 82 --- watcher/test/heap_used.js | 81 --- watcher/test/mocha.opts | 2 - watcher/watches/cluster_status.json | 92 --- watcher/watches/cpu_usage.json | 73 -- watcher/watches/fielddata.json | 79 --- 
watcher/watches/file_descriptors.json | 79 --- watcher/watches/heap_used.json | 73 -- 59 files changed, 3097 insertions(+), 1257 deletions(-) delete mode 100644 .bowerrc delete mode 100644 .jshintignore delete mode 100644 NOTICE.txt delete mode 100644 README.md create mode 100644 marvel/dev-tools/elasticsearch_license_header.txt create mode 100644 marvel/dev-tools/license_header_definition.xml create mode 100644 marvel/pom.xml create mode 100644 marvel/src/main/assemblies/plugin.xml create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/MarvelModule.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/MarvelPlugin.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/MarvelVersion.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/AgentService.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/collector/AbstractCollector.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/collector/Collector.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/collector/CollectorModule.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollector.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDoc.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/AbstractExporter.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/Exporter.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/ExporterModule.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/HttpESExporter.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/MarvelDoc.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/agent/support/AgentUtils.java create mode 100644 
marvel/src/main/java/org/elasticsearch/marvel/license/LicenseModule.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/license/LicenseService.java create mode 100644 marvel/src/main/java/org/elasticsearch/marvel/mode/Mode.java create mode 100644 marvel/src/main/resources/es-plugin.properties rename watcher/lib/template.json => marvel/src/main/resources/marvel_index_template.json (99%) create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginClientTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/MarvelVersionTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/agent/AgentUtilsTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollectorTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDocTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/agent/exporter/HttpESExporterTests.java create mode 100644 marvel/src/test/java/org/elasticsearch/marvel/license/LicenseIntegrationTests.java create mode 100644 marvel/src/test/resources/log4j.properties delete mode 100644 watcher/README.md delete mode 100644 watcher/lib/client.js delete mode 100644 watcher/lib/create_template.js delete mode 100644 watcher/lib/execute_watcher.js delete mode 100644 watcher/lib/find_port.js delete mode 100644 watcher/lib/inject_stats.js delete mode 100644 watcher/lib/load_watcher.js delete mode 100644 watcher/lib/setup_es.js delete mode 100644 watcher/lib/setup_smtp_server.js delete mode 100644 watcher/lib/test_no_execute.js delete mode 100644 watcher/package.json delete mode 100644 watcher/test/cluster_status.js delete mode 100644 watcher/test/cpu_usage.js delete mode 100644 watcher/test/fielddata.js delete mode 100644 watcher/test/file_descriptors.js delete mode 100644 
watcher/test/heap_used.js delete mode 100644 watcher/test/mocha.opts delete mode 100644 watcher/watches/cluster_status.json delete mode 100644 watcher/watches/cpu_usage.json delete mode 100644 watcher/watches/fielddata.json delete mode 100644 watcher/watches/file_descriptors.json delete mode 100644 watcher/watches/heap_used.json diff --git a/.bowerrc b/.bowerrc deleted file mode 100644 index 1d69e778685..00000000000 --- a/.bowerrc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "directory": "./public/bower_components" -} - diff --git a/.jshintignore b/.jshintignore deleted file mode 100644 index a7d5694758a..00000000000 --- a/.jshintignore +++ /dev/null @@ -1,2 +0,0 @@ -vendor -Gruntfile.js diff --git a/NOTICE.txt b/NOTICE.txt deleted file mode 100644 index 86016475acd..00000000000 --- a/NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -ElasticSearch -Copyright 2009-2013 ElasticSearch - -This product includes software developed by The Apache Software -Foundation (http://www.apache.org/). diff --git a/README.md b/README.md deleted file mode 100644 index 884a5b6516b..00000000000 --- a/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# Installing the last release of Marvel - -The easiest way to play/get to know Marvel is to install the latest release version of it. To do so, just run the following command on every node on your cluster (restart node for it to have effect): - -``` -./bin/plugin -i elasticsearch/marvel/latest -``` - -Once done, open up the following url (assuming standard ES config): http://localhost:9200/_plugin/marvel . This will take you to the Overview Dashboard. Use Kibana's Load dashboard menu to navigate to the Cluster Pulse dashboard - - -## I just want to run Sense - -To run Sense, checkout a copy of this repo. You can now double click `sense/index.html` and it will open in your default browser. - -For a cleaner experience, we recommend using something like http://anvilformac.com and point it at the repo checkout. 
Once done you can access Sense via `http://webserver/sense/`, for example http://elasticsearch-marvel.dev/sense if using Anvil. - -Note: to run the Kibana side of Marvel, you'd need to install grunt as described bellow. - -## Grunt build system (for running the UI from a code checkout) -This grunt-based build system handles Kibana development environment setup for Marvel as well as building, packaging and distribution of the Marvel plugin. Note that you **must** run *grunt setup* before any other tasks as this build system reuses parts of the Kibana build system that must be fetched - -### Installing -You will need node.js+npm and grunt. Node is available via brew, install grunt with the command below. Once grunt is installed you may run grunt tasks to setup your environment and build Marvel - -```npm install -g grunt-cli``` - -```npm install``` - -### Tasks - -```grunt setup``` - -**Run this first.** It will download the right Kibana version to ./vendor/kibana, copies the appropriate config.js to the right place and make any symlinks needed for a proper marvel/kibana environment - -```grunt server``` - -Starts a web server on http://127.0.0.1:5601 pointing at the kibana directory, while also serving custom marvel panels. - -You can use `grunt server --port=5601 --es_host=9200` to control the ports used for kibana and the elasticsearch port used. - -```grunt jshint``` - -Lints code without building - -```grunt build``` - -Merges kibana and marvel code, builds Kibana and the plugin (via mvn) and puts them in ./build. - -```grunt package``` - -Zips and tar+gzips the build in build/packages. Includes grunt build - -```grunt release``` - -Uploads created archives to download.elasticsearch.org/elasticsearch/marvel/marvel-VERSION.extention. You will need S3 credentials in .aws-config.json. 
Format as so: - -``` -{ - "key":"MY_KEY_HERE", - "secret":"your/long/secret/string" -} - -``` - -To upload the current archive as the "latest" release, use: - -```grunt release --latest``` diff --git a/marvel/dev-tools/elasticsearch_license_header.txt b/marvel/dev-tools/elasticsearch_license_header.txt new file mode 100644 index 00000000000..250c1ffaa0b --- /dev/null +++ b/marvel/dev-tools/elasticsearch_license_header.txt @@ -0,0 +1,14 @@ +ELASTICSEARCH CONFIDENTIAL +__________________ + + [2014] Elasticsearch Incorporated. All Rights Reserved. + +NOTICE: All information contained herein is, and remains +the property of Elasticsearch Incorporated and its suppliers, +if any. The intellectual and technical concepts contained +herein are proprietary to Elasticsearch Incorporated +and its suppliers and may be covered by U.S. and Foreign Patents, +patents in process, and are protected by trade secret or copyright law. +Dissemination of this information or reproduction of this material +is strictly forbidden unless prior written permission is obtained +from Elasticsearch Incorporated. 
\ No newline at end of file diff --git a/marvel/dev-tools/license_header_definition.xml b/marvel/dev-tools/license_header_definition.xml new file mode 100644 index 00000000000..86857154bcf --- /dev/null +++ b/marvel/dev-tools/license_header_definition.xml @@ -0,0 +1,13 @@ + + + + /* + * + */EOL + + (\s|\t)*/\*.*$ + .*\*/(\s|\t)*$ + false + true + + \ No newline at end of file diff --git a/marvel/pom.xml b/marvel/pom.xml new file mode 100644 index 00000000000..ab0785168ae --- /dev/null +++ b/marvel/pom.xml @@ -0,0 +1,42 @@ + + + + 4.0.0 + + org.elasticsearch + elasticsearch-marvel + Elastic X-Plugins - Marvel + + + org.elasticsearch + x-plugins + 2.0.0-beta1-SNAPSHOT + + + + + + org.elasticsearch + elasticsearch-license-plugin + ${project.version} + provided + + + + + + + + src/main/resources + true + + **/*.properties + marvel_index_template.json + + + + + + diff --git a/marvel/src/main/assemblies/plugin.xml b/marvel/src/main/assemblies/plugin.xml new file mode 100644 index 00000000000..2ccfc875e9c --- /dev/null +++ b/marvel/src/main/assemblies/plugin.xml @@ -0,0 +1,17 @@ + + plugin + + zip + + false + + + / + true + true + + org.elasticsearch:elasticsearch + + + + \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/MarvelModule.java b/marvel/src/main/java/org/elasticsearch/marvel/MarvelModule.java new file mode 100644 index 00000000000..4419c2efe44 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/MarvelModule.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel; + +import com.google.common.collect.ImmutableList; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.inject.Scopes; +import org.elasticsearch.common.inject.SpawnModules; +import org.elasticsearch.marvel.agent.AgentService; +import org.elasticsearch.marvel.agent.collector.CollectorModule; +import org.elasticsearch.marvel.agent.exporter.ExporterModule; +import org.elasticsearch.marvel.license.LicenseModule; + +public class MarvelModule extends AbstractModule implements SpawnModules { + + @Override + protected void configure() { + bind(AgentService.class).in(Scopes.SINGLETON); + } + + @Override + public Iterable spawnModules() { + return ImmutableList.of(new LicenseModule(), new CollectorModule(), new ExporterModule()); + } +} diff --git a/marvel/src/main/java/org/elasticsearch/marvel/MarvelPlugin.java b/marvel/src/main/java/org/elasticsearch/marvel/MarvelPlugin.java new file mode 100644 index 00000000000..1530fd150cf --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/MarvelPlugin.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel; + +import com.google.common.collect.ImmutableList; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.AgentService; +import org.elasticsearch.marvel.license.LicenseService; +import org.elasticsearch.plugins.AbstractPlugin; +import org.elasticsearch.tribe.TribeService; + +import java.util.Collection; + +public class MarvelPlugin extends AbstractPlugin { + + private static final ESLogger logger = Loggers.getLogger(MarvelPlugin.class); + + public static final String NAME = "marvel"; + public static final String ENABLED = NAME + ".enabled"; + + private final boolean enabled; + + public MarvelPlugin(Settings settings) { + this.enabled = marvelEnabled(settings); + } + + @Override + public String name() { + return NAME; + } + + @Override + public String description() { + return "Elasticsearch Marvel"; + } + + public boolean isEnabled() { + return enabled; + } + + @Override + public Collection> modules() { + if (!enabled) { + return ImmutableList.of(); + } + return ImmutableList.>of(MarvelModule.class); + } + + @Override + public Collection> services() { + if (!enabled) { + return ImmutableList.of(); + } + return ImmutableList.>of(LicenseService.class, AgentService.class); + } + + public static boolean marvelEnabled(Settings settings) { + String tribe = settings.get(TribeService.TRIBE_NAME); + if (tribe != null) { + logger.trace("marvel cannot be started on tribe node [{}]", tribe); + return false; + } + + if (!"node".equals(settings.get(Client.CLIENT_TYPE_SETTING))) { + logger.trace("marvel cannot be started on a transport client"); + return false; + } + return settings.getAsBoolean(ENABLED, true); + } +} diff --git 
a/marvel/src/main/java/org/elasticsearch/marvel/MarvelVersion.java b/marvel/src/main/java/org/elasticsearch/marvel/MarvelVersion.java new file mode 100644 index 00000000000..f156e93c424 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/MarvelVersion.java @@ -0,0 +1,204 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.license.plugin.LicenseVersion; + +import java.io.IOException; +import java.io.Serializable; + +public class MarvelVersion implements Serializable { + + // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is Beta/RC indicator + // AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release + // the (internal) format of the id is there so we can easily do after/before checks on the id + + public static final int V_2_0_0_Beta1_ID = /*00*/2000001; + public static final MarvelVersion V_2_0_0_Beta1 = new MarvelVersion(V_2_0_0_Beta1_ID, true, Version.V_2_0_0_Beta1, LicenseVersion.V_1_0_0); + + public static final MarvelVersion CURRENT = V_2_0_0_Beta1; + + public static MarvelVersion readVersion(StreamInput in) throws IOException { + return fromId(in.readVInt()); + } + + public static MarvelVersion fromId(int id) { + switch (id) { + case V_2_0_0_Beta1_ID: + return V_2_0_0_Beta1; + default: + return new MarvelVersion(id, null, Version.CURRENT, LicenseVersion.CURRENT); + } + } + + public static void writeVersion(MarvelVersion version, StreamOutput out) throws IOException { + 
out.writeVInt(version.id); + } + + /** + * Returns the smallest version between the 2. + */ + public static MarvelVersion smallest(MarvelVersion version1, MarvelVersion version2) { + return version1.id < version2.id ? version1 : version2; + } + + /** + * Returns the version given its string representation, current version if the argument is null or empty + */ + public static MarvelVersion fromString(String version) { + if (!Strings.hasLength(version)) { + return MarvelVersion.CURRENT; + } + + String[] parts = version.split("[\\.-]"); + if (parts.length < 3 || parts.length > 4) { + throw new IllegalArgumentException("the version needs to contain major, minor and revision, and optionally the build"); + } + + try { + //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo + int major = Integer.parseInt(parts[0]) * 1000000; + int minor = Integer.parseInt(parts[1]) * 10000; + int revision = Integer.parseInt(parts[2]) * 100; + + int build = 99; + if (parts.length == 4) { + String buildStr = parts[3]; + if (buildStr.startsWith("beta")) { + build = Integer.parseInt(buildStr.substring(4)); + } + if (buildStr.startsWith("rc")) { + build = Integer.parseInt(buildStr.substring(2)) + 50; + } + } + + return fromId(major + minor + revision + build); + + } catch(NumberFormatException e) { + throw new IllegalArgumentException("unable to parse version " + version, e); + } + } + + public final int id; + public final byte major; + public final byte minor; + public final byte revision; + public final byte build; + public final Boolean snapshot; + public final Version minEsCompatibilityVersion; + public final LicenseVersion minLicenseCompatibilityVersion; + + MarvelVersion(int id, @Nullable Boolean snapshot, Version minEsCompatibilityVersion, LicenseVersion minLicenseCompatibilityVersion) { + this.id = id; + this.major = (byte) ((id / 1000000) % 100); + this.minor = (byte) ((id / 10000) % 100); + this.revision = (byte) ((id / 100) % 100); 
+ this.build = (byte) (id % 100); + this.snapshot = snapshot; + this.minEsCompatibilityVersion = minEsCompatibilityVersion; + this.minLicenseCompatibilityVersion = minLicenseCompatibilityVersion; + } + + public boolean snapshot() { + return snapshot != null && snapshot; + } + + public boolean after(MarvelVersion version) { + return version.id < id; + } + + public boolean onOrAfter(MarvelVersion version) { + return version.id <= id; + } + + public boolean before(MarvelVersion version) { + return version.id > id; + } + + public boolean onOrBefore(MarvelVersion version) { + return version.id >= id; + } + + public boolean compatibleWith(MarvelVersion version) { + return version.onOrAfter(minimumCompatibilityVersion()); + } + + public boolean compatibleWith(Version esVersion) { + return esVersion.onOrAfter(minEsCompatibilityVersion); + } + + /** + * Returns the minimum compatible version based on the current + * version. Ie a node needs to have at least the return version in order + * to communicate with a node running the current version. The returned version + * is in most of the cases the smallest major version release unless the current version + * is a beta or RC release then the version itself is returned. + */ + public MarvelVersion minimumCompatibilityVersion() { + return MarvelVersion.smallest(this, fromId(major * 1000000 + 99)); + } + + /** + * @return The minimum elasticsearch version this marvel version is compatible with. + */ + public Version minimumEsCompatiblityVersion() { + return minEsCompatibilityVersion; + } + + /** + * @return The minimum license plugin version this marvel version is compatible with. + */ + public LicenseVersion minimumLicenseCompatibilityVersion() { + return minLicenseCompatibilityVersion; + } + + + /** + * Just the version number (without -SNAPSHOT if snapshot). 
+ */ + public String number() { + StringBuilder sb = new StringBuilder(); + sb.append(major).append('.').append(minor).append('.').append(revision); + if (build < 50) { + sb.append("-beta").append(build); + } else if (build < 99) { + sb.append("-rc").append(build - 50); + } + return sb.toString(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(number()); + if (snapshot()) { + sb.append("-SNAPSHOT"); + } + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + MarvelVersion that = (MarvelVersion) o; + + if (id != that.id) return false; + + return true; + } + + @Override + public int hashCode() { + return id; + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/AgentService.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/AgentService.java new file mode 100644 index 00000000000..b7b3dbb6c6a --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/AgentService.java @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent; + +import com.google.common.collect.ImmutableSet; +import org.elasticsearch.cluster.settings.ClusterDynamicSettings; +import org.elasticsearch.cluster.settings.DynamicSettings; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.marvel.agent.collector.Collector; +import org.elasticsearch.marvel.agent.exporter.Exporter; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; +import org.elasticsearch.marvel.license.LicenseService; +import org.elasticsearch.node.settings.NodeSettingsService; + +import java.util.Collection; +import java.util.Set; + +public class AgentService extends AbstractLifecycleComponent implements NodeSettingsService.Listener { + + private static final String SETTINGS_BASE = "marvel.agent."; + + public static final String SETTINGS_INTERVAL = SETTINGS_BASE + "interval"; + public static final String SETTINGS_INDICES = SETTINGS_BASE + "indices"; + public static final String SETTINGS_ENABLED = SETTINGS_BASE + "enabled"; + + public static final String SETTINGS_STATS_TIMEOUT = SETTINGS_BASE + "stats.timeout"; + public static final String SETTINGS_INDICES_STATS_TIMEOUT = SETTINGS_BASE + "stats.indices.timeout"; + + private volatile ExportingWorker exportingWorker; + + private volatile Thread workerThread; + private volatile long samplingInterval; + volatile private String[] indicesToExport = Strings.EMPTY_ARRAY; + + private volatile TimeValue indicesStatsTimeout; + private volatile TimeValue clusterStatsTimeout; + + private final Collection collectors; + private final Collection exporters; + + @Inject + public AgentService(Settings settings, NodeSettingsService nodeSettingsService, + @ClusterDynamicSettings 
DynamicSettings dynamicSettings, + LicenseService licenseService, + Set collectors, Set exporters) { + super(settings); + this.samplingInterval = settings.getAsTime(SETTINGS_INTERVAL, TimeValue.timeValueSeconds(10)).millis(); + this.indicesToExport = settings.getAsArray(SETTINGS_INDICES, this.indicesToExport, true); + + TimeValue statsTimeout = settings.getAsTime(SETTINGS_STATS_TIMEOUT, TimeValue.timeValueMinutes(10)); + indicesStatsTimeout = settings.getAsTime(SETTINGS_INDICES_STATS_TIMEOUT, statsTimeout); + + if (settings.getAsBoolean(SETTINGS_ENABLED, true)) { + this.collectors = ImmutableSet.copyOf(collectors); + this.exporters = ImmutableSet.copyOf(exporters); + } else { + this.collectors = ImmutableSet.of(); + this.exporters = ImmutableSet.of(); + logger.info("collecting disabled by settings"); + } + + nodeSettingsService.addListener(this); + dynamicSettings.addDynamicSetting(SETTINGS_INTERVAL); + dynamicSettings.addDynamicSetting(SETTINGS_INDICES + ".*"); // array settings + dynamicSettings.addDynamicSetting(SETTINGS_STATS_TIMEOUT); + dynamicSettings.addDynamicSetting(SETTINGS_INDICES_STATS_TIMEOUT); + + logger.trace("marvel is running in [{}] mode", licenseService.mode()); + } + + protected void applyIntervalSettings() { + if (samplingInterval <= 0) { + logger.info("data sampling is disabled due to interval settings [{}]", samplingInterval); + if (workerThread != null) { + + // notify worker to stop on its leisure, not to disturb an exporting operation + exportingWorker.closed = true; + + exportingWorker = null; + workerThread = null; + } + } else if (workerThread == null || !workerThread.isAlive()) { + + exportingWorker = new ExportingWorker(); + workerThread = new Thread(exportingWorker, EsExecutors.threadName(settings, "marvel.exporters")); + workerThread.setDaemon(true); + workerThread.start(); + + } + } + + @Override + protected void doStart() { + if (exporters.size() == 0) { + return; + } + + for (Collector collector : collectors) { + 
collector.start(); + } + + for (Exporter exporter : exporters) { + exporter.start(); + } + + applyIntervalSettings(); + } + + @Override + protected void doStop() { + if (exporters.size() == 0) { + return; + } + if (workerThread != null && workerThread.isAlive()) { + exportingWorker.closed = true; + workerThread.interrupt(); + try { + workerThread.join(60000); + } catch (InterruptedException e) { + // we don't care... + } + + } + + for (Collector collector : collectors) { + collector.stop(); + } + + for (Exporter exporter : exporters) { + exporter.stop(); + } + } + + @Override + protected void doClose() { + for (Collector collector : collectors) { + collector.close(); + } + + for (Exporter exporter : exporters) { + exporter.close(); + } + } + + // used for testing + public Collection getExporters() { + return exporters; + } + + @Override + public void onRefreshSettings(Settings settings) { + TimeValue newSamplingInterval = settings.getAsTime(SETTINGS_INTERVAL, null); + if (newSamplingInterval != null && newSamplingInterval.millis() != samplingInterval) { + logger.info("sampling interval updated to [{}]", newSamplingInterval); + samplingInterval = newSamplingInterval.millis(); + applyIntervalSettings(); + } + + String[] indices = settings.getAsArray(SETTINGS_INDICES, null, true); + if (indices != null) { + logger.info("sampling indices updated to [{}]", Strings.arrayToCommaDelimitedString(indices)); + indicesToExport = indices; + } + + TimeValue statsTimeout = settings.getAsTime(SETTINGS_STATS_TIMEOUT, TimeValue.timeValueMinutes(10)); + TimeValue newTimeValue = settings.getAsTime(SETTINGS_INDICES_STATS_TIMEOUT, statsTimeout); + if (!indicesStatsTimeout.equals(newTimeValue)) { + logger.info("indices stats timeout updated to [{}]", newTimeValue); + indicesStatsTimeout = newTimeValue; + + } + } + + class ExportingWorker implements Runnable { + + volatile boolean closed = false; + + @Override + public void run() { + while (!closed) { + // sleep first to allow node to 
complete initialization before collecting the first start + try { + Thread.sleep(samplingInterval); + if (closed) { + continue; + } + + for (Collector collector : collectors) { + logger.trace("collecting {}", collector.name()); + Collection results = collector.collect(); + + if (results != null && !results.isEmpty()) { + for (Exporter exporter : exporters) { + exporter.export(results); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (Throwable t) { + logger.error("Background thread had an uncaught exception:", t); + } + } + logger.debug("worker shutdown"); + } + } +} diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/AbstractCollector.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/AbstractCollector.java new file mode 100644 index 00000000000..d2918ad9220 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/AbstractCollector.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.collector; + + +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; + +import java.util.Collection; + +public abstract class AbstractCollector extends AbstractLifecycleComponent implements Collector { + + private final String name; + + protected final ClusterService clusterService; + + @Inject + public AbstractCollector(Settings settings, String name, ClusterService clusterService) { + super(settings); + this.name = name; + this.clusterService = clusterService; + } + + @Override + public String name() { + return name; + } + + @Override + public T start() { + logger.debug("starting collector [{}]", name()); + return super.start(); + } + + @Override + protected void doStart() { + } + + /** + * Indicates if the current collector should + * be executed on master node only. 
+ */ + protected boolean masterOnly() { + return false; + } + + @Override + public Collection collect() { + if (masterOnly() && !clusterService.state().nodes().localNodeMaster()) { + logger.trace("collector [{}] runs on master only", name()); + return null; + } + + try { + return doCollect(); + } catch (ElasticsearchTimeoutException e) { + logger.error("collector [{}] timed out when collecting data"); + } catch (Exception e) { + logger.error("collector [{}] throws exception when collecting data", e, name()); + } + return null; + } + + protected abstract Collection doCollect() throws Exception; + + @Override + public T stop() { + logger.debug("stopping collector [{}]", name()); + return super.stop(); + } + + @Override + protected void doStop() { + } + + @Override + public void close() { + logger.trace("closing collector [{}]", name()); + super.close(); + } + + @Override + protected void doClose() { + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/Collector.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/Collector.java new file mode 100644 index 00000000000..6e077960020 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/Collector.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.collector; + +import org.elasticsearch.common.component.LifecycleComponent; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; + +import java.util.Collection; + +public interface Collector extends LifecycleComponent { + + String name(); + + Collection collect(); +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/CollectorModule.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/CollectorModule.java new file mode 100644 index 00000000000..25b71d3f5a1 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/CollectorModule.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.collector; + +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.marvel.agent.collector.indices.IndexCollector; + +import java.util.HashSet; +import java.util.Set; + +public class CollectorModule extends AbstractModule { + + private final Set> collectors = new HashSet<>(); + + public CollectorModule() { + // Registers default collectors + registerCollector(IndexCollector.class); + } + + @Override + protected void configure() { + Multibinder binder = Multibinder.newSetBinder(binder(), Collector.class); + for (Class collector : collectors) { + bind(collector).asEagerSingleton(); + binder.addBinding().to(collector); + } + } + + public void registerCollector(Class collector) { + collectors.add(collector); + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollector.java 
b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollector.java new file mode 100644 index 00000000000..6df746e713d --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollector.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.collector.indices; + +import com.google.common.collect.ImmutableList; +import org.elasticsearch.action.admin.indices.stats.IndexStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.collector.AbstractCollector; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; + +import java.util.Collection; + +/** + * Collector for indices statistics. + * + * This collector runs on the master node only and collect a {@link IndexMarvelDoc} document + * for each existing index in the cluster. 
+ */ +public class IndexCollector extends AbstractCollector { + + public static final String NAME = "index-collector"; + protected static final String TYPE = "marvel_index"; + + private final ClusterName clusterName; + private final Client client; + + @Inject + public IndexCollector(Settings settings, ClusterService clusterService, ClusterName clusterName, Client client) { + super(settings, NAME, clusterService); + this.client = client; + this.clusterName = clusterName; + } + + @Override + protected boolean masterOnly() { + return true; + } + + @Override + protected Collection doCollect() throws Exception { + ImmutableList.Builder results = ImmutableList.builder(); + + IndicesStatsResponse indicesStats = client.admin().indices().prepareStats().all() + .setStore(true) + .setIndexing(true) + .setDocs(true) + .get(); + + long timestamp = System.currentTimeMillis(); + for (IndexStats indexStats : indicesStats.getIndices().values()) { + results.add(buildMarvelDoc(clusterName.value(), TYPE, timestamp, indexStats)); + } + return results.build(); + } + + protected MarvelDoc buildMarvelDoc(String clusterName, String type, long timestamp, IndexStats indexStats) { + return IndexMarvelDoc.createMarvelDoc(clusterName, type, timestamp, + indexStats.getIndex(), + indexStats.getTotal().getDocs().getCount(), + indexStats.getTotal().getStore().sizeInBytes(), indexStats.getTotal().getStore().throttleTime().millis(), + indexStats.getTotal().getIndexing().getTotal().getThrottleTimeInMillis()); + } +} diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDoc.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDoc.java new file mode 100644 index 00000000000..890f9af7751 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDoc.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.collector.indices; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +public class IndexMarvelDoc extends MarvelDoc { + + private final String index; + private final Docs docs; + private final Store store; + private final Indexing indexing; + + public IndexMarvelDoc(String clusterName, String type, long timestamp, + String index, Docs docs, Store store, Indexing indexing) { + super(clusterName, type, timestamp); + this.index = index; + this.docs = docs; + this.store = store; + this.indexing = indexing; + } + + @Override + public IndexMarvelDoc payload() { + return this; + } + + public String getIndex() { + return index; + } + + public Docs getDocs() { + return docs; + } + + public Store getStore() { + return store; + } + + public Indexing getIndexing() { + return indexing; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + super.toXContent(builder, params); + builder.field(Fields.INDEX, index); + if (docs != null) { + docs.toXContent(builder, params); + } + if (store != null) { + store.toXContent(builder, params); + } + if (indexing != null) { + indexing.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + public static IndexMarvelDoc createMarvelDoc(String clusterName, String type, long timestamp, + String index, long docsCount, long storeSizeInBytes, long storeThrottleTimeInMillis, long indexingThrottleTimeInMillis) { + return new 
IndexMarvelDoc(clusterName, type, timestamp, index, + new Docs(docsCount), + new Store(storeSizeInBytes, storeThrottleTimeInMillis), + new Indexing(indexingThrottleTimeInMillis)); + } + + static final class Fields { + static final XContentBuilderString INDEX = new XContentBuilderString("index"); + } + + static class Docs implements ToXContent { + + private final long count; + + Docs(long count) { + this.count = count; + } + + public long getCount() { + return count; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.DOCS); + builder.field(Fields.COUNT, count); + builder.endObject(); + return builder; + } + + static final class Fields { + static final XContentBuilderString DOCS = new XContentBuilderString("docs"); + static final XContentBuilderString COUNT = new XContentBuilderString("count"); + } + } + + static class Store implements ToXContent { + + private final long sizeInBytes; + private final long throttleTimeInMillis; + + public Store(long sizeInBytes, long throttleTimeInMillis) { + this.sizeInBytes = sizeInBytes; + this.throttleTimeInMillis = throttleTimeInMillis; + } + + public long getSizeInBytes() { + return sizeInBytes; + } + + public long getThrottleTimeInMillis() { + return throttleTimeInMillis; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.STORE); + builder.field(Fields.SIZE_IN_BYTES, sizeInBytes); + builder.timeValueField(Fields.THROTTLE_TIME_IN_MILLIS, Fields.THROTTLE_TIME, new TimeValue(throttleTimeInMillis, TimeUnit.MILLISECONDS)); + builder.endObject(); + return builder; + } + + static final class Fields { + static final XContentBuilderString STORE = new XContentBuilderString("store"); + static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes"); + static final XContentBuilderString THROTTLE_TIME = new 
XContentBuilderString("throttle_time"); + static final XContentBuilderString THROTTLE_TIME_IN_MILLIS = new XContentBuilderString("throttle_time_in_millis"); + } + } + + static class Indexing implements ToXContent { + + private final long throttleTimeInMillis; + + public Indexing(long throttleTimeInMillis) { + this.throttleTimeInMillis = throttleTimeInMillis; + } + + public long getThrottleTimeInMillis() { + return throttleTimeInMillis; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(Fields.INDEXING); + builder.timeValueField(Fields.THROTTLE_TIME_IN_MILLIS, Fields.THROTTLE_TIME, new TimeValue(throttleTimeInMillis, TimeUnit.MILLISECONDS)); + builder.endObject(); + return builder; + } + + static final class Fields { + static final XContentBuilderString INDEXING = new XContentBuilderString("indexing"); + static final XContentBuilderString THROTTLE_TIME = new XContentBuilderString("throttle_time"); + static final XContentBuilderString THROTTLE_TIME_IN_MILLIS = new XContentBuilderString("throttle_time_in_millis"); + } + } +} + diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/AbstractExporter.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/AbstractExporter.java new file mode 100644 index 00000000000..1002774d015 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/AbstractExporter.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.exporter; + + +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; + +import java.util.Collection; + +public abstract class AbstractExporter extends AbstractLifecycleComponent implements Exporter { + + private final String name; + + protected final ClusterService clusterService; + + @Inject + public AbstractExporter(Settings settings, String name, ClusterService clusterService) { + super(settings); + this.name = name; + this.clusterService = clusterService; + } + + @Override + public String name() { + return name; + } + + @Override + public T start() { + logger.debug("starting exporter [{}]", name()); + return super.start(); + } + + @Override + protected void doStart() { + } + + protected boolean masterOnly() { + return false; + } + + @Override + public void export(Collection marvelDocs) { + if (masterOnly() && !clusterService.state().nodes().localNodeMaster()) { + logger.trace("exporter [{}] runs on master only", name()); + return; + } + + if (marvelDocs == null) { + logger.debug("no objects to export for [{}]", name()); + return; + } + + try { + doExport(marvelDocs); + } catch (Exception e) { + logger.error("export [{}] throws exception when exporting data", e, name()); + } + } + + protected abstract void doExport(Collection marvelDocs) throws Exception; + + @Override + public T stop() { + logger.debug("stopping exporter [{}]", name()); + return super.stop(); + } + + @Override + protected void doStop() { + } + + @Override + public void close() { + logger.trace("closing exporter [{}]", name()); + super.close(); + } + + @Override + protected void doClose() { + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/Exporter.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/Exporter.java 
new file mode 100644 index 00000000000..c7688aa3ebb --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/Exporter.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.exporter; + +import org.elasticsearch.common.component.LifecycleComponent; + +import java.util.Collection; + +public interface Exporter extends LifecycleComponent { + + String name(); + + void export(Collection marvelDocs); +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/ExporterModule.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/ExporterModule.java new file mode 100644 index 00000000000..23ac241128e --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/ExporterModule.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.exporter; + +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.multibindings.Multibinder; + +import java.util.HashSet; +import java.util.Set; + +public class ExporterModule extends AbstractModule { + + private final Set> exporters = new HashSet<>(); + + public ExporterModule() { + // Registers default exporter + registerExporter(HttpESExporter.class); + } + + @Override + protected void configure() { + Multibinder binder = Multibinder.newSetBinder(binder(), Exporter.class); + for (Class exporter : exporters) { + bind(exporter).asEagerSingleton(); + binder.addBinding().to(exporter); + } + } + + public void registerExporter(Class exporter) { + exporters.add(exporter); + } +} diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/HttpESExporter.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/HttpESExporter.java new file mode 100644 index 00000000000..208a7539f30 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/HttpESExporter.java @@ -0,0 +1,652 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.exporter; + +import com.google.common.io.ByteStreams; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.settings.ClusterDynamicSettings; +import org.elasticsearch.cluster.settings.DynamicSettings; +import org.elasticsearch.common.Base64; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.smile.SmileXContent; +import org.elasticsearch.env.Environment; +import org.elasticsearch.http.HttpServer; +import org.elasticsearch.marvel.agent.support.AgentUtils; +import org.elasticsearch.node.Node; +import org.elasticsearch.node.service.NodeService; +import org.elasticsearch.node.settings.NodeSettingsService; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import javax.net.ssl.*; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; + +public class HttpESExporter extends AbstractExporter implements NodeSettingsService.Listener { + + private static final String NAME = "es_exporter"; + + private static 
final String SETTINGS_PREFIX = "marvel.agent.exporter.es."; + public static final String SETTINGS_HOSTS = SETTINGS_PREFIX + "hosts"; + public static final String SETTINGS_INDEX_PREFIX = SETTINGS_PREFIX + "index.prefix"; + public static final String SETTINGS_INDEX_TIME_FORMAT = SETTINGS_PREFIX + "index.timeformat"; + public static final String SETTINGS_TIMEOUT = SETTINGS_PREFIX + "timeout"; + public static final String SETTINGS_READ_TIMEOUT = SETTINGS_PREFIX + "read_timeout"; + + // es level timeout used when checking and writing templates (used to speed up tests) + public static final String SETTINGS_CHECK_TEMPLATE_TIMEOUT = SETTINGS_PREFIX + ".template.master_timeout"; + + // es level timeout used for bulk indexing (used to speed up tests) + public static final String SETTINGS_BULK_TIMEOUT = SETTINGS_PREFIX + ".bulk.timeout"; + + volatile String[] hosts; + volatile boolean boundToLocalNode = false; + final String indexPrefix; + final DateTimeFormatter indexTimeFormatter; + volatile int timeoutInMillis; + volatile int readTimeoutInMillis; + + + /** https support * */ + final SSLSocketFactory sslSocketFactory; + volatile boolean hostnameVerification; + + final ClusterService clusterService; + final ClusterName clusterName; + final NodeService nodeService; + final Environment environment; + + HttpServer httpServer; + final boolean httpEnabled; + + @Nullable + final TimeValue templateCheckTimeout; + @Nullable + final TimeValue bulkTimeout; + + volatile boolean checkedAndUploadedIndexTemplate = false; + + final ConnectionKeepAliveWorker keepAliveWorker; + Thread keepAliveThread; + + @Inject + public HttpESExporter(Settings settings, ClusterService clusterService, ClusterName clusterName, + @ClusterDynamicSettings DynamicSettings dynamicSettings, + NodeSettingsService nodeSettingsService, + NodeService nodeService, Environment environment) { + super(settings, NAME, clusterService); + + this.clusterService = clusterService; + + this.clusterName = clusterName; + 
this.nodeService = nodeService; + this.environment = environment; + + httpEnabled = settings.getAsBoolean(Node.HTTP_ENABLED, true); + + hosts = settings.getAsArray(SETTINGS_HOSTS, Strings.EMPTY_ARRAY); + + validateHosts(hosts); + + indexPrefix = settings.get(SETTINGS_INDEX_PREFIX, ".marvel"); + String indexTimeFormat = settings.get(SETTINGS_INDEX_TIME_FORMAT, "YYYY.MM.dd"); + indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat).withZoneUTC(); + + timeoutInMillis = (int) settings.getAsTime(SETTINGS_TIMEOUT, new TimeValue(6000)).millis(); + readTimeoutInMillis = (int) settings.getAsTime(SETTINGS_READ_TIMEOUT, new TimeValue(timeoutInMillis * 10)).millis(); + + templateCheckTimeout = settings.getAsTime(SETTINGS_CHECK_TEMPLATE_TIMEOUT, null); + bulkTimeout = settings.getAsTime(SETTINGS_CHECK_TEMPLATE_TIMEOUT, null); + + keepAliveWorker = new ConnectionKeepAliveWorker(); + + dynamicSettings.addDynamicSetting(SETTINGS_HOSTS); + dynamicSettings.addDynamicSetting(SETTINGS_HOSTS + ".*"); + dynamicSettings.addDynamicSetting(SETTINGS_TIMEOUT); + dynamicSettings.addDynamicSetting(SETTINGS_READ_TIMEOUT); + dynamicSettings.addDynamicSetting(SETTINGS_SSL_HOSTNAME_VERIFICATION); + nodeSettingsService.addListener(this); + + if (!settings.getByPrefix(SETTINGS_SSL_PREFIX).getAsMap().isEmpty()) { + sslSocketFactory = createSSLSocketFactory(settings); + } else { + logger.trace("no ssl context configured"); + sslSocketFactory = null; + } + hostnameVerification = settings.getAsBoolean(SETTINGS_SSL_HOSTNAME_VERIFICATION, true); + + logger.debug("initialized with targets: {}, index prefix [{}], index time format [{}]", + AgentUtils.santizeUrlPwds(Strings.arrayToCommaDelimitedString(hosts)), indexPrefix, indexTimeFormat); + } + + static private void validateHosts(String[] hosts) { + for (String host : hosts) { + try { + AgentUtils.parseHostWithPath(host, ""); + } catch (URISyntaxException e) { + throw new RuntimeException("[marvel.agent.exporter] invalid host: [" + 
AgentUtils.santizeUrlPwds(host) + "]." + + " error: [" + AgentUtils.santizeUrlPwds(e.getMessage()) + "]"); + } catch (MalformedURLException e) { + throw new RuntimeException("[marvel.agent.exporter] invalid host: [" + AgentUtils.santizeUrlPwds(host) + "]." + + " error: [" + AgentUtils.santizeUrlPwds(e.getMessage()) + "]"); + } + } + } + + @Override + public String name() { + return NAME; + } + + @Inject(optional = true) + public void setHttpServer(HttpServer httpServer) { + this.httpServer = httpServer; + } + + private HttpURLConnection openExportingConnection() { + logger.trace("setting up an export connection"); + String queryString = ""; + if (bulkTimeout != null) { + queryString = "?master_timeout=" + bulkTimeout; + } + HttpURLConnection conn = openAndValidateConnection("POST", getIndexName() + "/_bulk" + queryString, XContentType.SMILE.restContentType()); + if (conn != null && (keepAliveThread == null || !keepAliveThread.isAlive())) { + // start keep alive upon successful connection if not there. 
+ initKeepAliveThread(); + } + return conn; + } + + private void addMarvelDocToConnection(HttpURLConnection conn, + MarvelDoc marvelDoc) throws IOException { + OutputStream os = conn.getOutputStream(); + // TODO: find a way to disable builder's substream flushing or something neat solution + XContentBuilder builder = XContentFactory.smileBuilder(); + builder.startObject().startObject("index") + .field("_type", marvelDoc.type()) + .endObject().endObject(); + builder.close(); + builder.bytes().writeTo(os); + os.write(SmileXContent.smileXContent.streamSeparator()); + + builder = XContentFactory.smileBuilder(); + builder.humanReadable(false); + marvelDoc.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.close(); + builder.bytes().writeTo(os); + os.write(SmileXContent.smileXContent.streamSeparator()); + } + + @SuppressWarnings("unchecked") + private void sendCloseExportingConnection(HttpURLConnection conn) throws IOException { + logger.trace("sending content"); + OutputStream os = conn.getOutputStream(); + os.close(); + if (conn.getResponseCode() != 200) { + logConnectionError("remote target didn't respond with 200 OK", conn); + return; + } + + InputStream inputStream = conn.getInputStream(); + try (XContentParser parser = XContentType.SMILE.xContent().createParser(inputStream)) { + Map response = parser.map(); + if (response.get("items") != null) { + ArrayList list = (ArrayList) response.get("items"); + for (Object itemObject : list) { + Map actions = (Map) itemObject; + for (String actionKey : actions.keySet()) { + Map action = (Map) actions.get(actionKey); + if (action.get("error") != null) { + logger.error("{} failure (index:[{}] type: [{}]): {}", actionKey, action.get("_index"), action.get("_type"), action.get("error")); + } + } + } + } + } + } + + @Override + protected void doExport(Collection marvelDocs) throws Exception { + HttpURLConnection conn = openExportingConnection(); + if (conn == null) { + return; + } + try { + for (MarvelDoc marvelDoc : 
marvelDocs) { + addMarvelDocToConnection(conn, marvelDoc); + } + sendCloseExportingConnection(conn); + } catch (IOException e) { + logger.error("error sending data to [{}]: {}", AgentUtils.santizeUrlPwds(conn.getURL()), AgentUtils.santizeUrlPwds(ExceptionsHelper.detailedMessage(e))); + } + } + + @Override + protected void doStart() { + // not initializing keep alive worker here but rather upon first exporting. + // In the case we are sending metrics to the same ES as where the plugin is hosted + // we want to give it some time to start. + } + + + @Override + protected void doStop() { + if (keepAliveThread != null && keepAliveThread.isAlive()) { + keepAliveWorker.closed = true; + keepAliveThread.interrupt(); + try { + keepAliveThread.join(6000); + } catch (InterruptedException e) { + // don't care. + } + } + } + + @Override + protected void doClose() { + } + + // used for testing + String[] getHosts() { + return hosts; + } + + private String getIndexName() { + return indexPrefix + "-" + indexTimeFormatter.print(System.currentTimeMillis()); + + } + + /** + * open a connection to any host, validating it has the template installed if needed + * + * @return a url connection to the selected host or null if no current host is available. + */ + private HttpURLConnection openAndValidateConnection(String method, String path) { + return openAndValidateConnection(method, path, null); + } + + /** + * open a connection to any host, validating it has the template installed if needed + * + * @return a url connection to the selected host or null if no current host is available. + */ + private HttpURLConnection openAndValidateConnection(String method, String path, String contentType) { + if (hosts.length == 0) { + // Due to how Guice injection works and because HttpServer can be optional, + // we can't be 100% sure that the HttpServer is created when the ESExporter + // instance is created. This is specially true in integration tests. 
+ // So if HttpServer is enabled in settings we can safely use the NodeService + // to retrieve the bound address. + BoundTransportAddress boundAddress = null; + if (httpEnabled) { + if ((httpServer != null) && (httpServer.lifecycleState() == Lifecycle.State.STARTED)) { + logger.debug("deriving host setting from httpServer"); + boundAddress = httpServer.info().address(); + } else if (nodeService.info().getHttp() != null) { + logger.debug("deriving host setting from node info API"); + boundAddress = nodeService.info().getHttp().address(); + } + } else { + logger.warn("http server is not enabled no hosts are manually configured"); + return null; + } + + String[] extractedHosts = AgentUtils.extractHostsFromAddress(boundAddress, logger); + if (extractedHosts == null || extractedHosts.length == 0) { + return null; + } + hosts = extractedHosts; + logger.trace("auto-resolved hosts to {}", extractedHosts); + boundToLocalNode = true; + } + + // it's important to have boundToLocalNode persistent to prevent calls during shutdown (causing ugly exceptions) + if (boundToLocalNode && (httpServer != null) && (httpServer.lifecycleState() != Lifecycle.State.STARTED)) { + logger.debug("local node http server is not started. can't connect"); + return null; + } + + // out of for to move faulty hosts to the end + int hostIndex = 0; + try { + for (; hostIndex < hosts.length; hostIndex++) { + String host = hosts[hostIndex]; + if (!checkedAndUploadedIndexTemplate) { + // check templates first on the host + checkedAndUploadedIndexTemplate = checkAndUploadIndexTemplate(host); + if (!checkedAndUploadedIndexTemplate) { + continue; + } + } + HttpURLConnection connection = openConnection(host, method, path, contentType); + if (connection != null) { + return connection; + } + // failed hosts - reset template check , someone may have restarted the target cluster and deleted + // it's data folder. be safe. 
+ checkedAndUploadedIndexTemplate = false; + } + } finally { + if (hostIndex > 0 && hostIndex < hosts.length) { + logger.debug("moving [{}] failed hosts to the end of the list", hostIndex); + String[] newHosts = new String[hosts.length]; + System.arraycopy(hosts, hostIndex, newHosts, 0, hosts.length - hostIndex); + System.arraycopy(hosts, 0, newHosts, hosts.length - hostIndex, hostIndex); + hosts = newHosts; + logger.debug("preferred target host is now [{}]", AgentUtils.santizeUrlPwds(hosts[0])); + } + } + + logger.error("could not connect to any configured elasticsearch instances: [{}]", AgentUtils.santizeUrlPwds(Strings.arrayToCommaDelimitedString(hosts))); + + return null; + + } + + /** open a connection to the given hosts, returning null when not successful * */ + private HttpURLConnection openConnection(String host, String method, String path, @Nullable String contentType) { + try { + final URL url = AgentUtils.parseHostWithPath(host, path); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + + if (conn instanceof HttpsURLConnection && sslSocketFactory != null) { + HttpsURLConnection httpsConn = (HttpsURLConnection) conn; + httpsConn.setSSLSocketFactory(sslSocketFactory); + if (!hostnameVerification) { + httpsConn.setHostnameVerifier(TrustAllHostnameVerifier.INSTANCE); + } + } + + conn.setRequestMethod(method); + conn.setConnectTimeout(timeoutInMillis); + conn.setReadTimeout(readTimeoutInMillis); + if (contentType != null) { + conn.setRequestProperty("Content-Type", contentType); + } + if (url.getUserInfo() != null) { + String basicAuth = "Basic " + Base64.encodeBytes(url.getUserInfo().getBytes("ISO-8859-1")); + conn.setRequestProperty("Authorization", basicAuth); + } + conn.setUseCaches(false); + if (method.equalsIgnoreCase("POST") || method.equalsIgnoreCase("PUT")) { + conn.setDoOutput(true); + } + conn.connect(); + + return conn; + } catch (URISyntaxException e) { + logErrorBasedOnLevel(e, "error parsing host [{}]", 
AgentUtils.santizeUrlPwds(host)); + } catch (IOException e) { + logErrorBasedOnLevel(e, "error connecting to [{}]", AgentUtils.santizeUrlPwds(host)); + } + return null; + } + + private void logErrorBasedOnLevel(Throwable t, String msg, Object... params) { + logger.error(msg + " [" + AgentUtils.santizeUrlPwds(t.getMessage()) + "]", params); + if (logger.isDebugEnabled()) { + logger.debug(msg + ". full error details:\n[{}]", params, AgentUtils.santizeUrlPwds(ExceptionsHelper.detailedMessage(t))); + } + } + + + /** + * Checks if the index templates already exist and if not uploads it + * Any critical error that should prevent data exporting is communicated via an exception. + * + * @return true if template exists or was uploaded successfully. + */ + private boolean checkAndUploadIndexTemplate(final String host) { + byte[] template; + try { + template = Streams.copyToBytesFromClasspath("/marvel_index_template.json"); + } catch (IOException e) { + // throwing an exception to stop exporting process - we don't want to send data unless + // we put in the template for it. + throw new RuntimeException("failed to load marvel_index_template.json", e); + } + + try { + int expectedVersion = AgentUtils.parseIndexVersionFromTemplate(template); + if (expectedVersion < 0) { + throw new RuntimeException("failed to find an index version in pre-configured index template"); + } + + String queryString = ""; + if (templateCheckTimeout != null) { + queryString = "?timeout=" + templateCheckTimeout; + } + HttpURLConnection conn = openConnection(host, "GET", "_template/marvel" + queryString, null); + if (conn == null) { + return false; + } + + boolean hasTemplate = false; + if (conn.getResponseCode() == 200) { + // verify content. 
+ InputStream is = conn.getInputStream(); + byte[] existingTemplate = ByteStreams.toByteArray(is); + is.close(); + int foundVersion = AgentUtils.parseIndexVersionFromTemplate(existingTemplate); + if (foundVersion < 0) { + logger.warn("found an existing index template but couldn't extract it's version. leaving it as is."); + hasTemplate = true; + } else if (foundVersion >= expectedVersion) { + logger.debug("accepting existing index template (version [{}], needed [{}])", foundVersion, expectedVersion); + hasTemplate = true; + } else { + logger.debug("replacing existing index template (version [{}], needed [{}])", foundVersion, expectedVersion); + } + } + // nothing there, lets create it + if (!hasTemplate) { + logger.debug("uploading index template"); + conn = openConnection(host, "PUT", "_template/marvel" + queryString, XContentType.JSON.restContentType()); + OutputStream os = conn.getOutputStream(); + Streams.copy(template, os); + if (!(conn.getResponseCode() == 200 || conn.getResponseCode() == 201)) { + logConnectionError("error adding the marvel template to [" + host + "]", conn); + } else { + hasTemplate = true; + } + conn.getInputStream().close(); // close and release to connection pool. + } + + return hasTemplate; + } catch (IOException e) { + logger.error("failed to verify/upload the marvel template to [{}]:\n{}", AgentUtils.santizeUrlPwds(host), AgentUtils.santizeUrlPwds(e.getMessage())); + return false; + } + } + + private void logConnectionError(String msg, HttpURLConnection conn) { + InputStream inputStream = conn.getErrorStream(); + String err = ""; + if (inputStream != null) { + java.util.Scanner s = new java.util.Scanner(inputStream, "UTF-8").useDelimiter("\\A"); + err = s.hasNext() ? s.next() : ""; + } + + try { + logger.error("{} response code [{} {}]. 
content: [{}]", + AgentUtils.santizeUrlPwds(msg), conn.getResponseCode(), + AgentUtils.santizeUrlPwds(conn.getResponseMessage()), + AgentUtils.santizeUrlPwds(err)); + } catch (IOException e) { + logger.error("{}. connection had an error while reporting the error. tough life.", AgentUtils.santizeUrlPwds(msg)); + } + } + + @Override + public void onRefreshSettings(Settings settings) { + TimeValue newTimeout = settings.getAsTime(SETTINGS_TIMEOUT, null); + if (newTimeout != null) { + logger.info("connection timeout set to [{}]", newTimeout); + timeoutInMillis = (int) newTimeout.millis(); + } + + newTimeout = settings.getAsTime(SETTINGS_READ_TIMEOUT, null); + if (newTimeout != null) { + logger.info("connection read timeout set to [{}]", newTimeout); + readTimeoutInMillis = (int) newTimeout.millis(); + } + + String[] newHosts = settings.getAsArray(SETTINGS_HOSTS, null); + if (newHosts != null) { + logger.info("hosts set to [{}]", AgentUtils.santizeUrlPwds(Strings.arrayToCommaDelimitedString(newHosts))); + this.hosts = newHosts; + this.checkedAndUploadedIndexTemplate = false; + this.boundToLocalNode = false; + } + + Boolean newHostnameVerification = settings.getAsBoolean(SETTINGS_SSL_HOSTNAME_VERIFICATION, null); + if (newHostnameVerification != null) { + logger.info("hostname verification set to [{}]", newHostnameVerification); + this.hostnameVerification = newHostnameVerification; + } + } + + protected void initKeepAliveThread() { + keepAliveThread = new Thread(keepAliveWorker, EsExecutors.threadName(settings, "keep_alive")); + keepAliveThread.setDaemon(true); + keepAliveThread.start(); + } + + /** + * Sadly we need to make sure we keep the connection open to the target ES a + * Java's connection pooling closes connections if idle for 5sec. 
+ */ + class ConnectionKeepAliveWorker implements Runnable { + volatile boolean closed = false; + + @Override + public void run() { + logger.trace("starting keep alive thread"); + while (!closed) { + try { + Thread.sleep(1000); + if (closed) { + return; + } + String[] currentHosts = hosts; + if (currentHosts.length == 0) { + logger.trace("keep alive thread shutting down. no hosts defined"); + return; // no hosts configured at the moment. + } + HttpURLConnection conn = openConnection(currentHosts[0], "GET", "", null); + if (conn == null) { + logger.trace("keep alive thread shutting down. failed to open connection to current host [{}]", AgentUtils.santizeUrlPwds(currentHosts[0])); + return; + } else { + conn.getInputStream().close(); // close and release to connection pool. + } + } catch (InterruptedException e) { + // ignore, if closed, good.... + } catch (Throwable t) { + logger.debug("error in keep alive thread, shutting down (will be restarted after a successful connection has been made) {}", + AgentUtils.santizeUrlPwds(ExceptionsHelper.detailedMessage(t))); + return; + } + } + } + } + + private static final String SETTINGS_SSL_PREFIX = SETTINGS_PREFIX + "ssl."; + + public static final String SETTINGS_SSL_PROTOCOL = SETTINGS_SSL_PREFIX + "protocol"; + public static final String SETTINGS_SSL_TRUSTSTORE = SETTINGS_SSL_PREFIX + "truststore.path"; + public static final String SETTINGS_SSL_TRUSTSTORE_PASSWORD = SETTINGS_SSL_PREFIX + "truststore.password"; + public static final String SETTINGS_SSL_TRUSTSTORE_ALGORITHM = SETTINGS_SSL_PREFIX + "truststore.algorithm"; + public static final String SETTINGS_SSL_HOSTNAME_VERIFICATION = SETTINGS_SSL_PREFIX + "hostname_verification"; + + /** SSL Initialization * */ + public SSLSocketFactory createSSLSocketFactory(Settings settings) { + SSLContext sslContext; + // Initialize sslContext + try { + String sslContextProtocol = settings.get(SETTINGS_SSL_PROTOCOL, "TLS"); + String trustStore = settings.get(SETTINGS_SSL_TRUSTSTORE, 
System.getProperty("javax.net.ssl.trustStore")); + String trustStorePassword = settings.get(SETTINGS_SSL_TRUSTSTORE_PASSWORD, System.getProperty("javax.net.ssl.trustStorePassword")); + String trustStoreAlgorithm = settings.get(SETTINGS_SSL_TRUSTSTORE_ALGORITHM, System.getProperty("ssl.TrustManagerFactory.algorithm")); + + if (trustStore == null) { + throw new RuntimeException("truststore is not configured, use " + SETTINGS_SSL_TRUSTSTORE); + } + + if (trustStoreAlgorithm == null) { + trustStoreAlgorithm = TrustManagerFactory.getDefaultAlgorithm(); + } + + logger.debug("SSL: using trustStore[{}], trustAlgorithm[{}]", trustStore, trustStoreAlgorithm); + + Path trustStorePath = environment.configFile().resolve(trustStore); + if (!Files.exists(trustStorePath)) { + throw new FileNotFoundException("Truststore at path [" + trustStorePath + "] does not exist"); + } + + TrustManager[] trustManagers; + try (InputStream trustStoreStream = Files.newInputStream(trustStorePath)) { + // Load TrustStore + KeyStore ks = KeyStore.getInstance("jks"); + ks.load(trustStoreStream, trustStorePassword == null ? null : trustStorePassword.toCharArray()); + + // Initialize a trust manager factory with the trusted store + TrustManagerFactory trustFactory = TrustManagerFactory.getInstance(trustStoreAlgorithm); + trustFactory.init(ks); + + // Retrieve the trust managers from the factory + trustManagers = trustFactory.getTrustManagers(); + } catch (Exception e) { + throw new RuntimeException("Failed to initialize a TrustManagerFactory", e); + } + + sslContext = SSLContext.getInstance(sslContextProtocol); + sslContext.init(null, trustManagers, null); + } catch (Exception e) { + throw new RuntimeException("[marvel.agent.exporter] failed to initialize the SSLContext", e); + } + return sslContext.getSocketFactory(); + } + + /** + * Trust all hostname verifier. 
This simply returns true to completely disable hostname verification + */ + static class TrustAllHostnameVerifier implements HostnameVerifier { + static final HostnameVerifier INSTANCE = new TrustAllHostnameVerifier(); + + private TrustAllHostnameVerifier() { + } + + @Override + public boolean verify(String s, SSLSession sslSession) { + return true; + } + } +} + diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/MarvelDoc.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/MarvelDoc.java new file mode 100644 index 00000000000..27b0c11482e --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/exporter/MarvelDoc.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.exporter; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; + +public abstract class MarvelDoc implements ToXContent { + + private final String clusterName; + private final String type; + private final long timestamp; + + public MarvelDoc(String clusterName, String type, long timestamp) { + this.clusterName = clusterName; + this.type = type; + this.timestamp = timestamp; + } + + public String clusterName() { + return clusterName; + } + + public String type() { + return type; + } + + public long timestamp() { + return timestamp; + } + + public abstract T payload(); + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.CLUSTER_NAME, clusterName()); + DateTime timestampDateTime = new 
DateTime(timestamp(), DateTimeZone.UTC); + builder.field(Fields.TIMESTAMP, timestampDateTime.toString()); + return builder; + } + + static final class Fields { + static final XContentBuilderString CLUSTER_NAME = new XContentBuilderString("cluster_name"); + static final XContentBuilderString TIMESTAMP = new XContentBuilderString("timestamp"); + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/agent/support/AgentUtils.java b/marvel/src/main/java/org/elasticsearch/marvel/agent/support/AgentUtils.java new file mode 100644 index 00000000000..e1a6452eae7 --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/agent/support/AgentUtils.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.support; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.*; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class AgentUtils { + + public static XContentBuilder nodeToXContent(DiscoveryNode node, XContentBuilder builder) throws IOException { + return nodeToXContent(node, null, builder); + } + + public static XContentBuilder nodeToXContent(DiscoveryNode node, Boolean isMasterNode, XContentBuilder builder) throws IOException { + builder.field("id", node.id()); + builder.field("name", node.name()); + builder.field("transport_address", node.address()); + + if (node.address().uniqueAddressTypeId() 
== 1) { // InetSocket + InetSocketTransportAddress address = (InetSocketTransportAddress) node.address(); + InetSocketAddress inetSocketAddress = address.address(); + InetAddress inetAddress = inetSocketAddress.getAddress(); + if (inetAddress != null) { + builder.field("ip", inetAddress.getHostAddress()); + builder.field("host", inetAddress.getHostName()); + builder.field("ip_port", inetAddress.getHostAddress() + ":" + inetSocketAddress.getPort()); + } + } else if (node.address().uniqueAddressTypeId() == 2) { // local transport + builder.field("ip_port", "_" + node.address()); // will end up being "_local[ID]" + } + + builder.field("master_node", node.isMasterNode()); + builder.field("data_node", node.isDataNode()); + if (isMasterNode != null) { + builder.field("master", isMasterNode.booleanValue()); + } + + if (!node.attributes().isEmpty()) { + builder.startObject("attributes"); + for (Map.Entry attr : node.attributes().entrySet()) { + builder.field(attr.getKey(), attr.getValue()); + } + builder.endObject(); + } + return builder; + } + + public static String nodeDescription(DiscoveryNode node) { + StringBuilder builder = new StringBuilder().append("[").append(node.name()).append("]"); + if (node.address().uniqueAddressTypeId() == 1) { // InetSocket + InetSocketTransportAddress address = (InetSocketTransportAddress) node.address(); + InetSocketAddress inetSocketAddress = address.address(); + InetAddress inetAddress = inetSocketAddress.getAddress(); + if (inetAddress != null) { + builder.append("[").append(inetAddress.getHostAddress()).append(":").append(inetSocketAddress.getPort()).append("]"); + } + } + return builder.toString(); + } + + public static String[] extractHostsFromAddress(BoundTransportAddress boundAddress, ESLogger logger) { + if (boundAddress == null || boundAddress.boundAddress() == null) { + logger.debug("local http server is not yet started. 
// NOTE(review): reconstructed from a collapsed patch chunk - trailing static helpers of
// AgentUtils (host/URL parsing and log sanitizing). "santizeUrlPwds" is a typo of
// "sanitizeUrlPwds" but the name is kept because callers throughout the agent use it.
public class AgentUtils {

    /**
     * Parses a host string into a URL and resolves {@code path} against it.
     * A missing scheme defaults to {@code http://}; a missing port defaults to 9200
     * (which sadly requires rebuilding the URL). A trailing slash is appended so that
     * sub paths resolve under the host instead of replacing parent folders.
     *
     * @param host host spec, e.g. {@code "localhost"}, {@code "https://u:p@host:9400/base"}
     * @param path path to resolve against the host URL (may be empty)
     * @return the resolved URL
     * @throws URISyntaxException    if the host URL cannot be converted to a URI
     * @throws MalformedURLException if the host spec is not a valid URL
     */
    public static URL parseHostWithPath(String host, String path) throws URISyntaxException, MalformedURLException {
        if (!host.contains("://")) {
            // no scheme given - prefix with http
            host = "http://" + host;
        }
        if (!host.endsWith("/")) {
            // make sure we can safely resolve sub paths and not replace parent folders
            host = host + "/";
        }

        URL hostUrl = new URL(host);

        if (hostUrl.getPort() == -1) {
            // url has no port, default to 9200 - sadly we need to rebuild the URL
            StringBuilder newUrl = new StringBuilder(hostUrl.getProtocol() + "://");
            if (hostUrl.getUserInfo() != null) {
                newUrl.append(hostUrl.getUserInfo()).append("@");
            }
            newUrl.append(hostUrl.getHost()).append(":9200").append(hostUrl.toURI().getPath());
            hostUrl = new URL(newUrl.toString());
        }
        return new URL(hostUrl, path);
    }

    // Compiled once: the template version is checked against every candidate host, and the
    // original code recompiled this pattern on each call. Regex kept byte-identical.
    private static final Pattern INDEX_VERSION_PATTERN =
            Pattern.compile("marvel.index_format\"\\s*:\\s*\"?(\\d+)\"?");

    /**
     * Extracts the marvel index format version from a JSON index template.
     *
     * @param template raw UTF-8 bytes of the template
     * @return the version number, or -1 when no {@code marvel.index_format} entry is found
     * @throws UnsupportedEncodingException never in practice (UTF-8 is always supported);
     *         kept for signature compatibility with existing callers
     */
    public static int parseIndexVersionFromTemplate(byte[] template) throws UnsupportedEncodingException {
        Matcher matcher = INDEX_VERSION_PATTERN.matcher(new String(template, "UTF-8"));
        if (matcher.find()) {
            return Integer.parseInt(matcher.group(1));
        }
        return -1;
    }

    // characters permitted in the userinfo portion of a URL (RFC 3986), used to spot "user:password@"
    private static final String userInfoChars = "\\w-\\._~!$&\\'\\(\\)*+,;=%";
    private static final Pattern urlPwdSanitizer =
            Pattern.compile("([" + userInfoChars + "]+?):[" + userInfoChars + "]+?@");

    /**
     * Replaces the password of any {@code user:password@host} URL userinfo occurring in the
     * given text with {@code XXXXXX}, so credentials never reach the logs.
     *
     * @param text any object; its {@code toString()} is sanitized
     * @return the sanitized text
     */
    public static String santizeUrlPwds(Object text) {
        Matcher matcher = urlPwdSanitizer.matcher(text.toString());
        return matcher.replaceAll("$1:XXXXXX@");
    }
}
+ */ +package org.elasticsearch.marvel.license; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.license.plugin.LicenseVersion; +import org.elasticsearch.marvel.MarvelVersion; + +public class LicenseModule extends AbstractModule { + + public LicenseModule() { + verifyLicensePlugin(); + } + + @Override + protected void configure() { + bind(LicenseService.class).asEagerSingleton(); + } + + private void verifyLicensePlugin() { + try { + getClass().getClassLoader().loadClass("org.elasticsearch.license.plugin.LicensePlugin"); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("marvel plugin requires the license plugin to be installed"); + } + + if (LicenseVersion.CURRENT.before(MarvelVersion.CURRENT.minLicenseCompatibilityVersion)) { + throw new ElasticsearchException("marvel [" + MarvelVersion.CURRENT + + "] requires minimum license plugin version [" + MarvelVersion.CURRENT.minLicenseCompatibilityVersion + + "], but installed license plugin version is [" + LicenseVersion.CURRENT + "]"); + } + } +} \ No newline at end of file diff --git a/marvel/src/main/java/org/elasticsearch/marvel/license/LicenseService.java b/marvel/src/main/java/org/elasticsearch/marvel/license/LicenseService.java new file mode 100644 index 00000000000..ba43e0dba7a --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/license/LicenseService.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.license; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.license.core.License; +import org.elasticsearch.license.plugin.core.LicensesClientService; +import org.elasticsearch.license.plugin.core.LicensesService; +import org.elasticsearch.marvel.MarvelPlugin; +import org.elasticsearch.marvel.mode.Mode; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Locale; + +public class LicenseService extends AbstractLifecycleComponent { + + public static final String FEATURE_NAME = MarvelPlugin.NAME; + + private static final LicensesService.TrialLicenseOptions TRIAL_LICENSE_OPTIONS = + new LicensesService.TrialLicenseOptions(TimeValue.timeValueHours(30 * 24), 1000); + + private static final FormatDateTimeFormatter DATE_FORMATTER = Joda.forPattern("EEEE, MMMMM dd, yyyy", Locale.ROOT); + + private final LicensesClientService clientService; + private final Collection expirationLoggers; + + private volatile Mode mode; + + @Inject + public LicenseService(Settings settings, LicensesClientService clientService) { + super(settings); + this.clientService = clientService; + this.mode = Mode.LITE; + this.expirationLoggers = Arrays.asList( + new LicensesService.ExpirationCallback.Pre(days(7), days(30), days(1)) { + @Override + public void on(License license, LicensesService.ExpirationStatus status) { + logger.error("\n" + + "#\n" + + "# Marvel license will expire on [{}].\n" + + "# Have a new license? please update it. 
Otherwise, please reach out to your support contact.\n" + + "#", DATE_FORMATTER.printer().print(license.expiryDate())); + } + }, + new LicensesService.ExpirationCallback.Pre(days(0), days(7), minutes(10)) { + @Override + public void on(License license, LicensesService.ExpirationStatus status) { + logger.error("\n" + + "#\n" + + "# Marvel license will expire on [{}].\n" + + "# Have a new license? please update it. Otherwise, please reach out to your support contact.\n" + + "#", DATE_FORMATTER.printer().print(license.expiryDate())); + } + }, + new LicensesService.ExpirationCallback.Post(days(0), null, minutes(10)) { + @Override + public void on(License license, LicensesService.ExpirationStatus status) { + logger.error("\n" + + "#\n" + + "# MARVEL LICENSE WAS EXPIRED ON [{}].\n" + + "# HAVE A NEW LICENSE? PLEASE UPDATE IT. OTHERWISE, PLEASE REACH OUT TO YOUR SUPPORT CONTACT.\n" + + "#", DATE_FORMATTER.printer().print(license.expiryDate())); + } + } + ); + } + + @Override + protected void doStart() throws ElasticsearchException { + clientService.register(FEATURE_NAME, TRIAL_LICENSE_OPTIONS, expirationLoggers, new InternalListener(this)); + } + + @Override + protected void doStop() throws ElasticsearchException { + } + + @Override + protected void doClose() throws ElasticsearchException { + } + + static TimeValue days(int days) { + return TimeValue.timeValueHours(days * 24); + } + + static TimeValue minutes(int minutes) { + return TimeValue.timeValueMinutes(minutes); + } + + /** + * @return the current marvel's operating mode + */ + public Mode mode() { + return mode; + } + + class InternalListener implements LicensesClientService.Listener { + + private final LicenseService service; + + public InternalListener(LicenseService service) { + this.service = service; + } + + @Override + public void onEnabled(License license) { + try { + service.mode = Mode.fromName(license.type()); + } catch (IllegalArgumentException e) { + service.mode = Mode.LITE; + } + } + + @Override + 
public void onDisabled(License license) { + service.mode = Mode.LITE; + } + } +} diff --git a/marvel/src/main/java/org/elasticsearch/marvel/mode/Mode.java b/marvel/src/main/java/org/elasticsearch/marvel/mode/Mode.java new file mode 100644 index 00000000000..9947aa0008a --- /dev/null +++ b/marvel/src/main/java/org/elasticsearch/marvel/mode/Mode.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.mode; + +import org.elasticsearch.ElasticsearchException; + +import java.util.Locale; + +/** + * Marvel's operating mode + */ +public enum Mode { + + /** + * Marvel runs in downgraded mode + */ + TRIAL(0), + + /** + * Marvel runs in downgraded mode + */ + LITE(0), + + /** + * Marvel runs in normal mode + */ + STANDARD(1); + + private final byte id; + + Mode(int id) { + this.id = (byte) id; + } + + public byte getId() { + return id; + } + + public static Mode fromId(byte id) { + switch (id) { + case 0: + return TRIAL; + case 1: + return LITE; + case 2: + return STANDARD; + case 3: + default: + throw new ElasticsearchException("unknown marvel mode id [" + id + "]"); + } + } + + public static Mode fromName(String name) { + switch (name.toLowerCase(Locale.ROOT)) { + case "trial": return TRIAL; + case "lite": return LITE; + case "standard" : return STANDARD; + default: + throw new ElasticsearchException("unknown marvel mode name [" + name + "]"); + } + } +} diff --git a/marvel/src/main/resources/es-plugin.properties b/marvel/src/main/resources/es-plugin.properties new file mode 100644 index 00000000000..a4493030137 --- /dev/null +++ b/marvel/src/main/resources/es-plugin.properties @@ -0,0 +1,2 @@ +plugin=org.elasticsearch.marvel.MarvelPlugin +version=${project.version} diff --git a/watcher/lib/template.json 
b/marvel/src/main/resources/marvel_index_template.json similarity index 99% rename from watcher/lib/template.json rename to marvel/src/main/resources/marvel_index_template.json index 362fc0d2c38..1fa44c9115c 100644 --- a/watcher/lib/template.json +++ b/marvel/src/main/resources/marvel_index_template.json @@ -443,5 +443,4 @@ } } } -} - +} \ No newline at end of file diff --git a/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginClientTests.java b/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginClientTests.java new file mode 100644 index 00000000000..1fcbbe867bc --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginClientTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel; + +import org.elasticsearch.client.Client; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import java.util.Collection; + +import static org.hamcrest.Matchers.is; + +public class MarvelPluginClientTests extends ElasticsearchTestCase { + + @Test + public void testModulesWithClientSettings() { + Settings settings = Settings.builder() + .put(Client.CLIENT_TYPE_SETTING, TransportClient.CLIENT_TYPE) + .build(); + + MarvelPlugin plugin = new MarvelPlugin(settings); + assertThat(plugin.isEnabled(), is(false)); + Collection> modules = plugin.modules(); + assertThat(modules.size(), is(0)); + } + + @Test + public void testModulesWithNodeSettings() { + // these settings mimic what ES does when running as a node... 
+ Settings settings = Settings.builder() + .put(Client.CLIENT_TYPE_SETTING, "node") + .build(); + MarvelPlugin plugin = new MarvelPlugin(settings); + assertThat(plugin.isEnabled(), is(true)); + Collection> modules = plugin.modules(); + assertThat(modules.size(), is(1)); + } + +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginTests.java b/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginTests.java new file mode 100644 index 00000000000..403912d2378 --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/MarvelPluginTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel; + +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.info.PluginInfo; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.AgentService; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.elasticsearch.tribe.TribeService; +import org.junit.Test; + +import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.TEST; +import static org.hamcrest.Matchers.equalTo; + +@ClusterScope(scope = TEST, transportClientRatio = 0, numClientNodes = 0, numDataNodes = 0) +public class MarvelPluginTests extends ElasticsearchIntegrationTest { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.settingsBuilder() + .put(super.nodeSettings(nodeOrdinal)) + .put("plugin.types", MarvelPlugin.class.getName()) + .put(PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false) + 
.build(); + } + + @Test + public void testMarvelEnabled() { + internalCluster().startNode(Settings.builder().put(MarvelPlugin.ENABLED, true).build()); + assertPluginIsLoaded(); + assertServiceIsBound(AgentService.class); + } + + @Test + public void testMarvelDisabled() { + internalCluster().startNode(Settings.builder().put(MarvelPlugin.ENABLED, false).build()); + assertPluginIsLoaded(); + assertServiceIsNotBound(AgentService.class); + } + + @Test + public void testMarvelDisabledOnTribeNode() { + internalCluster().startNode(Settings.builder().put(TribeService.TRIBE_NAME, "t1").build()); + assertPluginIsLoaded(); + assertServiceIsNotBound(AgentService.class); + } + + private void assertPluginIsLoaded() { + NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().setPlugins(true).get(); + for (NodeInfo nodeInfo : response) { + assertNotNull(nodeInfo.getPlugins()); + + boolean found = false; + for (PluginInfo plugin : nodeInfo.getPlugins().getInfos()) { + assertNotNull(plugin); + + if (MarvelPlugin.NAME.equals(plugin.getName())) { + found = true; + break; + } + } + assertThat("marvel plugin not found", found, equalTo(true)); + } + } + + private void assertServiceIsBound(Class klass) { + try { + Object binding = internalCluster().getDataNodeInstance(klass); + assertNotNull(binding); + assertTrue(klass.isInstance(binding)); + } catch (Exception e) { + fail("no service bound for class " + klass.getSimpleName()); + } + } + + private void assertServiceIsNotBound(Class klass) { + try { + internalCluster().getDataNodeInstance(klass); + fail("should have thrown an exception about missing implementation"); + } catch (Exception ce) { + assertThat("message contains error about missing implemention: " + ce.getMessage(), + ce.getMessage().contains("No implementation"), equalTo(true)); + } + } +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/MarvelVersionTests.java b/marvel/src/test/java/org/elasticsearch/marvel/MarvelVersionTests.java new file mode 
100644 index 00000000000..d377aa59112 --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/MarvelVersionTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel; + +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import static org.hamcrest.Matchers.equalTo; + +public class MarvelVersionTests extends ElasticsearchTestCase { + + @Test + public void testVersionFromString() { + assertThat(MarvelVersion.fromString("2.0.0-beta1"), equalTo(MarvelVersion.V_2_0_0_Beta1)); + } + + @Test + public void testVersionNumber() { + assertThat(MarvelVersion.V_2_0_0_Beta1.number(), equalTo("2.0.0-beta1")); + } +} \ No newline at end of file diff --git a/marvel/src/test/java/org/elasticsearch/marvel/agent/AgentUtilsTests.java b/marvel/src/test/java/org/elasticsearch/marvel/agent/AgentUtilsTests.java new file mode 100644 index 00000000000..37f5ef2ab0a --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/agent/AgentUtilsTests.java @@ -0,0 +1,168 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent; + +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.marvel.agent.support.AgentUtils; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLEncoder; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + + +public class AgentUtilsTests extends ElasticsearchTestCase { + + @Test + public void testVersionIsExtractableFromIndexTemplate() throws IOException { + byte[] template = Streams.copyToBytesFromClasspath("/marvel_index_template.json"); + MatcherAssert.assertThat(AgentUtils.parseIndexVersionFromTemplate(template), Matchers.greaterThan(0)); + } + + @Test + public void testHostParsing() throws MalformedURLException, URISyntaxException { + URL url = AgentUtils.parseHostWithPath("localhost:9200", ""); + verifyUrl(url, "http", "localhost", 9200, "/"); + + url = AgentUtils.parseHostWithPath("localhost", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("http://localhost:9200", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("http://localhost", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("https://localhost:9200", "_bulk"); + verifyUrl(url, "https", "localhost", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("https://boaz-air.local:9200", "_bulk"); + verifyUrl(url, "https", "boaz-air.local", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("boaz:test@localhost:9200", ""); + verifyUrl(url, "http", "localhost", 9200, "/", "boaz:test"); + + url = 
AgentUtils.parseHostWithPath("boaz:test@localhost", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("http://boaz:test@localhost:9200", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("http://boaz:test@localhost", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("https://boaz:test@localhost:9200", "_bulk"); + verifyUrl(url, "https", "localhost", 9200, "/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("boaz:test@localhost:9200/suburl", ""); + verifyUrl(url, "http", "localhost", 9200, "/suburl/", "boaz:test"); + + url = AgentUtils.parseHostWithPath("boaz:test@localhost:9200/suburl/", ""); + verifyUrl(url, "http", "localhost", 9200, "/suburl/", "boaz:test"); + + url = AgentUtils.parseHostWithPath("localhost/suburl", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/suburl/_bulk"); + + url = AgentUtils.parseHostWithPath("http://boaz:test@localhost:9200/suburl/suburl1", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/suburl/suburl1/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("http://boaz:test@localhost/suburl", "_bulk"); + verifyUrl(url, "http", "localhost", 9200, "/suburl/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("https://boaz:test@localhost:9200/suburl", "_bulk"); + verifyUrl(url, "https", "localhost", 9200, "/suburl/_bulk", "boaz:test"); + + url = AgentUtils.parseHostWithPath("https://user:test@server_with_underscore:9300", "_bulk"); + verifyUrl(url, "https", "server_with_underscore", 9300, "/_bulk", "user:test"); + + url = AgentUtils.parseHostWithPath("user:test@server_with_underscore:9300", "_bulk"); + verifyUrl(url, "http", "server_with_underscore", 9300, "/_bulk", "user:test"); + + url = AgentUtils.parseHostWithPath("server_with_underscore:9300", "_bulk"); + verifyUrl(url, "http", 
"server_with_underscore", 9300, "/_bulk"); + + url = AgentUtils.parseHostWithPath("server_with_underscore", "_bulk"); + verifyUrl(url, "http", "server_with_underscore", 9200, "/_bulk"); + + url = AgentUtils.parseHostWithPath("https://user:test@server-dash:9300", "_bulk"); + verifyUrl(url, "https", "server-dash", 9300, "/_bulk", "user:test"); + + url = AgentUtils.parseHostWithPath("user:test@server-dash:9300", "_bulk"); + verifyUrl(url, "http", "server-dash", 9300, "/_bulk", "user:test"); + + url = AgentUtils.parseHostWithPath("server-dash:9300", "_bulk"); + verifyUrl(url, "http", "server-dash", 9300, "/_bulk"); + + url = AgentUtils.parseHostWithPath("server-dash", "_bulk"); + verifyUrl(url, "http", "server-dash", 9200, "/_bulk"); + } + + void verifyUrl(URL url, String protocol, String host, int port, String path) throws URISyntaxException { + assertThat(url.getProtocol(), equalTo(protocol)); + assertThat(url.getHost(), equalTo(host)); + assertThat(url.getPort(), equalTo(port)); + assertThat(url.toURI().getPath(), equalTo(path)); + } + + void verifyUrl(URL url, String protocol, String host, int port, String path, String userInfo) throws URISyntaxException { + verifyUrl(url, protocol, host, port, path); + assertThat(url.getUserInfo(), equalTo(userInfo)); + + } + + @Test + public void sanitizeUrlPadTest() throws UnsupportedEncodingException { + String pwd = URLEncoder.encode(randomRealisticUnicodeOfCodepointLengthBetween(3, 20), "UTF-8"); + String[] inputs = new String[]{ + "https://boaz:" + pwd + "@hostname:9200", + "http://boaz:" + pwd + "@hostname:9200", + "boaz:" + pwd + "@hostname", + "boaz:" + pwd + "@hostname/hello", + "Parse exception in [boaz:" + pwd + "@hostname:9200,boaz1:" + pwd + "@hostname \n" + + "caused: by exception ,boaz1:" + pwd + "@hostname", + "failed to upload index template, stopping export\n" + + "java.lang.RuntimeException: failed to load/verify index template\n" + + " at 
org.elasticsearch.marvel.agent.exporter.ESExporter.checkAndUploadIndexTemplate(ESExporter.java:525)\n" + + " at org.elasticsearch.marvel.agent.exporter.ESExporter.openExportingConnection(ESExporter.java:213)\n" + + " at org.elasticsearch.marvel.agent.exporter.ESExporter.exportXContent(ESExporter.java:285)\n" + + " at org.elasticsearch.marvel.agent.exporter.ESExporter.exportClusterStats(ESExporter.java:206)\n" + + " at org.elasticsearch.marvel.agent.AgentService$ExportingWorker.exportClusterStats(AgentService.java:288)\n" + + " at org.elasticsearch.marvel.agent.AgentService$ExportingWorker.run(AgentService.java:245)\n" + + " at java.lang.Thread.run(Thread.java:745)\n" + + "Caused by: java.io.IOException: Server returned HTTP response code: 401 for URL: http://marvel_exporter:" + pwd + "@localhost:9200/_template/marvel\n" + + " at sun.reflect.GeneratedConstructorAccessor3.createMarvelDoc(Unknown Source)\n" + + " at sun.reflect.DelegatingConstructorAccessorImpl.createMarvelDoc(DelegatingConstructorAccessorImpl.java:45)\n" + + " at java.lang.reflect.Constructor.createMarvelDoc(Constructor.java:526)\n" + + " at sun.net.www.protocol.http.HttpURLConnection$6.run(HttpURLConnection.java:1675)\n" + + " at sun.net.www.protocol.http.HttpURLConnection$6.run(HttpURLConnection.java:1673)\n" + + " at java.security.AccessController.doPrivileged(Native Method)\n" + + " at sun.net.www.protocol.http.HttpURLConnection.getChainedException(HttpURLConnection.java:1671)\n" + + " at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1244)\n" + + " at org.elasticsearch.marvel.agent.exporter.ESExporter.checkAndUploadIndexTemplate(ESExporter.java:519)\n" + + " ... 
6 more\n" + + "Caused by: java.io.IOException: Server returned HTTP response code: 401 for URL: http://marvel_exporter:" + pwd + "@localhost:9200/_template/marvel\n" + + " at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1626)\n" + + " at java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:468)\n" + + " at org.elasticsearch.marvel.agent.exporter.ESExporter.checkAndUploadIndexTemplate(ESExporter.java:514)\n" + + " ... 6 more" + }; + + for (String input : inputs) { + String sanitized = AgentUtils.santizeUrlPwds(input); + assertThat(sanitized, not(containsString(pwd))); + } + } +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollectorTests.java b/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollectorTests.java new file mode 100644 index 00000000000..cd01c2d9562 --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexCollectorTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.collector.indices; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.exporter.MarvelDoc; +import org.elasticsearch.test.ElasticsearchSingleNodeTest; +import org.junit.Test; + +import java.util.Collection; +import java.util.Iterator; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.*; + +public class IndexCollectorTests extends ElasticsearchSingleNodeTest { + + @Test + public void testIndexCollectorNoIndices() throws Exception { + Collection results = newIndexCollector().doCollect(); + assertThat(results, is(empty())); + } + + @Test + public void testIndexCollectorOneIndex() throws Exception { + int nbDocs = randomIntBetween(1, 20); + for (int i = 0; i < nbDocs; i++) { + client().prepareIndex("test", "test").setSource("num", i).get(); + } + client().admin().indices().prepareRefresh().get(); + assertHitCount(client().prepareCount().get(), nbDocs); + + Collection results = newIndexCollector().doCollect(); + assertThat(results, hasSize(1)); + + MarvelDoc marvelDoc = results.iterator().next(); + assertNotNull(marvelDoc); + assertThat(marvelDoc, instanceOf(IndexMarvelDoc.class)); + + IndexMarvelDoc indexMarvelDoc = (IndexMarvelDoc) marvelDoc; + assertThat(indexMarvelDoc.clusterName(), equalTo(client().admin().cluster().prepareHealth().get().getClusterName())); + assertThat(indexMarvelDoc.timestamp(), greaterThan(0L)); + assertThat(indexMarvelDoc.type(), equalTo(IndexCollector.TYPE)); + + assertThat(indexMarvelDoc.getIndex(), equalTo("test")); + assertNotNull(indexMarvelDoc.getDocs()); + assertThat(indexMarvelDoc.getDocs().getCount(), equalTo((long) nbDocs)); + assertNotNull(indexMarvelDoc.getStore()); + assertThat(indexMarvelDoc.getStore().getSizeInBytes(), greaterThan(0L)); + 
assertThat(indexMarvelDoc.getStore().getThrottleTimeInMillis(), equalTo(0L)); + assertNotNull(indexMarvelDoc.getIndexing()); + assertThat(indexMarvelDoc.getIndexing().getThrottleTimeInMillis(), equalTo(0L)); + } + + @Test + public void testIndexCollectorMultipleIndices() throws Exception { + int nbIndices = randomIntBetween(1, 5); + int[] docsPerIndex = new int[nbIndices]; + + for (int i = 0; i < nbIndices; i++) { + docsPerIndex[i] = randomIntBetween(1, 20); + for (int j = 0; j < docsPerIndex[i]; j++) { + client().prepareIndex("test-" + i, "test").setSource("num", i).get(); + } + } + + String clusterName = client().admin().cluster().prepareHealth().get().getClusterName(); + client().admin().indices().prepareRefresh().get(); + for (int i = 0; i < nbIndices; i++) { + assertHitCount(client().prepareCount("test-" + i).get(), docsPerIndex[i]); + } + + Collection results = newIndexCollector().doCollect(); + assertThat(results, hasSize(nbIndices)); + + for (int i = 0; i < nbIndices; i++) { + boolean found = false; + + Iterator it = results.iterator(); + while (!found && it.hasNext()) { + MarvelDoc marvelDoc = it.next(); + assertThat(marvelDoc, instanceOf(IndexMarvelDoc.class)); + + IndexMarvelDoc indexMarvelDoc = (IndexMarvelDoc) marvelDoc; + if (indexMarvelDoc.getIndex().equals("test-" + i)) { + assertThat(indexMarvelDoc.clusterName(), equalTo(clusterName)); + assertThat(indexMarvelDoc.timestamp(), greaterThan(0L)); + assertThat(indexMarvelDoc.type(), equalTo(IndexCollector.TYPE)); + + assertNotNull(indexMarvelDoc.getDocs()); + assertThat(indexMarvelDoc.getDocs().getCount(), equalTo((long) docsPerIndex[i])); + assertNotNull(indexMarvelDoc.getStore()); + assertThat(indexMarvelDoc.getStore().getSizeInBytes(), greaterThan(0L)); + assertThat(indexMarvelDoc.getStore().getThrottleTimeInMillis(), equalTo(0L)); + assertNotNull(indexMarvelDoc.getIndexing()); + assertThat(indexMarvelDoc.getIndexing().getThrottleTimeInMillis(), equalTo(0L)); + found = true; + } + } + 
assertThat("could not find collected stats for index [test-" + i + "]", found, is(true)); + } + } + + private IndexCollector newIndexCollector() { + return new IndexCollector(getInstanceFromNode(Settings.class), getInstanceFromNode(ClusterService.class), getInstanceFromNode(ClusterName.class), client()); + } +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDocTests.java b/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDocTests.java new file mode 100644 index 00000000000..4401c3f70de --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/agent/collector/indices/IndexMarvelDocTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.agent.collector.indices; + +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import static org.hamcrest.Matchers.equalTo; + +public class IndexMarvelDocTests extends ElasticsearchTestCase { + + @Test + public void testCreateMarvelDoc() { + String cluster = randomUnicodeOfLength(10); + String type = randomUnicodeOfLength(10); + long timestamp = randomLong(); + String index = randomUnicodeOfLength(10); + long docsCount = randomLong(); + long storeSize = randomLong(); + long storeThrottle = randomLong(); + long indexingThrottle = randomLong(); + + IndexMarvelDoc marvelDoc = IndexMarvelDoc.createMarvelDoc(cluster, type, timestamp, + index, docsCount, storeSize, storeThrottle, indexingThrottle); + + assertNotNull(marvelDoc); + assertThat(marvelDoc.clusterName(), equalTo(cluster)); + assertThat(marvelDoc.type(), equalTo(type)); + assertThat(marvelDoc.timestamp(), equalTo(timestamp)); + assertThat(marvelDoc.getIndex(), equalTo(index)); + assertNotNull(marvelDoc.getDocs()); + 
assertThat(marvelDoc.getDocs().getCount(), equalTo(docsCount)); + assertNotNull(marvelDoc.getStore()); + assertThat(marvelDoc.getStore().getSizeInBytes(), equalTo(storeSize)); + assertThat(marvelDoc.getStore().getThrottleTimeInMillis(), equalTo(storeThrottle)); + assertNotNull(marvelDoc.getIndexing()); + assertThat(marvelDoc.getIndexing().getThrottleTimeInMillis(), equalTo(indexingThrottle)); + } +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/agent/exporter/HttpESExporterTests.java b/marvel/src/test/java/org/elasticsearch/marvel/agent/exporter/HttpESExporterTests.java new file mode 100644 index 00000000000..2d23979d652 --- /dev/null +++ b/marvel/src/test/java/org/elasticsearch/marvel/agent/exporter/HttpESExporterTests.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.marvel.agent.exporter; + +import com.google.common.base.Predicate; +import com.google.common.collect.ImmutableList; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.marvel.agent.AgentService; +import org.elasticsearch.marvel.agent.collector.indices.IndexMarvelDoc; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicLong; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + + +// Transport Client instantiation also calls the marvel plugin, which then fails to find modules +@ClusterScope(transportClientRatio = 0.0, scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0, numClientNodes = 0) +public class HttpESExporterTests extends ElasticsearchIntegrationTest { + + @Test + public void testHttpServerOff() { + Settings.Builder builder = Settings.builder() + .put(AgentService.SETTINGS_INTERVAL, "200m") + .put(Node.HTTP_ENABLED, false); + internalCluster().startNode(builder); + HttpESExporter httpEsExporter = getEsExporter(); + logger.info("trying exporting despite of no target"); + httpEsExporter.export(ImmutableList.of(newRandomMarvelDoc())); + } +/* + @Test + public void testLargeClusterStateSerialization() throws InterruptedException { + // make sure no other exporting is done (quicker).. 
+ internalCluster().startNode(Settings.builder().put(AgentService.SETTINGS_INTERVAL, "200m").put(Node.HTTP_ENABLED, true)); + ESExporter esExporter = internalCluster().getInstance(ESExporter.class); + DiscoveryNodes.Builder nodesBuilder = new DiscoveryNodes.Builder(); + int nodeCount = randomIntBetween(10, 200); + for (int i = 0; i < nodeCount; i++) { + nodesBuilder.put(new DiscoveryNode("node_" + i, new LocalTransportAddress("node_" + i), Version.CURRENT)); + } + + // get the current cluster state rather then construct one because the constructors have changed across ES versions + ClusterService clusterService = internalCluster().getInstance(ClusterService.class); + ClusterState state = ClusterState.builder(clusterService.state()).nodes(nodesBuilder).build(); + logger.info("exporting cluster state with {} nodes", state.nodes().size()); + esExporter.exportEvents(new Event[]{ + new ClusterEvent.ClusterStateChange(1234l, state, "test", ClusterHealthStatus.GREEN, "testing_1234", "test_source_unique") + }); + logger.info("done exporting"); + + ensureYellow(); + client().admin().indices().prepareRefresh(".marvel-*").get(); + assertHitCount(client().prepareSearch().setQuery(QueryBuilders.termQuery("event_source", "test_source_unique")).get(), 1); + } + */ + + @Test + @LuceneTestCase.Slow + public void testTemplateAdditionDespiteOfLateClusterForming() { + Settings.Builder builder = Settings.builder() + .put(AgentService.SETTINGS_INTERVAL, "200m") + .put(Node.HTTP_ENABLED, true) + .put("discovery.type", "zen") + .put("discovery.zen.ping_timeout", "1s") + .put("discovery.initial_state_timeout", "100ms") + .put("discovery.zen.minimum_master_nodes", 2) + .put(HttpESExporter.SETTINGS_BULK_TIMEOUT, "1s") + .put(HttpESExporter.SETTINGS_CHECK_TEMPLATE_TIMEOUT, "1s"); + internalCluster().startNode(builder); + + HttpESExporter httpEsExporter = getEsExporter(); + logger.info("exporting events while there is no cluster"); + 
httpEsExporter.export(ImmutableList.of(newRandomMarvelDoc())); + + logger.info("bringing up a second node"); + internalCluster().startNode(builder); + ensureGreen(); + logger.info("exporting a second event"); + httpEsExporter.export(ImmutableList.of(newRandomMarvelDoc())); + + logger.info("verifying template is inserted"); + assertMarvelTemplate(); + } + + private void assertMarvelTemplate() { + boolean found; + found = findMarvelTemplate(); + assertTrue("failed to find a template named `marvel`", found); + } + + private boolean findMarvelTemplate() { + for (IndexTemplateMetaData template : client().admin().indices().prepareGetTemplates("marvel").get().getIndexTemplates()) { + if (template.getName().equals("marvel")) { + return true; + } + } + return false; + } + + + @Test + public void testDynamicHostChange() { + // disable exporting to be able to use non valid hosts + Settings.Builder builder = Settings.builder() + .put(AgentService.SETTINGS_INTERVAL, "-1"); + internalCluster().startNode(builder); + + HttpESExporter httpEsExporter = getEsExporter(); + + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put(HttpESExporter.SETTINGS_HOSTS, "test1"))); + assertThat(httpEsExporter.getHosts(), Matchers.arrayContaining("test1")); + + // wipes the non array settings + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .putArray(HttpESExporter.SETTINGS_HOSTS, "test2").put(HttpESExporter.SETTINGS_HOSTS, ""))); + assertThat(httpEsExporter.getHosts(), Matchers.arrayContaining("test2")); + + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().putArray(HttpESExporter.SETTINGS_HOSTS, "test3"))); + assertThat(httpEsExporter.getHosts(), Matchers.arrayContaining("test3")); + } + + @Test + public void testHostChangeReChecksTemplate() { + Settings.Builder builder = Settings.builder() + .put(AgentService.SETTINGS_INTERVAL, 
"200m") + .put(Node.HTTP_ENABLED, true); + internalCluster().startNode(builder); + + HttpESExporter httpEsExporter = getEsExporter(); + + logger.info("exporting an event"); + httpEsExporter.export(ImmutableList.of(newRandomMarvelDoc())); + + logger.info("removing the marvel template"); + + assertAcked(client().admin().indices().prepareDeleteTemplate("marvel").get()); + + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( + Settings.builder().putArray(HttpESExporter.SETTINGS_HOSTS, httpEsExporter.getHosts())).get()); + + logger.info("exporting a second event"); + httpEsExporter.export(ImmutableList.of(newRandomMarvelDoc())); + + logger.info("verifying template is inserted"); + assertMarvelTemplate(); + } + + @Test + public void testHostFailureChecksTemplate() throws InterruptedException, IOException { + Settings.Builder builder = Settings.builder() + .put(AgentService.SETTINGS_INTERVAL, "200m") + .put(Node.HTTP_ENABLED, true); + final String node0 = internalCluster().startNode(builder); + String node1 = internalCluster().startNode(builder); + + HttpESExporter httpEsExporter0 = getEsExporter(node0); + final HttpESExporter httpEsExporter1 = getEsExporter(node1); + + logger.info("--> exporting events to force host resolution"); + httpEsExporter0.export(ImmutableList.of(newRandomMarvelDoc())); + httpEsExporter1.export(ImmutableList.of(newRandomMarvelDoc())); + + logger.info("--> setting exporting hosts to {} + {}", httpEsExporter0.getHosts(), httpEsExporter1.getHosts()); + ArrayList mergedHosts = new ArrayList(); + mergedHosts.addAll(Arrays.asList(httpEsExporter0.getHosts())); + mergedHosts.addAll(Arrays.asList(httpEsExporter1.getHosts())); + + assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( + Settings.builder().putArray(HttpESExporter.SETTINGS_HOSTS, mergedHosts.toArray(Strings.EMPTY_ARRAY))).get()); + + logger.info("--> exporting events to have new settings take effect"); + 
httpEsExporter0.export(ImmutableList.of(newRandomMarvelDoc())); + httpEsExporter1.export(ImmutableList.of(newRandomMarvelDoc())); + + assertMarvelTemplate(); + + logger.info("--> removing the marvel template"); + assertAcked(client().admin().indices().prepareDeleteTemplate("marvel").get()); + + logger.info("--> shutting down node0"); + internalCluster().stopRandomNode(new Predicate() { + @Override + public boolean apply(Settings settings) { + return settings.get("name").equals(node0); + } + }); + + logger.info("--> exporting events from node1"); + // we use assert busy node because url caching may cause the node failure to be only detected while sending the event + assertTrue("failed to find a template named 'marvel'", awaitBusy(new Predicate() { + @Override + public boolean apply(Object o) { + httpEsExporter1.export(ImmutableList.of(newRandomMarvelDoc())); + logger.debug("--> checking for template"); + return findMarvelTemplate(); + } + })); + } + + private HttpESExporter getEsExporter() { + AgentService service = internalCluster().getInstance(AgentService.class); + return (HttpESExporter) service.getExporters().iterator().next(); + } + + private HttpESExporter getEsExporter(String node) { + AgentService service = internalCluster().getInstance(AgentService.class, node); + return (HttpESExporter) service.getExporters().iterator().next(); + } + + final static AtomicLong timeStampGenerator = new AtomicLong(); + + private MarvelDoc newRandomMarvelDoc() { + return IndexMarvelDoc.createMarvelDoc(internalCluster().getClusterName(), "test_marvelDoc", timeStampGenerator.incrementAndGet(), + "test_index", randomInt(), randomLong(), randomLong(), randomLong()); + } +} diff --git a/marvel/src/test/java/org/elasticsearch/marvel/license/LicenseIntegrationTests.java b/marvel/src/test/java/org/elasticsearch/marvel/license/LicenseIntegrationTests.java new file mode 100644 index 00000000000..595ac88a362 --- /dev/null +++ 
b/marvel/src/test/java/org/elasticsearch/marvel/license/LicenseIntegrationTests.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.marvel.license; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.SysGlobals; +import com.google.common.collect.ImmutableSet; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.core.License; +import org.elasticsearch.license.plugin.core.LicensesClientService; +import org.elasticsearch.license.plugin.core.LicensesService; +import org.elasticsearch.marvel.MarvelPlugin; +import org.elasticsearch.marvel.mode.Mode; +import org.elasticsearch.plugins.AbstractPlugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.SUITE; +import static org.hamcrest.Matchers.equalTo; + +@ClusterScope(scope = SUITE, transportClientRatio = 0, numClientNodes = 0) +public class LicenseIntegrationTests extends ElasticsearchIntegrationTest { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.settingsBuilder() + .put(super.nodeSettings(nodeOrdinal)) + .put("plugin.types", MarvelPlugin.class.getName() + "," + MockLicensePlugin.class.getName()) + 
.put(PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, false) + .build(); + } + + @Test + public void testEnableDisableLicense() { + assertMarvelMode(Mode.STANDARD); + disableLicensing(); + + assertMarvelMode(Mode.LITE); + enableLicensing(); + + assertMarvelMode(Mode.STANDARD); + } + + private void assertMarvelMode(Mode expected) { + LicenseService licenseService = internalCluster().getInstance(LicenseService.class); + assertNotNull(licenseService); + assertThat(licenseService.mode(), equalTo(expected)); + } + + + public static void disableLicensing() { + for (MockLicenseService service : internalCluster().getInstances(MockLicenseService.class)) { + service.disable(); + } + } + + public static void enableLicensing() { + for (MockLicenseService service : internalCluster().getInstances(MockLicenseService.class)) { + service.enable(); + } + } + + public static class MockLicensePlugin extends AbstractPlugin { + + public static final String NAME = "internal-test-licensing"; + + @Override + public String name() { + return NAME; + } + + @Override + public String description() { + return name(); + } + + @Override + public Collection> modules() { + return ImmutableSet.>of(InternalLicenseModule.class); + } + } + + public static class InternalLicenseModule extends AbstractModule { + @Override + protected void configure() { + bind(MockLicenseService.class).asEagerSingleton(); + bind(LicensesClientService.class).to(MockLicenseService.class); + } + } + + public static class MockLicenseService extends AbstractComponent implements LicensesClientService { + + static final License DUMMY_LICENSE = License.builder() + .feature(LicenseService.FEATURE_NAME) + .expiryDate(System.currentTimeMillis()) + .issueDate(System.currentTimeMillis()) + .issuedTo("LicensingTests") + .issuer("test") + .maxNodes(Integer.MAX_VALUE) + .signature("_signature") + .type("standard") + .subscriptionType("all_is_good") + .uid(String.valueOf(RandomizedTest.systemPropertyAsInt(SysGlobals.CHILDVM_SYSPROP_JVM_ID, 0)) + 
System.identityHashCode(LicenseIntegrationTests.class)) + .build(); + + private final List listeners = new ArrayList<>(); + + @Inject + public MockLicenseService(Settings settings) { + super(settings); + enable(); + } + + @Override + public void register(String s, LicensesService.TrialLicenseOptions trialLicenseOptions, Collection collection, Listener listener) { + listeners.add(listener); + enable(); + } + + public void enable() { + // enabled all listeners (incl. shield) + for (Listener listener : listeners) { + listener.onEnabled(DUMMY_LICENSE); + } + } + + public void disable() { + // only disable watcher listener (we need shield to work) + for (Listener listener : listeners) { + listener.onDisabled(DUMMY_LICENSE); + } + } + } +} diff --git a/marvel/src/test/resources/log4j.properties b/marvel/src/test/resources/log4j.properties new file mode 100644 index 00000000000..a917be95b83 --- /dev/null +++ b/marvel/src/test/resources/log4j.properties @@ -0,0 +1,11 @@ +es.logger.level=DEBUG +log4j.rootLogger=${es.logger.level}, out + +log4j.logger.org.apache.http=INFO, out +log4j.additivity.org.apache.http=false + +log4j.appender.out=org.apache.log4j.ConsoleAppender +log4j.appender.out.layout=org.apache.log4j.PatternLayout +log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n +log4j.logger.org.elasticsearch.marvel.agent=TRACE, out +log4j.additivity.org.elasticsearch.marvel.agent=false diff --git a/watcher/README.md b/watcher/README.md deleted file mode 100644 index 0d29c7e0225..00000000000 --- a/watcher/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Marvel Watchers - -In this directory you will find example Watchers for Marvel that were distributed with the Watcher Beta documentation. The `watches` directory contains the example watches and the `test` directory contains integration test for each watcher. 
- -The testing framework is written in Node.js and use ESVM to create an Elasticsearch instance for running the tests against - -### Setup and Running Tests - -- Run `npm install` in this directory -- Ensure you have `mocha` installed globally -- Run `mocha` in this directory diff --git a/watcher/lib/client.js b/watcher/lib/client.js deleted file mode 100644 index 7118f52e4fc..00000000000 --- a/watcher/lib/client.js +++ /dev/null @@ -1,4 +0,0 @@ -var Client = require('elasticsearch').Client; -module.exports = new Client({ - host: 'http://localhost:9800' -}); diff --git a/watcher/lib/create_template.js b/watcher/lib/create_template.js deleted file mode 100644 index bf0506c3daf..00000000000 --- a/watcher/lib/create_template.js +++ /dev/null @@ -1,11 +0,0 @@ -var client = require('./client'); -var template = require('./template.json'); - -module.exports = function () { - return client.indices.putTemplate({ - body: template, - name: 'marvel' - }); -}; - - diff --git a/watcher/lib/execute_watcher.js b/watcher/lib/execute_watcher.js deleted file mode 100644 index 2247ff83112..00000000000 --- a/watcher/lib/execute_watcher.js +++ /dev/null @@ -1,40 +0,0 @@ -var Promise = require('bluebird'); -var request = require('request'); -var injectStats = require('./inject_stats'); -var loadWatcher = require('./load_watcher'); - -module.exports = function (watcher, fixture) { - - return injectStats(fixture).then(function () { - return loadWatcher(watcher); - }).then(function () { - return new Promise(function (resolve, reject) { - var options = { - method: 'POST', - url: 'http://localhost:9800/_watcher/watch/' + watcher + '/_execute', - json: true, - body: { - trigger_event: { - schedule: { - scheduled_time: 'now', - triggered_time: 'now' - } - } - } - }; - - request(options, function (err, resp, body) { - if (err) return reject(err); - if (resp.statusCode === 200) return resolve(body); - var message = options.url + ' responed with ' + resp.statusCode; - var error = new 
Error(body.error || message); - error.body = body; - error.resp = resp; - error.code = resp.statusCode; - error.options = options; - reject(error); - }); - }); - }); - -}; diff --git a/watcher/lib/find_port.js b/watcher/lib/find_port.js deleted file mode 100644 index 93313bb095e..00000000000 --- a/watcher/lib/find_port.js +++ /dev/null @@ -1,13 +0,0 @@ -var Promise = require('bluebird'); -var portscanner = require('portscanner'); -module.exports = function findPort(start, end, host) { - host = host || 'localhost'; - return new Promise(function (resolve, reject) { - portscanner.findAPortNotInUse(start, end, host, function (err, port) { - if (err) return reject(err); - resolve(port); - }); - }); -}; - - diff --git a/watcher/lib/inject_stats.js b/watcher/lib/inject_stats.js deleted file mode 100644 index e14ec726915..00000000000 --- a/watcher/lib/inject_stats.js +++ /dev/null @@ -1,74 +0,0 @@ -var _ = require('lodash'); -_.mixin(require('lodash-deep')); -var moment = require('moment'); -var client = require('./client'); - -module.exports = function (fixture) { - var indexPattern = fixture.indexPattern; - var type = fixture.type; - var data = fixture.data; - var rawData = fixture.rawData; - var startDate = fixture.startDate; - var duration = fixture.duration; - var dateField = fixture.dateField; - var workingDate = moment.utc(); - var indices = []; - var body = []; - var fields; - - - function createEntries(row) { - var index = workingDate.format(indexPattern); - var entry = { '@timestamp': workingDate.toISOString() }; - row.forEach(function (val, index) { - _.deepSet(entry, fields[index], val); - }); - - indices.push(index); - - body.push({ - index: { - _index: index, - _type: type - } - }); - body.push(entry); - } - - if (rawData) { - rawData.forEach(function (row) { - var index = moment.utc(row[dateField]).format(indexPattern); - var entry = {}; - _.each(row, function (val, key) { - _.deepSet(entry, key, val); - }); - indices.push(index); - body.push({ - index: { - 
_index: index, - _type: type - } - }); - body.push(entry); - }); - } else { - fields = data.shift(); - while(startDate <= workingDate) { - data.forEach(createEntries); - workingDate.subtract(duration); - } - } - - return client.deleteByQuery({ - index: _.unique(indices), - ignoreUnavailable: true, - allowNoIndices: true, - q: '*' - }) - .then(function (arg) { - return client.bulk({ - body: body, - refresh: true - }); - }); -}; diff --git a/watcher/lib/load_watcher.js b/watcher/lib/load_watcher.js deleted file mode 100644 index 6fc03a7fa08..00000000000 --- a/watcher/lib/load_watcher.js +++ /dev/null @@ -1,26 +0,0 @@ -var Promise = require('bluebird'); -var request = require('request'); -var path = require('path'); - -module.exports = function (name) { - var watch = require(path.join(__dirname, '..', 'watches', name + '.json')); - var options = { - method: 'PUT', - url: 'http://localhost:9800/_watcher/watch/' + name, - json: true, - body: watch - }; - return new Promise(function (resolve, reject) { - request(options, function (err, resp, body) { - if (err) return reject(err); - if (resp.statusCode <= 201) resolve({ resp: resp, body: body }); - var message = options.url + ' responded with ' + resp.statusCode; - var error = new Error(body.error || message); - error.body = body; - error.resp = resp; - error.code = resp.statusCode; - error.options = options; - reject(error); - }); - }); -}; diff --git a/watcher/lib/setup_es.js b/watcher/lib/setup_es.js deleted file mode 100644 index 436f37df02c..00000000000 --- a/watcher/lib/setup_es.js +++ /dev/null @@ -1,71 +0,0 @@ -var path = require('path'); -var Promise = require('bluebird'); -var libesvm = require('libesvm'); -var createTemplate = require('./create_template'); - -function startEs() { - var options = { - version: '1.5.2', - directory: path.join(__dirname, '..', 'esvm'), - purge: true, - plugins: [ - 'elasticsearch/watcher/1.0.0-Beta1-5', - 'elasticsearch/license/latest' - ], - config: { - 
'script.groovy.sandbox.enabled': true, - 'cluster.name': 'test', - 'network.host': '127.0.0.1', - 'http.port': 9800, - 'watcher.actions.email.service.account': { - 'local': { - 'email_defaults.from': 'admin@example.com', - 'smtp': { - 'host': 'localhost', - 'user': 'test', - 'password': 'test', - 'port': 5555 - } - } - } - - } - }; - var cluster = libesvm.createCluster(options); - cluster.on('log', function (log) { - if (log.type === 'progress') return; - if (process.env.DEBUG) console.log('%s %s %s %s', log.level, log.node, log.type, log.message); - }); - return cluster.install() - .then(function () { - return cluster.installPlugins(); - }) - .then(function () { - return cluster.start(); - }) - .then(function () { - after(function () { - this.timeout(60000); - return cluster.shutdown(); - }); - return cluster; - }); -} - -before(function () { - var self = this; - this.timeout(60000); - return new Promise(function (resolve, reject) { - startEs().then(function (cluster) { - self.cluster = cluster; - cluster.on('log', function (log) { - if (/watch service has started/.test(log.message)) { - createTemplate().then(function () { - resolve(cluster); - }); - } - }); - }).catch(reject); - }); -}); - diff --git a/watcher/lib/setup_smtp_server.js b/watcher/lib/setup_smtp_server.js deleted file mode 100644 index 12e4f7ec922..00000000000 --- a/watcher/lib/setup_smtp_server.js +++ /dev/null @@ -1,56 +0,0 @@ -var Promise = require('bluebird'); -var SMTPServer = require('smtp-server').SMTPServer; -var MailParser = require('mailparser').MailParser; -var acceptAny = function (address, session, done) { - return done(); -}; - -var mailbox = []; - -function startSMTP() { - return new Promise(function (resolve, reject) { - var server = new SMTPServer({ - logger: false, - disabledCommands: ['STARTTLS'], - onAuth: function (auth, session, done) { - done(null, { user: 1 }); - }, - onMailFrom: acceptAny, - onRcptTo: acceptAny, - onData: function (stream, session, done) { - var mailparser = 
new MailParser(); - mailparser.on('end', function (mailObj) { - mailbox.push(mailObj); - done(); - }); - stream.pipe(mailparser); - } - }); - - server.listen(5555, function (err) { - if (err) return reject(err); - after(function (done) { - server.close(done); - }); - resolve(server); - }); - }); -} - -before(function () { - var self = this; - return startSMTP().then(function (server) { - this.smtp = server; - return server; - }); -}); - -beforeEach(function () { - this.mailbox = mailbox; -}); - -afterEach(function () { - mailbox = []; -}); - -module.exports = mailbox; diff --git a/watcher/lib/test_no_execute.js b/watcher/lib/test_no_execute.js deleted file mode 100644 index 7c94d531487..00000000000 --- a/watcher/lib/test_no_execute.js +++ /dev/null @@ -1,28 +0,0 @@ -var lib = require('requirefrom')('lib'); -var executeWatcher = lib('execute_watcher'); -var expect = require('expect.js'); -module.exports = function testNoExecute(options, message, generateRawData) { - describe(message, function () { - var response; - beforeEach(function () { - this.timeout(5000); - var rawData = generateRawData(); - var fixture = { - indexPattern: options.indexPattern, - type: options.type, - dateField: '@timestamp', - rawData: rawData - }; - return executeWatcher(options.watcher, fixture).then(function (resp) { - response = resp; - if (process.env.DEBUG) console.log(JSON.stringify(resp, null, ' ')); - return resp; - }); - }); - it('should not meet the script condition', function () { - expect(response.state).to.be('execution_not_needed'); - expect(response.execution_result.condition.script.met).to.be(false); - }); - }); -}; - diff --git a/watcher/package.json b/watcher/package.json deleted file mode 100644 index ab38bb94d83..00000000000 --- a/watcher/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "marvel-watchers", - "version": "1.0.0", - "description": "", - "main": "index.js", - "directories": { - "test": "test" - }, - "scripts": { - "test": "echo \"Error: no test 
specified\" && exit 1" - }, - "author": "", - "license": "ISC", - "dependencies": { - "libesvm": "0.0.12", - "lodash": "^3.8.0", - "lodash-deep": "^1.6.0", - "mailparser": "^0.5.1", - "moment": "^2.10.3", - "portscanner": "^1.0.0", - "requirefrom": "^0.2.0" - } -} diff --git a/watcher/test/cluster_status.js b/watcher/test/cluster_status.js deleted file mode 100644 index 91eeb774c92..00000000000 --- a/watcher/test/cluster_status.js +++ /dev/null @@ -1,95 +0,0 @@ -var _ = require('lodash'); -var lib = require('requirefrom')('lib'); -var expect = require('expect.js'); -var moment = require('moment'); -var executeWatcher = lib('execute_watcher'); -var options = { - indexPattern: '[.marvel-]YYYY.MM.DD', - type: 'cluster_stats', - watcher: 'cluster_status' -}; -var testNoExecute = lib('test_no_execute').bind(null, options); -var client = lib('client'); -lib('setup_es'); -lib('setup_smtp_server'); - -describe('Marvel Watchers', function () { - describe('Cluster Status', function () { - - describe('Red for 60 seconds', function () { - var response; - beforeEach(function () { - this.timeout(5000); - var workingDate = moment.utc(); - var rawData = _.times(12, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; - }); - var fixture = { - indexPattern: '[.marvel-]YYYY.MM.DD', - type: 'cluster_stats', - dateField: '@timestamp', - rawData: rawData - }; - return executeWatcher('cluster_status', fixture).then(function (resp) { - response = resp; - if (process.env.DEBUG) console.log(JSON.stringify(resp, null, ' ')); - return resp; - }); - }); - - it('should meet the script condition', function () { - expect(response.state).to.be('executed'); - expect(response.execution_result.condition.script.met).to.be(true); - }); - - it('should send an email', function () { - expect(this.mailbox).to.have.length(1); - var message = this.mailbox[0]; - expect(message.subject).to.contain('Watcher Notification - Cluster has been RED for the last 60 seconds'); 
- expect(message.text).to.contain('Your cluster has been red for the last 60 seconds.'); - }); - - }); - - testNoExecute('Red for 55 then Yellow for 60 seconds', function () { - var workingDate = moment.utc(); - var rawData = _.times(11, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; - }); - rawData.concat(_.times(12, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; - })); - return rawData; - }); - - testNoExecute('Red for 30 then Yellow for 60 seconds', function () { - var workingDate = moment.utc(); - var rawData = _.times(6, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; - }); - rawData.concat(_.times(12, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; - })); - return rawData; - }); - - testNoExecute('Red for 5 Yellow for 10 Red for 10 Green for 60', function () { - var workingDate = moment.utc(); - var rawData = _.times(1, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; - }); - rawData.concat(_.times(2, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'yellow' }; - })); - rawData.concat(_.times(2, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'red' }; - })); - rawData.concat(_.times(12, function () { - return { '@timestamp': workingDate.subtract(5, 's').format(), status: 'green' }; - })); - return rawData; - }); - - }); -}); - diff --git a/watcher/test/cpu_usage.js b/watcher/test/cpu_usage.js deleted file mode 100644 index 2920d7698af..00000000000 --- a/watcher/test/cpu_usage.js +++ /dev/null @@ -1,81 +0,0 @@ -var lib = require('requirefrom')('lib'); -var expect = require('expect.js'); -var moment = require('moment'); -var executeWatcher = lib('execute_watcher'); -var client = lib('client'); -var indexPattern = '[.marvel-]YYYY.MM.DD'; 
-lib('setup_es'); -lib('setup_smtp_server'); - -describe('Marvel Watchers', function () { - describe('CPU Usage', function () { - - describe('above 75%', function () { - var response; - beforeEach(function () { - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'os.cpu.user'], - ['node-01', 75], - ['node-02', 85], - ['node-03', 60] - ] - }; - return executeWatcher('cpu_usage', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should meet the script condition', function () { - expect(response.state).to.be('executed'); - expect(response.execution_result.condition.script.met).to.be(true); - }); - - it('should send an email with multiple hosts', function () { - expect(this.mailbox).to.have.length(1); - var message = this.mailbox[0]; - expect(message.text).to.contain('"node-01" - CPU Usage is at 75.0%'); - expect(message.text).to.contain('"node-02" - CPU Usage is at 85.0%'); - }); - - }); - - describe('below 75%', function () { - var response; - beforeEach(function () { - var self = this; - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'os.cpu.user'], - ['node-01', 35], - ['node-02', 25], - ['node-03', 10] - ] - }; - return executeWatcher('cpu_usage', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should not send an email', function () { - expect(response.state).to.be('execution_not_needed'); - expect(response.execution_result.condition.script.met).to.be(false); - expect(this.mailbox).to.have.length(0); - }); - - }); - - }); -}); diff --git a/watcher/test/fielddata.js b/watcher/test/fielddata.js deleted file mode 100644 index 0601afdf38e..00000000000 --- a/watcher/test/fielddata.js +++ /dev/null @@ 
-1,82 +0,0 @@ -var lib = require('requirefrom')('lib'); -var expect = require('expect.js'); -var moment = require('moment'); -var executeWatcher = lib('execute_watcher'); -var client = lib('client'); -var indexPattern = '[.marvel-]YYYY.MM.DD'; -lib('setup_es'); -lib('setup_smtp_server'); - -describe('Marvel Watchers', function () { - describe('File Descriptors', function () { - - describe('above 80%', function () { - var response; - beforeEach(function () { - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'indices.fielddata.memory_size_in_bytes'], - ['node-01', 81000], - ['node-02', 70000], - ['node-03', 90000] - ] - }; - return executeWatcher('fielddata', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should meet the script condition', function () { - expect(response.state).to.be('executed'); - expect(response.execution_result.condition.script.met).to.be(true); - }); - - it('should send an email with multiple hosts', function () { - expect(this.mailbox).to.have.length(1); - var message = this.mailbox[0]; - expect(message.text).to.contain('"node-01" - Fielddata utilization is at 81000.0 bytes (81%)'); - expect(message.text).to.contain('"node-03" - Fielddata utilization is at 90000.0 bytes (90%)'); - }); - - }); - - describe('below 80%', function () { - var response; - beforeEach(function () { - var self = this; - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'indices.fielddata.memory_size_in_bytes'], - ['node-01', 12039], - ['node-02', 54393], - ['node-03', 20302] - ] - }; - return executeWatcher('fielddata', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should not send an email', function () 
{ - expect(response.state).to.be('execution_not_needed'); - expect(response.execution_result.condition.script.met).to.be(false); - expect(this.mailbox).to.have.length(0); - }); - - }); - - }); -}); - diff --git a/watcher/test/file_descriptors.js b/watcher/test/file_descriptors.js deleted file mode 100644 index 6de8a188f80..00000000000 --- a/watcher/test/file_descriptors.js +++ /dev/null @@ -1,82 +0,0 @@ -var lib = require('requirefrom')('lib'); -var expect = require('expect.js'); -var moment = require('moment'); -var executeWatcher = lib('execute_watcher'); -var client = lib('client'); -var indexPattern = '[.marvel-]YYYY.MM.DD'; -lib('setup_es'); -lib('setup_smtp_server'); - -describe('Marvel Watchers', function () { - describe('File Descriptors', function () { - - describe('above 80%', function () { - var response; - beforeEach(function () { - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'process.open_file_descriptors'], - ['node-01', Math.round(65535*0.75)], - ['node-02', Math.round(65535*0.81)], - ['node-03', Math.round(65535*0.93)] - ] - }; - return executeWatcher('file_descriptors', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should meet the script condition', function () { - expect(response.state).to.be('executed'); - expect(response.execution_result.condition.script.met).to.be(true); - }); - - it('should send an email with multiple hosts', function () { - expect(this.mailbox).to.have.length(1); - var message = this.mailbox[0]; - expect(message.text).to.contain('"node-02" - File Descriptors is at ' + Math.round(65535*0.81) + '.0 ('+ Math.round(((65535*0.81)/65535)*100) + '%)'); - expect(message.text).to.contain('"node-03" - File Descriptors is at ' + Math.round(65535*0.93) + '.0 ('+ Math.round(((65535*0.93)/65535)*100) + '%)'); - }); - - }); - - describe('below 80%', 
function () { - var response; - beforeEach(function () { - var self = this; - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'process.open_file_descriptors'], - ['node-01', Math.round(65535*0.05)], - ['node-02', Math.round(65535*0.30)], - ['node-03', Math.round(65535*0.23)] - ] - }; - return executeWatcher('file_descriptors', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should not send an email', function () { - expect(response.state).to.be('execution_not_needed'); - expect(response.execution_result.condition.script.met).to.be(false); - expect(this.mailbox).to.have.length(0); - }); - - }); - - }); -}); - diff --git a/watcher/test/heap_used.js b/watcher/test/heap_used.js deleted file mode 100644 index ee5b444088d..00000000000 --- a/watcher/test/heap_used.js +++ /dev/null @@ -1,81 +0,0 @@ -var lib = require('requirefrom')('lib'); -var expect = require('expect.js'); -var moment = require('moment'); -var executeWatcher = lib('execute_watcher'); -var client = lib('client'); -var indexPattern = '[.marvel-]YYYY.MM.DD'; -lib('setup_es'); -lib('setup_smtp_server'); - -describe('Marvel Watchers', function () { - describe('Memory Usage', function () { - - describe('above 75%', function () { - var response; - beforeEach(function () { - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'jvm.mem.heap_used_percent'], - ['node-01', 75], - ['node-02', 85], - ['node-03', 60] - ] - }; - return executeWatcher('heap_used', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should meet the script condition', function () { - expect(response.state).to.be('executed'); - 
expect(response.execution_result.condition.script.met).to.be(true); - }); - - it('should send an email with multiple hosts', function () { - expect(this.mailbox).to.have.length(1); - var message = this.mailbox[0]; - expect(message.text).to.contain('"node-01" - Memory Usage is at 75.0%'); - expect(message.text).to.contain('"node-02" - Memory Usage is at 85.0%'); - }); - - }); - - describe('below 75%', function () { - var response; - beforeEach(function () { - var self = this; - this.timeout(5000); - var fixture = { - indexPattern: indexPattern, - type: 'node_stats', - duration: moment.duration(5, 's'), - startDate: moment.utc().subtract(5, 'm'), - data: [ - ['node.name', 'jvm.mem.heap_used_percent'], - ['node-01', 35], - ['node-02', 25], - ['node-03', 10] - ] - }; - return executeWatcher('heap_used', fixture).then(function (resp) { - response = resp; - return resp; - }); - }); - - it('should not send an email', function () { - expect(response.state).to.be('execution_not_needed'); - expect(response.execution_result.condition.script.met).to.be(false); - expect(this.mailbox).to.have.length(0); - }); - - }); - - }); -}); diff --git a/watcher/test/mocha.opts b/watcher/test/mocha.opts deleted file mode 100644 index ec71a5114b0..00000000000 --- a/watcher/test/mocha.opts +++ /dev/null @@ -1,2 +0,0 @@ ---require expect.js ---reporter spec diff --git a/watcher/watches/cluster_status.json b/watcher/watches/cluster_status.json deleted file mode 100644 index 52556996e73..00000000000 --- a/watcher/watches/cluster_status.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "trigger": { - "schedule": { - "interval": "1m" - } - }, - "input": { - "search": { - "request": { - "indices": ".marvel-*", - "types": "cluster_stats", - "body": { - "query": { - "filtered": { - "filter": { - "bool": { - "must": [ - { - "range": { - "@timestamp": { - "gte": "now-2m", - "lte": "now" - } - } - } - ], - "should": [ - { - "term": { - "status.raw": "red" - } - }, - { - "term": { - "status.raw": "green" - } - }, 
- { - "term": { - "status.raw": "yellow" - } - } - ] - } - } - } - }, - "fields": ["@timestamp","status"], - "sort": [ - { - "@timestamp": { - "order": "desc" - } - } - ], - "size": 1, - "aggs": { - "minutes": { - "date_histogram": { - "field": "@timestamp", - "interval": "5s" - }, - "aggs": { - "status": { - "terms": { - "field": "status.raw", - "size": 3 - } - } - } - } - } - } - } - } - }, - "throttle_period": "30m", - "condition": { - "script": { - "inline": "if (ctx.payload.hits.total < 1) return false; def rows = ctx.payload.hits.hits; if (rows[0].fields.status[0] != 'red') return false; if (ctx.payload.aggregations.minutes.buckets.size() < 12) return false; def last60Seconds = ctx.payload.aggregations.minutes.buckets[-12..-1]; return last60Seconds.every { it.status.buckets.every { s -> s.key == 'red' } }" - } - }, - "actions": { - "send_email": { - "email": { - "to": "user@example.com", - "subject": "Watcher Notification - Cluster has been RED for the last 60 seconds", - "body": "Your cluster has been red for the last 60 seconds." 
- } - } - } -} diff --git a/watcher/watches/cpu_usage.json b/watcher/watches/cpu_usage.json deleted file mode 100644 index edda8624f64..00000000000 --- a/watcher/watches/cpu_usage.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "trigger": { - "schedule": { - "interval": "1m" - } - }, - "input": { - "search": { - "request": { - "indices": [ - ".marvel-*" - ], - "search_type": "count", - "body": { - "query": { - "filtered": { - "filter": { - "range": { - "@timestamp": { - "gte": "now-2m", - "lte": "now" - } - } - } - } - }, - "aggs": { - "minutes": { - "date_histogram": { - "field": "@timestamp", - "interval": "minute" - }, - "aggs": { - "nodes": { - "terms": { - "field": "node.name.raw", - "size": 10, - "order": { - "cpu": "desc" - } - }, - "aggs": { - "cpu": { - "avg": { - "field": "os.cpu.user" - } - } - } - } - } - } - } - } - } - } - }, - "throttle_period": "30m", - "condition": { - "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.cpu && node.cpu.value >= 75;" - }, - "actions": { - "send_email": { - "transform": { - "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll { return it.cpu && it.cpu.value >= 75 };" - }, - "email": { - "to": "user@example.com", - "subject": "Watcher Notification - HIGH CPU USAGE", - "body": "Nodes with HIGH CPU Usage (above 75%):\n\n{{#ctx.payload._value}}\"{{key}}\" - CPU Usage is at {{cpu.value}}%\n{{/ctx.payload._value}}" - } - } - } -} diff --git a/watcher/watches/fielddata.json b/watcher/watches/fielddata.json deleted file mode 100644 index 08072f35ebf..00000000000 --- a/watcher/watches/fielddata.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "metadata": { - "fielddata_cache_size": 100000, - "threshold": 0.8 - }, - "trigger": { - "schedule": { - "interval": "1m" - } - }, - "input": { - "search": { - "request": { - "indices": [ - ".marvel-*" - ], 
- "types": "node_stats", - "search_type": "count", - "body": { - "query": { - "filtered": { - "filter": { - "range": { - "@timestamp": { - "gte": "now-1m", - "lte": "now" - } - } - } - } - }, - "aggs": { - "minutes": { - "date_histogram": { - "field": "@timestamp", - "interval": "5s" - }, - "aggs": { - "nodes": { - "terms": { - "field": "node.name.raw", - "size": 10, - "order": { - "fielddata": "desc" - } - }, - "aggs": { - "fielddata": { - "avg": { - "field": "indices.fielddata.memory_size_in_bytes" - } - } - } - } - } - } - } - } - } - } - }, - "throttle_period": "30m", - "condition": { - "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.fielddata && node.fielddata.value >= (ctx.metadata.fielddata_cache_size * ctx.metadata.threshold);" - }, - "actions": { - "send_email": { - "transform": { - "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll({ return it.fielddata && it.fielddata.value >= (ctx.metadata.fielddata_cache_size * ctx.metadata.threshold) }).collect({ it.fielddata.percent = Math.round((it.fielddata.value/ctx.metadata.fielddata_cache_size)*100); it });" - }, - "email": { - "to": "user@example.com", - "subject": "Watcher Notification - NODES WITH 80% FIELDDATA UTILIZATION", - "body": "Nodes with 80% FIELDDATA UTILIZATION (above 80%):\n\n{{#ctx.payload._value}}\"{{key}}\" - Fielddata utilization is at {{fielddata.value}} bytes ({{fielddata.percent}}%)\n{{/ctx.payload._value}}" - } - } - } -} - diff --git a/watcher/watches/file_descriptors.json b/watcher/watches/file_descriptors.json deleted file mode 100644 index 83fbe122aca..00000000000 --- a/watcher/watches/file_descriptors.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "metadata": { - "system_fd": 65535, - "threshold": 0.8 - }, - "trigger": { - "schedule": { - "interval": "1m" - } - }, - "input": { - "search": 
{ - "request": { - "indices": [ - ".marvel-*" - ], - "types": "node_stats", - "search_type": "count", - "body": { - "query": { - "filtered": { - "filter": { - "range": { - "@timestamp": { - "gte": "now-1m", - "lte": "now" - } - } - } - } - }, - "aggs": { - "minutes": { - "date_histogram": { - "field": "@timestamp", - "interval": "5s" - }, - "aggs": { - "nodes": { - "terms": { - "field": "node.name.raw", - "size": 10, - "order": { - "fd": "desc" - } - }, - "aggs": { - "fd": { - "avg": { - "field": "process.open_file_descriptors" - } - } - } - } - } - } - } - } - } - } - }, - "throttle_period": "30m", - "condition": { - "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.fd && node.fd.value >= (ctx.metadata.system_fd * ctx.metadata.threshold);" - }, - "actions": { - "send_email": { - "transform": { - "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll({ return it.fd && it.fd.value >= (ctx.metadata.system_fd * ctx.metadata.threshold) }).collect({ it.fd.percent = Math.round((it.fd.value/ctx.metadata.system_fd)*100); it });" - }, - "email": { - "to": "user@example.com", - "subject": "Watcher Notification - NODES WITH 80% FILE DESCRIPTORS USED", - "body": "Nodes with 80% FILE DESCRIPTORS USED (above 80%):\n\n{{#ctx.payload._value}}\"{{key}}\" - File Descriptors is at {{fd.value}} ({{fd.percent}}%)\n{{/ctx.payload._value}}" - } - } - } -} - diff --git a/watcher/watches/heap_used.json b/watcher/watches/heap_used.json deleted file mode 100644 index 3f33719a9e8..00000000000 --- a/watcher/watches/heap_used.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "trigger": { - "schedule": { - "interval": "1m" - } - }, - "input": { - "search": { - "request": { - "indices": [ - ".marvel-*" - ], - "search_type": "count", - "body": { - "query": { - "filtered": { - "filter": { - "range": { - 
"@timestamp": { - "gte": "now-2m", - "lte": "now" - } - } - } - } - }, - "aggs": { - "minutes": { - "date_histogram": { - "field": "@timestamp", - "interval": "minute" - }, - "aggs": { - "nodes": { - "terms": { - "field": "node.name.raw", - "size": 10, - "order": { - "memory": "desc" - } - }, - "aggs": { - "memory": { - "avg": { - "field": "jvm.mem.heap_used_percent" - } - } - } - } - } - } - } - } - } - } - }, - "throttle_period": "30m", - "condition": { - "script": "if (ctx.payload.aggregations.minutes.buckets.size() == 0) return false; def latest = ctx.payload.aggregations.minutes.buckets[-1]; def node = latest.nodes.buckets[0]; return node && node.memory && node.memory.value >= 75;" - }, - "actions": { - "send_email": { - "transform": { - "script": "def latest = ctx.payload.aggregations.minutes.buckets[-1]; return latest.nodes.buckets.findAll { return it.memory && it.memory.value >= 75 };" - }, - "email": { - "to": "user@example.com", - "subject": "Watcher Notification - HIGH MEMORY USAGE", - "body": "Nodes with HIGH MEMORY Usage (above 75%):\n\n{{#ctx.payload._value}}\"{{key}}\" - Memory Usage is at {{memory.value}}%\n{{/ctx.payload._value}}" - } - } - } -}