Moved plugin to a subfolder

Tweaked build system to allow serving both marvel and kibana content without building and without symlinks

Original commit: elastic/x-pack-elasticsearch@837ad48e7f
This commit is contained in:
Boaz Leskes 2013-11-18 21:36:21 +01:00
parent 7a026e2f32
commit 04876eff6c
11 changed files with 78 additions and 1091 deletions

34
.jshintrc Normal file
View File

@ -0,0 +1,34 @@
{
"browser": true,
"bitwise":false,
"curly": true,
"eqnull": true,
"globalstrict": true,
"devel": true,
"eqeqeq": true,
"forin": false,
"immed": true,
"supernew": true,
"expr": true,
"indent": 2,
"latedef": true,
"newcap": true,
"noarg": true,
"noempty": true,
"undef": true,
"boss": true,
"trailing": false,
"laxbreak": true,
"laxcomma": true,
"sub": true,
"unused": true,
"maxlen": 140,
"globals": {
"define": true,
"require": true,
"Chromath": false
}
}

View File

@ -0,0 +1,44 @@
## Grunt build system
This grunt-based build system handles Kibana development environment setup for Marvel as well as building, packaging and distribution of the Marvel plugin. Note that you **must** run *grunt setup* before any other tasks, as this build system reuses parts of the Kibana build system that must be fetched first.
### Installing
You will need node.js+npm and grunt. Node is available via brew, install grunt with the command below. Once grunt is installed you may run grunt tasks to setup your environment and build Marvel
```npm install -g grunt```
```npm install```
### Tasks
```grunt setup```
**Run this first.** It will download the right Kibana version to ./vendor/kibana, copy the appropriate config.js to the right place and make any symlinks needed for a proper marvel/kibana environment.
```grunt server```
Starts a web server on http://127.0.0.1:5601 pointing at the kibana directory, while also serving custom marvel panels
```grunt jshint```
Lints code without building
```grunt build```
Merges kibana and marvel code, builds Kibana and the plugin (via mvn) and puts them in ./build.
```grunt package```
Zips and tar+gzips the build in ./packages. Includes grunt build
```grunt release```
Uploads archives to download.elasticsearch.org/elasticsearch/marvel/marvel-VERSION.extension. Includes grunt build and grunt package. You will need S3 credentials in .aws-config.json, formatted as follows:
```
{
"key":"MY_KEY_HERE",
"secret":"your/long/secret/string"
}
```

135
pom.xml
View File

@ -1,135 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<name>elasticsearch-marvel</name>
<modelVersion>4.0.0</modelVersion>
<groupId>org.elasticsearch</groupId>
<artifactId>marvel</artifactId>
<version>0.1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<description>Elasticsearch Marvel</description>
<inceptionYear>2013</inceptionYear>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<parent>
<groupId>org.sonatype.oss</groupId>
<artifactId>oss-parent</artifactId>
<version>7</version>
</parent>
<properties>
<elasticsearch.version>0.90.7-SNAPSHOT</elasticsearch.version>
</properties>
<repositories>
<repository>
<id>sonatype</id>
<url>http://oss.sonatype.org/content/repositories/releases/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.11</version>
<configuration>
<includes>
<include>**/*Tests.java</include>
</includes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.1.2</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.3</version>
<configuration>
<appendAssemblyId>false</appendAssemblyId>
<outputDirectory>${project.build.directory}/releases/</outputDirectory>
<descriptors>
<descriptor>${basedir}/src/main/assemblies/plugin.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -1,17 +0,0 @@
<assembly>
<id>plugin</id>
<formats>
<format>zip</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<outputDirectory>/</outputDirectory>
<useProjectArtifact>true</useProjectArtifact>
<useTransitiveFiltering>true</useTransitiveFiltering>
<excludes>
<exclude>org.elasticsearch:elasticsearch</exclude>
</excludes>
</dependencySet>
</dependencySets>
</assembly>

View File

@ -1,51 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.AbstractPlugin;
import java.util.ArrayList;
import java.util.Collection;
/**
 * Marvel monitor plugin entry point. Wires the {@link StatsExportersService} into the
 * node as an eager singleton and registers it as a lifecycle service so it is started
 * and stopped together with the node.
 */
public class Plugin extends AbstractPlugin {

    public Plugin() {
    }

    @Override
    public String name() {
        return "Elasticsearch marvel - monitor";
    }

    @Override
    public String description() {
        return "Monitoring with an elastic sauce";
    }

    @Override
    public Collection<Module> modules(Settings settings) {
        // single binding: the exporters service, created eagerly at node startup
        Module monitorModule = new AbstractModule() {
            @Override
            protected void configure() {
                bind(StatsExportersService.class).asEagerSingleton();
            }
        };
        return ImmutableList.of(monitorModule);
    }

    @Override
    public Collection<Class<? extends LifecycleComponent>> services() {
        // the node lifecycle will start/stop this component for us
        ArrayList<Class<? extends LifecycleComponent>> services =
                new ArrayList<Class<? extends LifecycleComponent>>(1);
        services.add(StatsExportersService.class);
        return services;
    }
}

View File

@ -1,228 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.marvel.monitor.annotation.Annotation;
import org.elasticsearch.marvel.monitor.annotation.ShardEventAnnotation;
import org.elasticsearch.marvel.monitor.exporter.ESExporter;
import org.elasticsearch.marvel.monitor.exporter.StatsExporter;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InternalIndicesService;
import org.elasticsearch.node.service.NodeService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
public class StatsExportersService extends AbstractLifecycleComponent<StatsExportersService> {
private final InternalIndicesService indicesService;
private final NodeService nodeService;
private final ClusterService clusterService;
private final Client client;
private final IndicesLifecycle.Listener indicesLifeCycleListener;
private volatile ExportingWorker exp;
private volatile Thread thread;
private final TimeValue interval;
private Collection<StatsExporter> exporters;
private final BlockingQueue<Annotation> pendingAnnotationsQueue;
@Inject
public StatsExportersService(Settings settings, IndicesService indicesService,
NodeService nodeService, ClusterService clusterService,
Client client,
Discovery discovery) {
super(settings);
this.indicesService = (InternalIndicesService) indicesService;
this.clusterService = clusterService;
this.nodeService = nodeService;
this.interval = componentSettings.getAsTime("interval", TimeValue.timeValueSeconds(5));
this.client = client;
StatsExporter esExporter = new ESExporter(settings.getComponentSettings(ESExporter.class), discovery);
this.exporters = ImmutableSet.of(esExporter);
indicesLifeCycleListener = new IndicesLifeCycleListener();
pendingAnnotationsQueue = ConcurrentCollections.newBlockingQueue();
}
@Override
protected void doStart() throws ElasticSearchException {
for (StatsExporter e : exporters)
e.start();
this.exp = new ExportingWorker();
this.thread = new Thread(exp, EsExecutors.threadName(settings, "monitor"));
this.thread.setDaemon(true);
this.thread.start();
indicesService.indicesLifecycle().addListener(indicesLifeCycleListener);
}
@Override
protected void doStop() throws ElasticSearchException {
this.exp.closed = true;
this.thread.interrupt();
try {
this.thread.join(60000);
} catch (InterruptedException e) {
// we don't care...
}
for (StatsExporter e : exporters)
e.stop();
indicesService.indicesLifecycle().removeListener(indicesLifeCycleListener);
}
@Override
protected void doClose() throws ElasticSearchException {
for (StatsExporter e : exporters)
e.close();
}
class ExportingWorker implements Runnable {
volatile boolean closed;
@Override
public void run() {
while (!closed) {
// sleep first to allow node to complete initialization before collecting the first start
try {
Thread.sleep(interval.millis());
} catch (InterruptedException e) {
// ignore, if closed, good....
}
// do the actual export..., go over the actual exporters list and...
try {
exportNodeStats();
exportShardStats();
exportAnnotations();
if (clusterService.state().nodes().localNodeMaster()) {
exportIndicesStats();
}
} catch (Throwable t) {
logger.error("Background thread had an uncaught exception:", t);
}
}
logger.debug("shutting down worker, exporting pending annotation");
exportAnnotations();
logger.debug("worker shutdown");
}
private void exportIndicesStats() {
logger.debug("local node is master, exporting aggregated stats");
IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats().all().get();
for (StatsExporter e : exporters) {
try {
e.exportIndicesStats(indicesStatsResponse);
} catch (Throwable t) {
logger.error("StatsExporter [{}] has thrown an exception:", t, e.name());
}
}
}
private void exportAnnotations() {
logger.debug("Exporting annotations");
ArrayList<Annotation> annotationsList = new ArrayList<Annotation>(pendingAnnotationsQueue.size());
pendingAnnotationsQueue.drainTo(annotationsList);
Annotation[] annotations = new Annotation[annotationsList.size()];
annotationsList.toArray(annotations);
for (StatsExporter e : exporters) {
try {
e.exportAnnotations(annotations);
} catch (Throwable t) {
logger.error("StatsExporter [{}] has thrown an exception:", t, e.name());
}
}
}
private void exportShardStats() {
logger.debug("Collecting shard stats");
ShardStats[] shardStatsArray = indicesService.shardStats(CommonStatsFlags.ALL);
logger.debug("Exporting shards stats");
for (StatsExporter e : exporters) {
try {
e.exportShardStats(shardStatsArray);
} catch (Throwable t) {
logger.error("StatsExporter [{}] has thrown an exception:", t, e.name());
}
}
}
private void exportNodeStats() {
logger.debug("Collecting node stats");
NodeStats nodeStats = nodeService.stats();
logger.debug("Exporting node stats");
for (StatsExporter e : exporters) {
try {
e.exportNodeStats(nodeStats);
} catch (Throwable t) {
logger.error("StatsExporter [{}] has thrown an exception:", t, e.name());
}
}
}
}
class IndicesLifeCycleListener extends IndicesLifecycle.Listener {
@Override
public void afterIndexShardStarted(IndexShard indexShard) {
pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.STARTED,
indexShard.shardId(), indexShard.routingEntry()));
}
@Override
public void beforeIndexShardCreated(ShardId shardId) {
pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CREATED,
shardId, null));
}
@Override
public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard) {
pendingAnnotationsQueue.add(new ShardEventAnnotation(System.currentTimeMillis(), ShardEventAnnotation.EventType.CLOSED,
indexShard.shardId(), indexShard.routingEntry()));
}
}
}

View File

@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor.annotation;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.joda.time.format.DateTimeFormatter;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
 * Base class for Marvel annotations: timestamped events that are exported next to the
 * regular stats documents. Subclasses supply a type tag and a short description.
 */
public abstract class Annotation {

    // shared joda "date_time" printer used to render timestamps
    public final static DateTimeFormatter datePrinter = Joda.forPattern("date_time").printer();

    protected long timestamp;

    public Annotation(long timestamp) {
        this.timestamp = timestamp;
    }

    public long timestamp() {
        return timestamp;
    }

    /**
     * @return annotation's type as a short string without spaces
     */
    public abstract String type();

    /**
     * should return a short string based description of the annotation
     */
    abstract String conciseDescription();

    @Override
    public String toString() {
        StringBuilder description = new StringBuilder("[");
        description.append(type()).append("] annotation: [");
        description.append(conciseDescription()).append("]");
        return description.toString();
    }

    /** Serializes the common fields (@timestamp, message) into the given builder. */
    public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return builder
                .field("@timestamp", datePrinter.print(timestamp))
                .field("message", conciseDescription());
    }
}

View File

@ -1,57 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor.annotation;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
/**
 * Annotation describing a shard lifecycle event: creation, start or close of a shard.
 */
public class ShardEventAnnotation extends Annotation {

    public enum EventType {
        CREATED,
        STARTED,
        CLOSED
    }

    private final ShardId shardId;
    private final ShardRouting shardRouting;
    private EventType event;

    public ShardEventAnnotation(long timestamp, EventType event, ShardId shardId, ShardRouting shardRouting) {
        super(timestamp);
        this.event = event;
        this.shardId = shardId;
        this.shardRouting = shardRouting;
    }

    @Override
    public String type() {
        return "shard_event";
    }

    @Override
    String conciseDescription() {
        // prefer the richer routing info when available, fall back to the bare shard id
        if (shardRouting != null) {
            return "[" + event + "]" + shardRouting;
        }
        return "[" + event + "]" + shardId;
    }

    @Override
    public XContentBuilder addXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException {
        super.addXContentBody(builder, params);
        builder.field("event", event)
                .field("index", shardId.index())
                .field("shard_id", shardId.id());
        if (shardRouting != null) {
            builder.field("routing");
            shardRouting.toXContent(builder, params);
        }
        return builder;
    }
}

View File

@ -1,526 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor.exporter;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.joda.time.format.DateTimeFormat;
import org.elasticsearch.common.joda.time.format.DateTimeFormatter;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.smile.SmileXContent;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.marvel.monitor.annotation.Annotation;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.URL;
import java.util.Map;
/**
 * {@link StatsExporter} that ships the collected stats and annotations to one or more
 * remote elasticsearch hosts over HTTP via the bulk API, SMILE encoded. Before the
 * first export an index template is verified/installed on the target cluster.
 */
public class ESExporter extends AbstractLifecycleComponent<ESExporter> implements StatsExporter<ESExporter> {

    // target hosts ("host:port"), tried in order until a connection succeeds
    final String[] hosts;
    final String indexPrefix;
    final DateTimeFormatter indexTimeFormatter;
    // connect timeout in milliseconds (HttpURLConnection.setConnectTimeout expects ms)
    final int timeout;

    final Discovery discovery;
    final String hostname;

    // TODO: logger name is not good now. Figure out why.
    final ESLogger logger = ESLoggerFactory.getLogger(ESExporter.class.getName());

    public final static DateTimeFormatter defaultDatePrinter = Joda.forPattern("date_time").printer();

    // flipped to true once the index template has been verified/installed on the target
    boolean checkedForIndexTemplate = false;

    final NodeStatsRenderer nodeStatsRenderer;
    final ShardStatsRenderer shardStatsRenderer;
    final IndexStatsRenderer indexStatsRenderer;
    final IndicesStatsRenderer indicesStatsRenderer;
    final AnnotationsRenderer annotationsRenderer;

    public ESExporter(Settings settings, Discovery discovery) {
        super(settings);

        this.discovery = discovery;
        InetAddress address = NetworkUtils.getLocalAddress();
        this.hostname = address == null ? null : address.getHostName();

        hosts = settings.getAsArray("hosts", new String[]{"localhost:9200"});
        indexPrefix = settings.get("index.prefix", "marvel");
        String indexTimeFormat = settings.get("index.timeformat", "YYYY.MM.dd");
        indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat).withZoneUTC();

        // BUGFIX: setConnectTimeout() takes milliseconds; the previous seconds() call
        // silently turned the default 6000ms into a 6ms connect timeout.
        timeout = (int) settings.getAsTime("timeout", new TimeValue(6000)).millis();

        nodeStatsRenderer = new NodeStatsRenderer();
        shardStatsRenderer = new ShardStatsRenderer();
        indexStatsRenderer = new IndexStatsRenderer();
        indicesStatsRenderer = new IndicesStatsRenderer();
        annotationsRenderer = new AnnotationsRenderer();

        logger.info("ESExporter initialized. Targets: {}, index prefix [{}], index time format [{}]", hosts, indexPrefix, indexTimeFormat);
    }

    @Override
    public String name() {
        return "ESExporter";
    }

    @Override
    public void exportNodeStats(NodeStats nodeStats) {
        nodeStatsRenderer.reset(nodeStats);
        exportXContent(nodeStatsRenderer);
    }

    @Override
    public void exportShardStats(ShardStats[] shardStatsArray) {
        shardStatsRenderer.reset(shardStatsArray);
        exportXContent(shardStatsRenderer);
    }

    @Override
    public void exportIndicesStats(IndicesStatsResponse indicesStats) {
        Map<String, IndexStats> perIndexStats = indicesStats.getIndices();
        indexStatsRenderer.reset(perIndexStats.values().toArray(new IndexStats[perIndexStats.size()]));
        indicesStatsRenderer.reset(indicesStats.getTotal(), indicesStats.getPrimaries());
        logger.debug("exporting index_stats + indices_stats");
        HttpURLConnection conn = openExportingConnection();
        if (conn == null) {
            return;
        }
        try {
            // both renderers go into the same bulk request
            addXContentRendererToConnection(conn, indexStatsRenderer);
            addXContentRendererToConnection(conn, indicesStatsRenderer);
            sendCloseExportingConnection(conn);
        } catch (IOException e) {
            logger.error("error sending data", e);
            return;
        }
    }

    @Override
    public void exportAnnotations(Annotation[] annotations) {
        annotationsRenderer.reset(annotations);
        exportXContent(annotationsRenderer);
    }

    /**
     * Opens a bulk-indexing connection to the first reachable host, making sure the
     * index template exists first. Returns null when no host is reachable or the
     * template could not yet be verified.
     */
    private HttpURLConnection openExportingConnection() {
        if (!checkedForIndexTemplate) {
            if (!checkForIndexTemplate()) {
                logger.debug("no template defined yet. skipping");
                return null;
            }
        }

        logger.trace("setting up an export connection");
        HttpURLConnection conn = openConnection("POST", "/_bulk", XContentType.SMILE.restContentType());
        if (conn == null) {
            logger.error("could not connect to any configured elasticsearch instances: [{}]", hosts);
        }
        return conn;
    }

    // writes a bulk action line + source line pair for every document the renderer produces
    private void addXContentRendererToConnection(HttpURLConnection conn,
                                                 MultiXContentRenderer renderer) throws IOException {
        OutputStream os = conn.getOutputStream();
        // TODO: find a way to disable builder's substream flushing or something neat solution
        for (int i = 0; i < renderer.length(); i++) {
            XContentBuilder builder = XContentFactory.smileBuilder(os);
            builder.startObject().startObject("index")
                    .field("_index", getIndexName()).field("_type", renderer.type(i)).endObject().endObject();
            builder.flush();
            os.write(SmileXContent.smileXContent.streamSeparator());

            builder = XContentFactory.smileBuilder(os);
            builder.humanReadable(false);
            renderer.render(i, builder);
            builder.flush();
            os.write(SmileXContent.smileXContent.streamSeparator());
        }
    }

    private void sendCloseExportingConnection(HttpURLConnection conn) throws IOException {
        logger.trace("sending exporting content");
        OutputStream os = conn.getOutputStream();
        os.close();
        if (conn.getResponseCode() != 200) {
            logConnectionError("remote target didn't respond with 200 OK", conn);
        } else {
            conn.getInputStream().close(); // close and release to connection pool.
        }
    }

    private void exportXContent(MultiXContentRenderer xContentRenderer) {
        if (xContentRenderer.length() == 0) {
            return;
        }

        HttpURLConnection conn = openExportingConnection();
        if (conn == null) {
            return;
        }
        try {
            addXContentRendererToConnection(conn, xContentRenderer);
            sendCloseExportingConnection(conn);
        } catch (IOException e) {
            logger.error("error sending data", e);
            return;
        }
    }

    @Override
    protected void doStart() throws ElasticSearchException {
    }

    @Override
    protected void doStop() throws ElasticSearchException {
    }

    @Override
    protected void doClose() throws ElasticSearchException {
    }

    // index name is time based, e.g. "marvel-2013.11.18" (formatter is UTC)
    private String getIndexName() {
        return indexPrefix + "-" + indexTimeFormatter.print(System.currentTimeMillis());
    }

    private HttpURLConnection openConnection(String method, String uri) {
        return openConnection(method, uri, null);
    }

    /**
     * Tries each configured host in order and returns the first successfully opened
     * connection, or null when every host fails.
     */
    private HttpURLConnection openConnection(String method, String uri, String contentType) {
        for (String host : hosts) {
            try {
                URL templateUrl = new URL("http://" + host + uri);
                HttpURLConnection conn = (HttpURLConnection) templateUrl.openConnection();
                conn.setRequestMethod(method);
                conn.setConnectTimeout(timeout);
                if (contentType != null) {
                    // BUGFIX: honor the caller supplied content type; the parameter was
                    // previously ignored and SMILE was always sent.
                    conn.setRequestProperty("Content-Type", contentType);
                }
                conn.setUseCaches(false);
                if (method.equalsIgnoreCase("POST") || method.equalsIgnoreCase("PUT")) {
                    conn.setDoOutput(true);
                }
                conn.connect();

                return conn;
            } catch (IOException e) {
                logger.error("error connecting to [{}]", e, host);
            }
        }

        return null;
    }

    /**
     * Verifies the marvel index template exists on the target, installing it when
     * missing. Returns false (leaving checkedForIndexTemplate unset) when the target
     * cannot be reached, so the check is retried on the next export.
     */
    private boolean checkForIndexTemplate() {
        try {
            String templateName = "marvel.monitor." + indexPrefix;
            logger.debug("checking if target has template [{}]", templateName);
            // DO HEAD REQUEST, when elasticsearch supports it
            HttpURLConnection conn = openConnection("GET", "/_template/" + templateName);
            if (conn == null) {
                logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts);
                return false;
            }

            boolean hasTemplate = conn.getResponseCode() == 200;

            // nothing there, lets create it
            if (!hasTemplate) {
                logger.debug("no template found in elasticsearch for [{}]. Adding...", templateName);
                conn = openConnection("PUT", "/_template/" + templateName, XContentType.SMILE.restContentType());
                // BUGFIX: openConnection returns null when all hosts fail; guard before use
                if (conn == null) {
                    logger.error("Could not connect to any configured elasticsearch instances: [{}]", hosts);
                    return false;
                }
                OutputStream os = conn.getOutputStream();
                XContentBuilder builder = XContentFactory.smileBuilder(os);
                builder.startObject();
                builder.field("template", indexPrefix + "*");
                builder.startObject("mappings").startObject("_default_");
                builder.startArray("dynamic_templates").startObject().startObject("string_fields")
                        .field("match", "*")
                        .field("match_mapping_type", "string")
                        .startObject("mapping").field("index", "not_analyzed").endObject()
                        .endObject().endObject().endArray();
                builder.endObject().endObject(); // mapping + root object.
                builder.close();
                os.close();

                if (conn.getResponseCode() != 200) {
                    logConnectionError("error adding index template to elasticsearch", conn);
                } else {
                    // BUGFIX: getInputStream() throws on error responses; only consume it on
                    // success (same pattern as sendCloseExportingConnection)
                    conn.getInputStream().close(); // close and release to connection pool.
                }
            }
            checkedForIndexTemplate = true;
        } catch (IOException e) {
            logger.error("error when checking/adding template to elasticsearch", e);
            return false;
        }
        return true;
    }

    // logs the response code/message plus whatever the error stream contains
    private void logConnectionError(String msg, HttpURLConnection conn) {
        InputStream inputStream = conn.getErrorStream();
        String err = "";
        // BUGFIX: getErrorStream() returns null when no error body exists; also close the Scanner
        if (inputStream != null) {
            java.util.Scanner s = new java.util.Scanner(inputStream, "UTF-8").useDelimiter("\\A");
            err = s.hasNext() ? s.next() : "";
            s.close();
        }
        try {
            logger.error("{} response code [{} {}]. content: {}", msg, conn.getResponseCode(), conn.getResponseMessage(), err);
        } catch (IOException e) {
            logger.error("connection had an error while reporting the error. tough life.");
        }
    }

    /** A renderer producing zero or more typed documents to be bulk indexed. */
    interface MultiXContentRenderer {

        int length();

        String type(int i);

        void render(int index, XContentBuilder builder) throws IOException;
    }

    private void addNodeInfo(XContentBuilder builder) throws IOException {
        addNodeInfo(builder, "node");
    }

    // adds a sub object describing the local node: id, name, address, hostname, attributes
    private void addNodeInfo(XContentBuilder builder, String fieldname) throws IOException {
        builder.startObject(fieldname);
        DiscoveryNode node = discovery.localNode();
        builder.field("id", node.id());
        builder.field("name", node.name());
        builder.field("transport_address", node.address());

        if (node.address().uniqueAddressTypeId() == 1) { // InetSocket
            InetSocketTransportAddress address = (InetSocketTransportAddress) node.address();
            InetAddress inetAddress = address.address().getAddress();
            if (inetAddress != null) {
                builder.field("ip", inetAddress.getHostAddress());
            }
        }

        if (hostname != null) {
            builder.field("hostname", hostname);
        }

        if (!node.attributes().isEmpty()) {
            builder.startObject("attributes");
            for (Map.Entry<String, String> attr : node.attributes().entrySet()) {
                builder.field(attr.getKey(), attr.getValue());
            }
            builder.endObject();
        }
        builder.endObject();
    }

    class NodeStatsRenderer implements MultiXContentRenderer {

        NodeStats stats;
        ToXContent.MapParams xContentParams = new ToXContent.MapParams(
                ImmutableMap.of("node_info_format", "none", "load_average_format", "hash"));

        public void reset(NodeStats stats) {
            this.stats = stats;
        }

        @Override
        public int length() {
            return 1;
        }

        @Override
        public String type(int i) {
            return "node_stats";
        }

        @Override
        public void render(int index, XContentBuilder builder) throws IOException {
            builder.startObject();
            builder.field("@timestamp", defaultDatePrinter.print(stats.getTimestamp()));
            addNodeInfo(builder);
            stats.toXContent(builder, xContentParams);
            builder.endObject();
        }
    }

    class ShardStatsRenderer implements MultiXContentRenderer {

        ShardStats[] stats;
        long collectionTime;
        ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS;

        public void reset(ShardStats[] stats) {
            this.stats = stats;
            collectionTime = System.currentTimeMillis();
        }

        @Override
        public int length() {
            return stats == null ? 0 : stats.length;
        }

        @Override
        public String type(int i) {
            return "shard_stats";
        }

        @Override
        public void render(int index, XContentBuilder builder) throws IOException {
            builder.startObject();
            builder.field("@timestamp", defaultDatePrinter.print(collectionTime));
            ShardRouting shardRouting = stats[index].getShardRouting();
            builder.field("index", shardRouting.index());
            builder.field("shard_id", shardRouting.id());
            builder.field("shard_state", shardRouting.state());
            builder.field("primary", shardRouting.primary());
            addNodeInfo(builder);
            stats[index].getStats().toXContent(builder, xContentParams);
            builder.endObject();
        }
    }

    class IndexStatsRenderer implements MultiXContentRenderer {

        IndexStats[] stats;
        long collectionTime;
        ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS;

        public void reset(IndexStats[] stats) {
            this.stats = stats;
            collectionTime = System.currentTimeMillis();
        }

        @Override
        public int length() {
            return stats == null ? 0 : stats.length;
        }

        @Override
        public String type(int i) {
            return "index_stats";
        }

        @Override
        public void render(int index, XContentBuilder builder) throws IOException {
            builder.startObject();
            builder.field("@timestamp", defaultDatePrinter.print(collectionTime));
            IndexStats indexStats = stats[index];
            builder.field("index", indexStats.getIndex());
            addNodeInfo(builder, "_source_node");
            builder.startObject("primaries");
            indexStats.getPrimaries().toXContent(builder, xContentParams);
            builder.endObject();
            builder.startObject("total");
            indexStats.getTotal().toXContent(builder, xContentParams);
            builder.endObject();
            builder.endObject();
        }
    }

    class IndicesStatsRenderer implements MultiXContentRenderer {

        CommonStats totalStats;
        CommonStats primariesStats;
        long collectionTime;
        ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS;

        public void reset(CommonStats totalStats, CommonStats primariesStats) {
            this.totalStats = totalStats;
            this.primariesStats = primariesStats;
            collectionTime = System.currentTimeMillis();
        }

        @Override
        public int length() {
            return totalStats == null ? 0 : 1;
        }

        @Override
        public String type(int i) {
            return "indices_stats";
        }

        @Override
        public void render(int index, XContentBuilder builder) throws IOException {
            assert index == 0;
            builder.startObject();
            builder.field("@timestamp", defaultDatePrinter.print(collectionTime));
            addNodeInfo(builder, "_source_node");
            builder.startObject("primaries");
            primariesStats.toXContent(builder, xContentParams);
            builder.endObject();
            builder.startObject("total");
            totalStats.toXContent(builder, xContentParams);
            builder.endObject();
            builder.endObject();
        }
    }

    class AnnotationsRenderer implements MultiXContentRenderer {

        Annotation[] annotations;
        ToXContent.Params xContentParams = ToXContent.EMPTY_PARAMS;

        public void reset(Annotation[] annotations) {
            this.annotations = annotations;
        }

        @Override
        public int length() {
            return annotations == null ? 0 : annotations.length;
        }

        @Override
        public String type(int i) {
            // annotation documents use the annotation's own type as the document type
            return annotations[i].type();
        }

        @Override
        public void render(int index, XContentBuilder builder) throws IOException {
            builder.startObject();
            addNodeInfo(builder, "node");
            annotations[index].addXContentBody(builder, xContentParams);
            builder.endObject();
        }
    }
}

View File

@ -1,28 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.monitor.exporter;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.marvel.monitor.annotation.Annotation;
import java.util.Map;
/**
 * Contract for components that ship collected cluster statistics and annotations to
 * some destination. Implementations are lifecycle components: callers start them
 * before the first export call and stop/close them when the monitor shuts down.
 */
public interface StatsExporter<T> extends LifecycleComponent<T> {

    /** @return a short name identifying this exporter (used in error log messages) */
    String name();

    /** Exports the local node's stats sample. */
    void exportNodeStats(NodeStats nodeStats);

    /** Exports per-shard stats collected from the local node. */
    void exportShardStats(ShardStats[] shardStatsArray);

    /** Exports cluster-wide aggregated index stats (invoked on the master node only). */
    void exportIndicesStats(IndicesStatsResponse indicesStats);

    /** Exports queued annotations; the array may be empty. */
    void exportAnnotations(Annotation[] annotations);
}

View File

@ -1 +0,0 @@
plugin=org.elasticsearch.marvel.monitor.Plugin