Marvel: Add Node Stats collector
Original commit: elastic/x-pack-elasticsearch@1381905198
Parent: 5b519935be
Commit: 5a6e142bc8

CollectorModule.java
@@ -10,6 +10,7 @@ import org.elasticsearch.common.inject.multibindings.Multibinder;
 import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
 import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
+import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
 
 import java.util.HashSet;
 import java.util.Set;

@@ -23,6 +24,7 @@ public class CollectorModule extends AbstractModule {
         registerCollector(IndexStatsCollector.class);
         registerCollector(ClusterStatsCollector.class);
         registerCollector(ClusterStateCollector.class);
+        registerCollector(NodeStatsCollector.class);
     }
 
     @Override

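Note: the body of registerCollector() is not shown in this diff. Below is a minimal sketch of how such a module could wire the registered collectors through the Guice Multibinder imported above; the Collector supertype and the configure() layout are assumptions, not taken from the actual CollectorModule.

// Hypothetical sketch only; the real CollectorModule body is not part of this commit.
// "Collector" stands in for whatever common supertype the Marvel collectors share.
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;

import java.util.HashSet;
import java.util.Set;

public class ExampleCollectorModule extends AbstractModule {

    public interface Collector {}                                            // assumed supertype
    public static class ExampleNodeStatsCollector implements Collector {}    // stand-in for NodeStatsCollector

    private final Set<Class<? extends Collector>> collectors = new HashSet<>();

    @Override
    protected void configure() {
        registerCollector(ExampleNodeStatsCollector.class);

        // Bind every registered class as an eager singleton and add it to a
        // Multibinder so the agent can inject the complete Set<Collector>.
        Multibinder<Collector> binder = Multibinder.newSetBinder(binder(), Collector.class);
        for (Class<? extends Collector> collector : collectors) {
            bind(collector).asEagerSingleton();
            binder.addBinding().to(collector);
        }
    }

    private void registerCollector(Class<? extends Collector> collectorClass) {
        collectors.add(collectorClass);
    }
}
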
New file: NodeStatsCollector.java
@@ -0,0 +1,87 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.node;

import com.google.common.collect.ImmutableList;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.bootstrap.Bootstrap;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.common.inject.ConfigurationException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Provider;
import org.elasticsearch.common.inject.ProvisionException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.settings.MarvelSettingsService;
import org.elasticsearch.node.service.NodeService;

import java.util.Collection;

/**
 * Collector for node statistics.
 * <p/>
 * This collector runs on every non-client node and collects
 * a {@link NodeStatsMarvelDoc} document for each node of the cluster.
 */
public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {

    public static final String NAME = "node-stats-collector";
    public static final String TYPE = "marvel_node_stats";

    private final NodeService nodeService;
    private final DiscoveryService discoveryService;

    // Use a provider in order to avoid Guice circular injection
    // issues because AllocationDecider is not an interface and cannot be proxied
    private final Provider<DiskThresholdDecider> diskThresholdDeciderProvider;

    @Inject
    public NodeStatsCollector(Settings settings, ClusterService clusterService, ClusterName clusterName,
                              MarvelSettingsService marvelSettings,
                              NodeService nodeService, DiscoveryService discoveryService,
                              Provider<DiskThresholdDecider> diskThresholdDeciderProvider) {
        super(settings, NAME, clusterService, clusterName, marvelSettings);
        this.nodeService = nodeService;
        this.discoveryService = discoveryService;
        this.diskThresholdDeciderProvider = diskThresholdDeciderProvider;
    }

    @Override
    protected Collection<MarvelDoc> doCollect() throws Exception {
        ImmutableList.Builder<MarvelDoc> results = ImmutableList.builder();

        NodeStats nodeStats = nodeService.stats();

        DiskThresholdDecider diskThresholdDecider = null;
        try {
            diskThresholdDecider = diskThresholdDeciderProvider.get();
        } catch (ProvisionException | ConfigurationException e) {
            logger.warn("unable to retrieve disk threshold decider information", e);
        }

        // Here we are calling directly the DiskThresholdDecider to retrieve the high watermark value
        // It would be nicer to use a settings API like documented in #6732
        Double diskThresholdWatermarkHigh = (diskThresholdDecider != null) ? 100.0 - diskThresholdDecider.getFreeDiskThresholdHigh() : -1;
        boolean diskThresholdDeciderEnabled = (diskThresholdDecider != null) && diskThresholdDecider.isEnabled();

        results.add(buildMarvelDoc(clusterName.value(), TYPE, System.currentTimeMillis(),
                discoveryService.localNode().id(), clusterService.state().nodes().localNodeMaster(), nodeStats,
                Bootstrap.isMemoryLocked(), diskThresholdWatermarkHigh, diskThresholdDeciderEnabled));

        return results.build();
    }

    protected MarvelDoc buildMarvelDoc(String clusterName, String type, long timestamp,
                                       String nodeId, boolean nodeMaster, NodeStats nodeStats,
                                       boolean mlockall, Double diskThresholdWaterMarkHigh, boolean diskThresholdDeciderEnabled) {
        return NodeStatsMarvelDoc.createMarvelDoc(clusterName, type, timestamp, nodeId, nodeMaster, nodeStats, mlockall, diskThresholdWaterMarkHigh, diskThresholdDeciderEnabled);
    }
}

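Note on the watermark conversion in doCollect() above: DiskThresholdDecider exposes the high watermark as the minimum free-disk percentage, so the collector converts it back into a used-disk percentage. A small illustration, assuming getFreeDiskThresholdHigh() returns 10.0 when the high watermark is configured at 90%.

// Illustration only: the 10.0 input is an assumed return value of getFreeDiskThresholdHigh()
// for a node configured with a 90% high disk watermark.
public class WatermarkConversionExample {
    public static void main(String[] args) {
        double freeDiskThresholdHigh = 10.0;                                // "keep at least 10% free"
        double diskThresholdWatermarkHigh = 100.0 - freeDiskThresholdHigh;  // 90.0, i.e. "high watermark at 90% used"
        System.out.println(diskThresholdWatermarkHigh);                     // exported as node_stats.disk_threshold_watermark_high
    }
}
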
New file: NodeStatsMarvelDoc.java
@@ -0,0 +1,75 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.node;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;

public class NodeStatsMarvelDoc extends MarvelDoc<NodeStatsMarvelDoc.Payload> {

    private final Payload payload;

    public NodeStatsMarvelDoc(String clusterName, String type, long timestamp, Payload payload) {
        super(clusterName, type, timestamp);
        this.payload = payload;
    }

    @Override
    public Payload payload() {
        return payload;
    }

    public static NodeStatsMarvelDoc createMarvelDoc(String clusterName, String type, long timestamp,
                                                     String nodeId, boolean nodeMaster, NodeStats nodeStats,
                                                     boolean mlockall, Double diskThresholdWaterMarkHigh, boolean diskThresholdDeciderEnabled) {
        return new NodeStatsMarvelDoc(clusterName, type, timestamp, new Payload(nodeId, nodeMaster, nodeStats, mlockall, diskThresholdWaterMarkHigh, diskThresholdDeciderEnabled));
    }

    public static class Payload {

        private final String nodeId;
        private final boolean nodeMaster;
        private final NodeStats nodeStats;

        private final boolean mlockall;
        private final Double diskThresholdWaterMarkHigh;
        private final boolean diskThresholdDeciderEnabled;

        Payload(String nodeId, boolean nodeMaster, NodeStats nodeStats, boolean mlockall, Double diskThresholdWaterMarkHigh, boolean diskThresholdDeciderEnabled) {
            this.nodeId = nodeId;
            this.nodeMaster = nodeMaster;
            this.nodeStats = nodeStats;
            this.mlockall = mlockall;
            this.diskThresholdWaterMarkHigh = diskThresholdWaterMarkHigh;
            this.diskThresholdDeciderEnabled = diskThresholdDeciderEnabled;
        }

        public String getNodeId() {
            return nodeId;
        }

        public boolean isNodeMaster() {
            return nodeMaster;
        }

        public NodeStats getNodeStats() {
            return nodeStats;
        }

        public boolean isMlockall() {
            return mlockall;
        }

        public Double getDiskThresholdWaterMarkHigh() {
            return diskThresholdWaterMarkHigh;
        }

        public boolean isDiskThresholdDeciderEnabled() {
            return diskThresholdDeciderEnabled;
        }
    }
}

HttpESExporter.java
@@ -195,6 +195,10 @@ public class HttpESExporter extends AbstractExporter<HttpESExporter> implements
 
         // Get the appropriate renderer in order to render the MarvelDoc
         Renderer renderer = registry.renderer(marvelDoc.type());
+        if (renderer == null) {
+            logger.warn("unable to render marvel document of type [{}]: no renderer found in registry", marvelDoc.type());
+            return;
+        }
 
         try (XContentBuilder builder = new XContentBuilder(xContentType.xContent(), os)) {
 

RendererModule.java
@@ -10,9 +10,11 @@ import org.elasticsearch.common.inject.multibindings.MapBinder;
 import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
 import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
+import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
 import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStateRenderer;
 import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStatsRenderer;
 import org.elasticsearch.marvel.agent.renderer.indices.IndexStatsRenderer;
+import org.elasticsearch.marvel.agent.renderer.node.NodeStatsRenderer;
 
 import java.util.HashMap;
 import java.util.Map;

@@ -39,6 +41,9 @@ public class RendererModule extends AbstractModule {
         bind(ClusterStateRenderer.class).asEagerSingleton();
         mbinder.addBinding(ClusterStateCollector.TYPE).to(ClusterStateRenderer.class);
+
+        bind(NodeStatsRenderer.class).asEagerSingleton();
+        mbinder.addBinding(NodeStatsCollector.TYPE).to(NodeStatsRenderer.class);
 
         for (Map.Entry<String, Class<? extends Renderer>> entry : renderers.entrySet()) {
             bind(entry.getValue()).asEagerSingleton();
             mbinder.addBinding(entry.getKey()).to(entry.getValue());

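Note: the MapBinder above keys each renderer on its collector's TYPE constant, which is what lets HttpESExporter resolve a renderer via registry.renderer(marvelDoc.type()) in the earlier hunk. The registry class itself is not part of this diff; a minimal sketch, assuming it simply wraps the injected map, could look like this.

// Hypothetical sketch; the registry class injected into HttpESExporter is not shown in this commit.
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.marvel.agent.renderer.Renderer;

import java.util.Map;

public class ExampleRendererRegistry {

    private final Map<String, Renderer> renderers;

    @Inject
    public ExampleRendererRegistry(Map<String, Renderer> renderers) {
        // Guice materializes this map from the MapBinder entries registered above,
        // e.g. "marvel_node_stats" -> NodeStatsRenderer
        this.renderers = renderers;
    }

    public Renderer renderer(String type) {
        return renderers.get(type);   // null when no renderer is registered; the exporter logs a warning and skips
    }
}
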
New file: NodeStatsRenderer.java
@@ -0,0 +1,86 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.node;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;

import java.io.IOException;

public class NodeStatsRenderer extends AbstractRenderer<NodeStatsMarvelDoc> {

    private static final String[] FILTERS = {
            // Extra information
            "node_stats.node_id",
            "node_stats.node_master",
            "node_stats.mlockall",
            "node_stats.disk_threshold_enabled",
            "node_stats.disk_threshold_watermark_high",
            // Node Stats
            "node_stats.indices.docs.count",
            "node_stats.indices.store.size_in_bytes",
            "node_stats.indices.store.throttle_time_in_millis",
            "node_stats.indices.indexing.throttle_time_in_millis",
            "node_stats.indices.segments.count",
            "node_stats.fs.total.total_in_bytes",
            "node_stats.fs.total.free_in_bytes",
            "node_stats.fs.total.available_in_bytes",
            "node_stats.process.max_file_descriptors",
            "node_stats.process.open_file_descriptors",
            "node_stats.jvm.mem.heap_used_in_bytes",
            "node_stats.jvm.mem.heap_used_percent",
            "node_stats.jvm.gc.collectors.young",
            "node_stats.jvm.gc.collectors.young.collection_count",
            "node_stats.jvm.gc.collectors.young.collection_time_in_millis",
            "node_stats.jvm.gc.collectors.old",
            "node_stats.jvm.gc.collectors.old.collection_count",
            "node_stats.jvm.gc.collectors.old.collection_time_in_millis",
            "node_stats.thread_pool.index.rejected",
            "node_stats.thread_pool.search.rejected",
            "node_stats.thread_pool.bulk.rejected",
    };

    public NodeStatsRenderer() {
        super(FILTERS, true);
    }

    @Override
    protected void doRender(NodeStatsMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject(Fields.NODE_STATS);

        NodeStatsMarvelDoc.Payload payload = marvelDoc.payload();
        if (payload != null) {

            builder.field(Fields.NODE_ID, payload.getNodeId());
            builder.field(Fields.NODE_MASTER, payload.isNodeMaster());
            builder.field(Fields.MLOCKALL, payload.isMlockall());
            builder.field(Fields.DISK_THRESHOLD_ENABLED, payload.isDiskThresholdDeciderEnabled());
            builder.field(Fields.DISK_THRESHOLD_WATERMARK_HIGH, payload.getDiskThresholdWaterMarkHigh());

            NodeStats nodeStats = payload.getNodeStats();
            if (nodeStats != null) {
                nodeStats.toXContent(builder, params);
            }
        }
        builder.endObject();
    }

    static final class Fields {
        static final XContentBuilderString NODE_STATS = new XContentBuilderString("node_stats");
        static final XContentBuilderString NODE_ID = new XContentBuilderString("node_id");
        static final XContentBuilderString NODE_MASTER = new XContentBuilderString("node_master");
        static final XContentBuilderString MLOCKALL = new XContentBuilderString("mlockall");
        static final XContentBuilderString DISK_THRESHOLD_ENABLED = new XContentBuilderString("disk_threshold_enabled");
        static final XContentBuilderString DISK_THRESHOLD_WATERMARK_HIGH = new XContentBuilderString("disk_threshold_watermark_high");
    }
}

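Note: the FILTERS array passed to super(FILTERS, true) acts as an include list of dotted paths over the rendered document, which is why the sample document added later in this commit contains only those fields. AbstractRenderer's implementation is not part of this diff; the sketch below illustrates that style of filtering with Elasticsearch's XContentMapValues.filter, which may or may not be what the base class actually uses.

// Illustration of include-path filtering only; AbstractRenderer's real implementation is not in this commit.
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

import java.util.HashMap;
import java.util.Map;

public class FilterExample {
    public static void main(String[] args) {
        Map<String, Object> process = new HashMap<>();
        process.put("open_file_descriptors", 235);
        process.put("timestamp", 1437580442979L);      // not in the include list, dropped below

        Map<String, Object> nodeStats = new HashMap<>();
        nodeStats.put("node_id", "OB5Mr2NGQe6xYCFvMggReg");
        nodeStats.put("process", process);

        Map<String, Object> doc = new HashMap<>();
        doc.put("node_stats", nodeStats);

        String[] includes = { "node_stats.node_id", "node_stats.process.open_file_descriptors" };
        Map<String, Object> filtered = XContentMapValues.filter(doc, includes, Strings.EMPTY_ARRAY);

        // Only node_stats.node_id and node_stats.process.open_file_descriptors survive
        System.out.println(filtered);
    }
}
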
Marvel mappings (JSON)
@@ -103,6 +103,45 @@
           }
         }
       }
+    },
+    "marvel_node_stats": {
+      "properties": {
+        "node_stats": {
+          "properties": {
+            "node_id": {
+              "type": "string",
+              "index": "not_analyzed"
+            },
+            "node_master": {
+              "type": "boolean"
+            },
+            "mlockall": {
+              "type": "boolean"
+            },
+            "disk_threshold_enabled": {
+              "type": "boolean"
+            },
+            "disk_threshold_watermark_high": {
+              "type": "short"
+            },
+            "indices": {
+              "type": "object"
+            },
+            "fs": {
+              "type": "object"
+            },
+            "process": {
+              "type": "object"
+            },
+            "jvm": {
+              "type": "object"
+            },
+            "thread_pool": {
+              "type": "object"
+            }
+          }
+        }
+      }
     }
   }
 }

New file: NodeStatsCollectorTests.java
@@ -0,0 +1,71 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.node;

import org.elasticsearch.bootstrap.Bootstrap;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.common.inject.Provider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.settings.MarvelSettingsService;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import java.util.Collection;

import static org.hamcrest.Matchers.*;

public class NodeStatsCollectorTests extends ElasticsearchIntegrationTest {

    @Test
    public void testNodeStatsCollector() throws Exception {
        String[] nodes = internalCluster().getNodeNames();
        for (String node : nodes) {
            logger.info("--> collecting node stats on node [{}]", node);
            Collection<MarvelDoc> results = newNodeStatsCollector(node).doCollect();
            assertThat(results, hasSize(1));

            MarvelDoc marvelDoc = results.iterator().next();
            assertNotNull(marvelDoc);
            assertThat(marvelDoc, instanceOf(NodeStatsMarvelDoc.class));

            NodeStatsMarvelDoc nodeStatsMarvelDoc = (NodeStatsMarvelDoc) marvelDoc;
            assertThat(nodeStatsMarvelDoc.clusterName(), equalTo(client().admin().cluster().prepareHealth().get().getClusterName()));
            assertThat(nodeStatsMarvelDoc.timestamp(), greaterThan(0L));
            assertThat(nodeStatsMarvelDoc.type(), equalTo(NodeStatsCollector.TYPE));

            NodeStatsMarvelDoc.Payload payload = nodeStatsMarvelDoc.payload();
            assertNotNull(payload);
            assertThat(payload.getNodeId(), equalTo(internalCluster().getInstance(DiscoveryService.class, node).localNode().id()));
            assertThat(payload.isNodeMaster(), equalTo(node.equals(internalCluster().getMasterName())));
            assertThat(payload.isMlockall(), equalTo(Bootstrap.isMemoryLocked()));
            assertNotNull(payload.isDiskThresholdDeciderEnabled());
            assertNotNull(payload.getDiskThresholdWaterMarkHigh());

            assertNotNull(payload.getNodeStats());
            assertThat(payload.getNodeStats().getProcess().getOpenFileDescriptors(), greaterThan(0L));
        }
    }

    private NodeStatsCollector newNodeStatsCollector(final String nodeId) {
        return new NodeStatsCollector(internalCluster().getInstance(Settings.class, nodeId),
                internalCluster().getInstance(ClusterService.class, nodeId),
                internalCluster().getInstance(ClusterName.class, nodeId),
                internalCluster().getInstance(MarvelSettingsService.class, nodeId),
                internalCluster().getInstance(NodeService.class, nodeId),
                internalCluster().getInstance(DiscoveryService.class, nodeId),
                new Provider<DiskThresholdDecider>() {
                    @Override
                    public DiskThresholdDecider get() {
                        return internalCluster().getInstance(DiskThresholdDecider.class, nodeId);
                    }
                });
    }
}

New file: NodeStatsRendererTests.java
@@ -0,0 +1,42 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.node;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;

public class NodeStatsRendererTests extends ElasticsearchSingleNodeTest {

    private static final String SAMPLE_FILE = "/samples/marvel_node_stats.json";

    @Test
    public void testNodeStatsRenderer() throws Exception {
        createIndex("index-0");

        logger.debug("--> retrieving node stats");
        NodeStats nodeStats = getInstanceFromNode(NodeService.class).stats();

        logger.debug("--> creating the node stats marvel document");
        NodeStatsMarvelDoc marvelDoc = NodeStatsMarvelDoc.createMarvelDoc("test", "marvel_node_stats", 1437580442979L,
                "node-0", true, nodeStats, false, 90.0, true);

        logger.debug("--> rendering the document");
        Renderer renderer = new NodeStatsRenderer();
        String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

        logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
        String expected = Streams.copyToStringFromClasspath(SAMPLE_FILE);

        logger.debug("--> comparing both documents, they must have the same structure");
        RendererTestUtils.assertJSONStructure(result, expected);
    }
}

New file: samples/marvel_node_stats.json
@@ -0,0 +1,66 @@
{
  "cluster_name": "test",
  "timestamp": "2015-07-22T15:54:02.979Z",
  "node_stats": {
    "node_id": "OB5Mr2NGQe6xYCFvMggReg",
    "node_master": true,
    "mlockall": false,
    "disk_threshold_enabled": true,
    "disk_threshold_watermark_high": 90.0,
    "indices": {
      "docs": {
        "count": 0
      },
      "store": {
        "size_in_bytes": 0,
        "throttle_time_in_millis": 0
      },
      "indexing": {
        "throttle_time_in_millis": 0
      },
      "segments": {
        "count": 0
      }
    },
    "process": {
      "open_file_descriptors": 235,
      "max_file_descriptors": 4096
    },
    "jvm": {
      "mem": {
        "heap_used_in_bytes": 58976328,
        "heap_used_percent": 1
      },
      "gc": {
        "collectors": {
          "young": {
            "collection_count": 4,
            "collection_time_in_millis": 50
          },
          "old": {
            "collection_count": 1,
            "collection_time_in_millis": 42
          }
        }
      }
    },
    "thread_pool": {
      "index": {
        "rejected": 0
      },
      "bulk": {
        "rejected": 0
      },
      "search": {
        "rejected": 0
      }
    },
    "fs": {
      "total": {
        "total_in_bytes": 486599139328,
        "free_in_bytes": 228134465536,
        "available_in_bytes": 203393003520
      }
    }
  }
}