Marvel: Index Nodes in Time Based Index and Data Index
Closes elastic/elasticsearch#946
Original commit: elastic/x-pack-elasticsearch@0f5721ffbf
parent ff1e6ec776
commit a47adfb270
ClusterStateCollector.java
@@ -9,6 +9,8 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.marvel.agent.collector.AbstractCollector;
@@ -25,18 +27,20 @@ import java.util.List;
 /**
  * Collector for cluster state.
  * <p>
- * This collector runs on the master node only and collects the {@link ClusterStateMarvelDoc} document
+ * This collector runs on the master node only and collects {@link ClusterStateMarvelDoc} document
  * at a given frequency.
  */
 public class ClusterStateCollector extends AbstractCollector<ClusterStateCollector> {

     public static final String NAME = "cluster-state-collector";
     public static final String TYPE = "cluster_state";
+    public static final String NODES_TYPE = "nodes";
+    public static final String NODE_TYPE = "node";

     private final Client client;

     @Inject
     public ClusterStateCollector(Settings settings, ClusterService clusterService, MarvelSettings marvelSettings, MarvelLicensee marvelLicensee,
                                  SecuredClient client) {
         super(settings, NAME, clusterService, marvelSettings, marvelLicensee);
         this.client = client;
@@ -49,12 +53,28 @@ public class ClusterStateCollector extends AbstractCollect

     @Override
     protected Collection<MarvelDoc> doCollect() throws Exception {
-        List<MarvelDoc> results = new ArrayList<>(1);
+        List<MarvelDoc> results = new ArrayList<>(3);

         ClusterState clusterState = clusterService.state();
-        ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().get(marvelSettings.clusterStateTimeout());
+        String clusterUUID = clusterState.metaData().clusterUUID();
+        String stateUUID = clusterState.stateUUID();
+        long timestamp = System.currentTimeMillis();
+
+        // Adds a cluster_state document with associated status
+        ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().get(marvelSettings.clusterStateTimeout());
+        results.add(new ClusterStateMarvelDoc(clusterUUID, TYPE, timestamp, clusterState, clusterHealth.getStatus()));
+
+        DiscoveryNodes nodes = clusterState.nodes();
+        if (nodes != null) {
+            for (DiscoveryNode node : nodes) {
+                // Adds a document for every node in the marvel timestamped index (type "nodes")
+                results.add(new ClusterStateNodeMarvelDoc(clusterUUID, NODES_TYPE, timestamp, stateUUID, node.getId()));
+
+                // Adds a document for every node in the marvel data index (type "node")
+                results.add(new DiscoveryNodeMarvelDoc(MarvelSettings.MARVEL_DATA_INDEX_NAME, NODE_TYPE, node.getId(), clusterUUID, timestamp, node));
+            }
+        }
+
-        results.add(new ClusterStateMarvelDoc(clusterUUID(), TYPE, System.currentTimeMillis(), clusterState, clusterHealth.getStatus()));
         return Collections.unmodifiableCollection(results);
     }
 }
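In summary (an illustrative sketch, not text from the commit): a single doCollect() pass now emits one cluster_state document plus two documents per discovery node, and only the per-node "node" document is addressed to the Marvel data index (document id = node id); the other two carry no explicit index and are left to the exporter's usual timestamped index, which is assumed here. The concrete index names are not spelled out in this diff:

    {
      "cluster_state": "one per collection pass, timestamped marvel index",
      "nodes": "one per discovery node, timestamped marvel index",
      "node": "one per discovery node, marvel data index (document id = node id)"
    }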
ClusterStateNodeMarvelDoc.java (new file)
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.marvel.agent.collector.cluster;
+
+import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
+
+public class ClusterStateNodeMarvelDoc extends MarvelDoc {
+
+    private final String stateUUID;
+    private final String nodeId;
+
+    public ClusterStateNodeMarvelDoc(String clusterUUID, String type, long timestamp, String stateUUID, String nodeId) {
+        super(clusterUUID, type, timestamp);
+        this.stateUUID = stateUUID;
+        this.nodeId = nodeId;
+    }
+
+    public String getStateUUID() {
+        return stateUUID;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+}
DiscoveryNodeMarvelDoc.java (new file)
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.marvel.agent.collector.cluster;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
+
+public class DiscoveryNodeMarvelDoc extends MarvelDoc {
+
+    private final DiscoveryNode node;
+
+    public DiscoveryNodeMarvelDoc(String index, String type, String id, String clusterUUID, long timestamp, DiscoveryNode node) {
+        super(index, type, id, clusterUUID, timestamp);
+        this.node = node;
+    }
+
+    public DiscoveryNode getNode() {
+        return node;
+    }
+}
NodeStatsCollector.java
@@ -9,6 +9,7 @@ package org.elasticsearch.marvel.agent.collector.node;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.bootstrap.BootstrapInfo;
 import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -61,7 +62,7 @@ public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {
         // for nodes that can hold data. Client nodes can collect nodes stats because
         // elasticsearch correctly handles the nodes stats for client nodes.
         return super.shouldCollect()
-                && (clusterService.localNode().isDataNode() == false || nodeEnvironment.hasNodeFile());
+                && (DiscoveryNode.nodeRequiresLocalStorage(settings) == false || nodeEnvironment.hasNodeFile());
     }

     @Override
RendererModule.java
@@ -15,9 +15,7 @@ import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsCollector;
 import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
 import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
-import org.elasticsearch.marvel.agent.renderer.cluster.ClusterInfoRenderer;
-import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStateRenderer;
-import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStatsRenderer;
+import org.elasticsearch.marvel.agent.renderer.cluster.*;
 import org.elasticsearch.marvel.agent.renderer.indices.IndexRecoveryRenderer;
 import org.elasticsearch.marvel.agent.renderer.indices.IndexStatsRenderer;
 import org.elasticsearch.marvel.agent.renderer.indices.IndicesStatsRenderer;
@@ -54,6 +52,8 @@ public class RendererModule extends AbstractModule {

         bind(ClusterStateRenderer.class).asEagerSingleton();
         mbinder.addBinding(ClusterStateCollector.TYPE).to(ClusterStateRenderer.class);
+        mbinder.addBinding(ClusterStateCollector.NODES_TYPE).to(ClusterStateNodeRenderer.class);
+        mbinder.addBinding(ClusterStateCollector.NODE_TYPE).to(DiscoveryNodeRenderer.class);

         bind(ShardsRenderer.class).asEagerSingleton();
         mbinder.addBinding(ShardsCollector.TYPE).to(ShardsRenderer.class);
ClusterStateNodeRenderer.java (new file)
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.marvel.agent.renderer.cluster;
+
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMarvelDoc;
+import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
+
+import java.io.IOException;
+
+public class ClusterStateNodeRenderer extends AbstractRenderer<ClusterStateNodeMarvelDoc> {
+
+    public ClusterStateNodeRenderer() {
+        super(null, false);
+    }
+
+    @Override
+    protected void doRender(ClusterStateNodeMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.field(Fields.STATE_UUID, marvelDoc.getStateUUID());
+        builder.startObject(Fields.NODE);
+        builder.field(Fields.ID, marvelDoc.getNodeId());
+        builder.endObject();
+    }
+
+    static final class Fields {
+        static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
+        static final XContentBuilderString NODE = new XContentBuilderString("node");
+        static final XContentBuilderString ID = new XContentBuilderString("id");
+    }
+}
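For orientation, a rough sketch (assumed, not taken from the commit) of a "nodes" document as this renderer would emit it into the timestamped Marvel index; cluster_uuid and timestamp are written by AbstractRenderer (see the field filters in the tests below), and all values are placeholders:

    {
      "cluster_uuid": "...",
      "timestamp": "...",
      "state_uuid": "lj0hNoO9QaeNa1eR2ukktQ",
      "node": {
        "id": "__node_id__"
      }
    }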
ClusterStateRenderer.java
@@ -22,7 +22,6 @@ public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc
             "cluster_state.master_node",
             "cluster_state.state_uuid",
             "cluster_state.status",
-            "cluster_state.nodes",
     };

     public ClusterStateRenderer() {
DiscoveryNodeRenderer.java (new file)
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.marvel.agent.renderer.cluster;
+
+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMarvelDoc;
+import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
+
+import java.io.IOException;
+
+public class DiscoveryNodeRenderer extends AbstractRenderer<DiscoveryNodeMarvelDoc> {
+
+    public DiscoveryNodeRenderer() {
+        super(null, false);
+    }
+
+    @Override
+    protected void doRender(DiscoveryNodeMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.startObject(Fields.NODE);
+
+        DiscoveryNode node = marvelDoc.getNode();
+        if (node != null) {
+            builder.field(Fields.NAME, node.getName());
+            builder.field(Fields.TRANSPORT_ADDRESS, node.getAddress().toString());
+
+            builder.startObject(Fields.ATTRIBUTES);
+            for (ObjectObjectCursor<String, String> attr : node.getAttributes()) {
+                builder.field(attr.key, attr.value);
+            }
+            builder.endObject();
+            builder.field(Fields.ID, node.getId());
+        }
+
+        builder.endObject();
+    }
+
+    static final class Fields {
+        static final XContentBuilderString NODE = new XContentBuilderString("node");
+        static final XContentBuilderString NAME = new XContentBuilderString("name");
+        static final XContentBuilderString TRANSPORT_ADDRESS = new XContentBuilderString("transport_address");
+        static final XContentBuilderString ATTRIBUTES = new XContentBuilderString("attributes");
+        static final XContentBuilderString ID = new XContentBuilderString("id");
+    }
+}
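Likewise, a rough sketch (assumed, not taken from the commit) of a "node" document as this renderer would emit it into the Marvel data index, keyed by the node id; field names follow the Fields constants above and the placeholder values mirror the sample JSON at the end of this diff:

    {
      "cluster_uuid": "...",
      "timestamp": "...",
      "node": {
        "name": "Box",
        "transport_address": "inet[/127.0.0.1:9300]",
        "attributes": {
          "local": "true"
        },
        "id": "__node_id__"
      }
    }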
(Marvel index template mappings)
@@ -186,9 +186,6 @@
           "type": "string",
           "index": "not_analyzed"
         },
-        "nodes": {
-          "enabled": false
-        },
         "shards": {
           "type": "object"
         }
@@ -199,6 +196,25 @@
     "cluster_info": {
       "enabled": false
     },
+    "node": {
+      "enabled": false
+    },
+    "nodes": {
+      "properties": {
+        "state_uuid": {
+          "type": "string",
+          "index": "not_analyzed"
+        },
+        "node": {
+          "properties": {
+            "id": {
+              "type": "string",
+              "index": "not_analyzed"
+            }
+          }
+        }
+      }
+    },
     "node_stats": {
       "properties": {
         "node_stats": {
ClusterStateCollectorTests.java
@@ -14,32 +14,18 @@ import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
 import org.elasticsearch.marvel.agent.settings.MarvelSettings;
 import org.elasticsearch.marvel.license.MarvelLicensee;

 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.*;

 public class ClusterStateCollectorTests extends AbstractCollectorTestCase {

     public void testClusterStateCollectorNoIndices() throws Exception {
-        Collection<MarvelDoc> results = newClusterStateCollector().doCollect();
-        assertThat(results, hasSize(1));
-
-        MarvelDoc marvelDoc = results.iterator().next();
-        assertNotNull(marvelDoc);
-        assertThat(marvelDoc, instanceOf(ClusterStateMarvelDoc.class));
-
-        ClusterStateMarvelDoc clusterStateMarvelDoc = (ClusterStateMarvelDoc) marvelDoc;
-        assertThat(clusterStateMarvelDoc.clusterUUID(), equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
-        assertThat(clusterStateMarvelDoc.timestamp(), greaterThan(0L));
-        assertThat(clusterStateMarvelDoc.type(), equalTo(ClusterStateCollector.TYPE));
-        assertNotNull(clusterStateMarvelDoc.getClusterState());
-
-        ClusterState clusterState = clusterStateMarvelDoc.getClusterState();
-        assertThat(clusterState.getRoutingTable().allShards(), hasSize(0));
+        assertMarvelDocs(newClusterStateCollector().doCollect(), 0);
     }

     public void testClusterStateCollectorOneIndex() throws Exception {
@@ -56,33 +42,20 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {

         securedFlush();
         securedRefresh();

         assertHitCount(client().prepareSearch().setSize(0).get(), nbDocs);

-        Collection<MarvelDoc> results = newClusterStateCollector().doCollect();
-        assertThat(results, hasSize(1));
-
-        MarvelDoc marvelDoc = results.iterator().next();
-        assertNotNull(marvelDoc);
-        assertThat(marvelDoc, instanceOf(ClusterStateMarvelDoc.class));
-
-        ClusterStateMarvelDoc clusterStateMarvelDoc = (ClusterStateMarvelDoc) marvelDoc;
-        assertThat(clusterStateMarvelDoc.clusterUUID(), equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
-        assertThat(clusterStateMarvelDoc.timestamp(), greaterThan(0L));
-        assertThat(clusterStateMarvelDoc.type(), equalTo(ClusterStateCollector.TYPE));
-
-        assertNotNull(clusterStateMarvelDoc.getClusterState());
-
-        ClusterState clusterState = clusterStateMarvelDoc.getClusterState();
-        assertThat(clusterState.getRoutingTable().allShards("test"), hasSize(nbShards));
+        assertMarvelDocs(newClusterStateCollector().doCollect(), nbShards);
     }

     public void testClusterStateCollectorMultipleIndices() throws Exception {
         int nbIndices = randomIntBetween(1, 5);
         int[] docsPerIndex = new int[nbIndices];
         int[] shardsPerIndex = new int[nbIndices];
         int nbShards = 0;

         for (int i = 0; i < nbIndices; i++) {
             shardsPerIndex[i] = randomIntBetween(1, 5);
             nbShards += shardsPerIndex[i];
             assertAcked(prepareCreate("test-" + i).setSettings(Settings.settingsBuilder()
                     .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, shardsPerIndex[i])
                     .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -96,11 +69,14 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {

         securedFlush();
         securedRefresh();

+        for (int i = 0; i < nbIndices; i++) {
+            assertHitCount(client().prepareSearch("test-" + i).setSize(0).get(), docsPerIndex[i]);
+        }

         Collection<MarvelDoc> results = newClusterStateCollector().doCollect();
+        assertMarvelDocs(results, nbShards);

         MarvelDoc marvelDoc = results.iterator().next();
         assertNotNull(marvelDoc);
         assertThat(marvelDoc, instanceOf(ClusterStateMarvelDoc.class));
@@ -168,4 +144,70 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {
                 internalCluster().getInstance(MarvelLicensee.class, nodeId),
                 securedClient(nodeId));
     }

+    private void assertMarvelDocs(Collection<MarvelDoc> results, final int nbShards) {
+        assertThat("expecting 1 document for the cluster state and 2 documents per node", results, hasSize(1 + internalCluster().size() * 2));
+
+        final ClusterState clusterState = securedClient().admin().cluster().prepareState().get().getState();
+        final String clusterUUID = clusterState.getMetaData().clusterUUID();
+        final String stateUUID = clusterState.stateUUID();
+
+        List<ClusterStateNodeMarvelDoc> clusterStateNodes = new ArrayList<>();
+        List<DiscoveryNodeMarvelDoc> discoveryNodes = new ArrayList<>();
+
+        for (MarvelDoc marvelDoc : results) {
+            assertThat(marvelDoc.clusterUUID(), equalTo(clusterUUID));
+            assertThat(marvelDoc.timestamp(), greaterThan(0L));
+            assertThat(marvelDoc, anyOf(instanceOf(ClusterStateMarvelDoc.class), instanceOf(ClusterStateNodeMarvelDoc.class), instanceOf(DiscoveryNodeMarvelDoc.class)));
+
+            switch (marvelDoc.type()) {
+                case ClusterStateCollector.TYPE:
+                    ClusterStateMarvelDoc clusterStateMarvelDoc = (ClusterStateMarvelDoc) marvelDoc;
+                    assertThat(clusterStateMarvelDoc.getClusterState().getRoutingTable().allShards(), hasSize(nbShards));
+                    break;
+
+                case ClusterStateCollector.NODES_TYPE:
+                    ClusterStateNodeMarvelDoc clusterStateNodeMarvelDoc = (ClusterStateNodeMarvelDoc) marvelDoc;
+                    assertThat(clusterStateNodeMarvelDoc.getStateUUID(), equalTo(stateUUID));
+                    assertThat(clusterStateNodeMarvelDoc.getNodeId(), not(isEmptyOrNullString()));
+                    clusterStateNodes.add(clusterStateNodeMarvelDoc);
+                    break;
+
+                case ClusterStateCollector.NODE_TYPE:
+                    DiscoveryNodeMarvelDoc discoveryNodeMarvelDoc = (DiscoveryNodeMarvelDoc) marvelDoc;
+                    assertThat(discoveryNodeMarvelDoc.index(), equalTo(MarvelSettings.MARVEL_DATA_INDEX_NAME));
+                    assertThat(discoveryNodeMarvelDoc.id(), not(isEmptyOrNullString()));
+                    assertNotNull(discoveryNodeMarvelDoc.getNode());
+                    discoveryNodes.add(discoveryNodeMarvelDoc);
+                    break;
+                default:
+                    fail("unknown marvel document type " + marvelDoc.type());
+            }
+        }
+
+        assertThat(clusterStateNodes, hasSize(internalCluster().size()));
+        assertThat(discoveryNodes, hasSize(internalCluster().size()));
+
+        for (final String nodeName : internalCluster().getNodeNames()) {
+            final String nodeId = internalCluster().clusterService(nodeName).localNode().getId();
+
+            boolean found = false;
+            for (ClusterStateNodeMarvelDoc doc : clusterStateNodes) {
+                if (nodeId.equals(doc.getNodeId())) {
+                    found = true;
+                    break;
+                }
+            }
+            assertTrue("Could not find node id [" + nodeName + "]", found);
+
+            found = false;
+            for (DiscoveryNodeMarvelDoc doc : discoveryNodes) {
+                if (nodeName.equals(doc.getNode().getName())) {
+                    found = true;
+                    break;
+                }
+            }
+            assertTrue("Could not find node name [" + nodeName + "]", found);
+        }
+    }
+}
ClusterStateTests.java
@@ -10,6 +10,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
+import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
 import org.elasticsearch.marvel.agent.settings.MarvelSettings;
 import org.elasticsearch.marvel.test.MarvelIntegTestCase;
 import org.elasticsearch.search.SearchHit;
@@ -23,6 +24,8 @@ import java.util.concurrent.TimeUnit;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.core.Is.is;

 @ClusterScope(scope = Scope.TEST)
 public class ClusterStateTests extends MarvelIntegTestCase {
@@ -89,4 +92,80 @@ public class ClusterStateTests extends MarvelIntegTestCase {
                 .setTypes(ClusterStateCollector.TYPE)
                 .setQuery(QueryBuilders.matchQuery("cluster_state.nodes." + nodes.masterNodeId() + ".name", nodes.masterNode().name())).get(), 0L);
     }

+    public void testClusterStateNodes() throws Exception {
+        final long nbNodes = internalCluster().size();
+
+        logger.debug("--> waiting for documents to be collected");
+        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), ClusterStateCollector.NODES_TYPE);
+
+        logger.debug("--> searching for marvel documents of type [{}]", ClusterStateCollector.NODES_TYPE);
+        SearchResponse response = client().prepareSearch().setTypes(ClusterStateCollector.NODES_TYPE).get();
+        assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(nbNodes));
+
+        logger.debug("--> checking that every document contains the expected fields");
+        String[] filters = {
+                AbstractRenderer.Fields.CLUSTER_UUID.underscore().toString(),
+                AbstractRenderer.Fields.TIMESTAMP.underscore().toString(),
+                ClusterStateNodeRenderer.Fields.STATE_UUID.underscore().toString(),
+                ClusterStateNodeRenderer.Fields.NODE.underscore().toString(),
+                ClusterStateNodeRenderer.Fields.NODE.underscore().toString() + "." + ClusterStateNodeRenderer.Fields.ID.underscore().toString(),
+        };
+
+        for (SearchHit searchHit : response.getHits().getHits()) {
+            Map<String, Object> fields = searchHit.sourceAsMap();
+
+            for (String filter : filters) {
+                assertContains(filter, fields);
+            }
+        }
+
+        logger.debug("--> cluster state nodes successfully collected");
+    }
+
+    public void testClusterStateNode() throws Exception {
+        final long nbNodes = internalCluster().size();
+
+        logger.debug("--> waiting for documents to be collected");
+        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), ClusterStateCollector.NODE_TYPE);
+
+        logger.debug("--> searching for marvel documents of type [{}]", ClusterStateCollector.NODE_TYPE);
+        SearchResponse response = client().prepareSearch().setTypes(ClusterStateCollector.NODE_TYPE).get();
+        assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(nbNodes));
+
+        logger.debug("--> checking that every document contains the expected fields");
+        String[] filters = {
+                AbstractRenderer.Fields.CLUSTER_UUID.underscore().toString(),
+                AbstractRenderer.Fields.TIMESTAMP.underscore().toString(),
+                DiscoveryNodeRenderer.Fields.NODE.underscore().toString(),
+                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.ID.underscore().toString(),
+                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.NAME.underscore().toString(),
+                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.ATTRIBUTES.underscore().toString(),
+                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.TRANSPORT_ADDRESS.underscore().toString(),
+        };
+
+        for (SearchHit searchHit : response.getHits().getHits()) {
+            Map<String, Object> fields = searchHit.sourceAsMap();
+
+            for (String filter : filters) {
+                assertContains(filter, fields);
+            }
+        }
+
+        for (final String nodeName : internalCluster().getNodeNames()) {
+            final String nodeId = internalCluster().clusterService(nodeName).localNode().getId();
+
+            logger.debug("--> getting marvel document for node id [{}]", nodeId);
+            assertThat(client().prepareGet(MarvelSettings.MARVEL_DATA_INDEX_NAME, ClusterStateCollector.NODE_TYPE, nodeId).get().isExists(), is(true));
+
+            // checks that document is not indexed
+            assertHitCount(client().prepareSearch().setSize(0)
+                    .setTypes(ClusterStateCollector.NODE_TYPE)
+                    .setQuery(QueryBuilders.boolQuery()
+                            .should(QueryBuilders.matchQuery("node.id", nodeId))
+                            .should(QueryBuilders.matchQuery("node.name", nodeName))).get(), 0);
+        }
+
+        logger.debug("--> cluster state node successfully collected");
+    }
+}
(cluster_state sample JSON)
@@ -5,15 +5,6 @@
     "status": "yellow",
     "version": 14,
     "state_uuid": "lj0hNoO9QaeNa1eR2ukktQ",
-    "master_node": "__node_id__",
-    "nodes": {
-      "__node_id__": {
-        "name": "Box",
-        "transport_address": "inet[/127.0.0.1:9300]",
-        "attributes": {
-          "local": "true"
-        }
-      }
-    }
+    "master_node": "__node_id__"
   }
 }