Marvel: Store shards as individual documents in marvel_shards
Closes elastic/elasticsearch#581

Original commit: elastic/x-pack-elasticsearch@059ad5f09f

parent 81dc0ee210 · commit a640ab6bec
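Each shard of the cluster routing table is now exported as its own Marvel document of type marvel_shards, instead of being embedded in the cluster_state document. For illustration, this is the shape of such a document, condensed from the samples/marvel_shards.json file added at the end of this commit (UUIDs and node id are sample values):

{
  "cluster_uuid": "dsFPzYRyQCe6cq48a0wxkQ",
  "timestamp": "2015-07-22T15:54:02.979Z",
  "state_uuid": "YYvS4BSURvaqm2h7Kzqfwg",
  "shard": { "state": "STARTED", "primary": true, "node": "6MMNl9dXRV-kFRKh_fXxxA", "shard": 0, "index": ".marvel-2015.09.03" }
}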
CollectorModule.java:

@@ -14,6 +14,7 @@ import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsCollector;
 import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
+import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -29,6 +30,7 @@ public class CollectorModule extends AbstractModule {
         registerCollector(IndexStatsCollector.class);
         registerCollector(ClusterStatsCollector.class);
         registerCollector(ClusterStateCollector.class);
+        registerCollector(ShardsCollector.class);
         registerCollector(NodeStatsCollector.class);
         registerCollector(IndexRecoveryCollector.class);
     }
ShardMarvelDoc.java (new file):

@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.shards;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;

public class ShardMarvelDoc extends MarvelDoc {

    private final ShardRouting shardRouting;
    private final String clusterStateUUID;

    public ShardMarvelDoc(String index, String type, String id, String clusterUUID, long timestamp,
                          ShardRouting shardRouting, String clusterStateUUID) {
        super(index, type, id, clusterUUID, timestamp);
        this.shardRouting = shardRouting;
        this.clusterStateUUID = clusterStateUUID;
    }

    public ShardRouting getShardRouting() {
        return shardRouting;
    }

    public String getClusterStateUUID() {
        return clusterStateUUID;
    }
}
ShardsCollector.java (new file):

@@ -0,0 +1,104 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.shards;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
import org.elasticsearch.marvel.license.LicenseService;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Collector for shards.
 * <p/>
 * This collector runs on the master node only and collects the {@link ShardMarvelDoc} documents
 * for every index shard.
 */
public class ShardsCollector extends AbstractCollector<ShardsCollector> {

    public static final String NAME = "shards-collector";
    public static final String TYPE = "marvel_shards";

    @Inject
    public ShardsCollector(Settings settings, ClusterService clusterService, MarvelSettings marvelSettings, LicenseService licenseService) {
        super(settings, NAME, clusterService, marvelSettings, licenseService);
    }

    @Override
    protected boolean canCollect() {
        return super.canCollect() && isLocalNodeMaster();
    }

    @Override
    protected Collection<MarvelDoc> doCollect() throws Exception {
        List<MarvelDoc> results = new ArrayList<>(1);

        ClusterState clusterState = clusterService.state();
        if (clusterState != null) {
            RoutingTable routingTable = clusterState.routingTable();
            if (routingTable != null) {
                List<ShardRouting> shards = routingTable.allShards();
                if (shards != null) {
                    String clusterUUID = clusterUUID();
                    String stateUUID = clusterState.stateUUID();
                    long timestamp = System.currentTimeMillis();

                    for (ShardRouting shard : shards) {
                        if (match(shard.getIndex())) {
                            results.add(new ShardMarvelDoc(null, TYPE, id(stateUUID, shard), clusterUUID, timestamp, shard, stateUUID));
                        }
                    }
                }
            }
        }

        return Collections.unmodifiableCollection(results);
    }

    private boolean match(String indexName) {
        String[] indices = marvelSettings.indices();
        return CollectionUtils.isEmpty(indices) || Regex.simpleMatch(indices, indexName);
    }

    /**
     * Compute an id that has the format:
     *
     * {state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'}
     */
    static String id(String stateUUID, ShardRouting shardRouting) {
        StringBuilder builder = new StringBuilder();
        builder.append(stateUUID);
        builder.append(':');
        if (shardRouting.assignedToNode()) {
            builder.append(shardRouting.currentNodeId());
        } else {
            builder.append("_na");
        }
        builder.append(':');
        builder.append(shardRouting.index());
        builder.append(':');
        builder.append(Integer.valueOf(shardRouting.id()));
        builder.append(':');
        if (shardRouting.primary()) {
            builder.append("p");
        } else {
            builder.append("r");
        }
        return builder.toString();
    }
}
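To make the id format above concrete, here is a minimal, stand-alone Java sketch (not part of the commit) that reproduces the {state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'} layout with plain string values; the helper name is hypothetical and the sample values are taken from the sample document bundled later in this commit.

// Illustrative only: mimics the format of ShardsCollector.id() without using ShardRouting.
public class ShardDocIdExample {

    // {state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'}
    static String shardDocId(String stateUUID, String nodeId, String index, int shardId, boolean primary) {
        return stateUUID + ':'
                + (nodeId != null ? nodeId : "_na")   // unassigned shards get "_na" instead of a node id
                + ':' + index
                + ':' + shardId
                + ':' + (primary ? "p" : "r");
    }

    public static void main(String[] args) {
        // Prints: YYvS4BSURvaqm2h7Kzqfwg:6MMNl9dXRV-kFRKh_fXxxA:.marvel-2015.09.03:0:p
        System.out.println(shardDocId("YYvS4BSURvaqm2h7Kzqfwg", "6MMNl9dXRV-kFRKh_fXxxA", ".marvel-2015.09.03", 0, true));
    }
}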
RendererModule.java:

@@ -14,6 +14,7 @@ import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
 import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsCollector;
 import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
+import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
 import org.elasticsearch.marvel.agent.renderer.cluster.ClusterInfoRenderer;
 import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStateRenderer;
 import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStatsRenderer;
@@ -21,6 +22,7 @@ import org.elasticsearch.marvel.agent.renderer.indices.IndexRecoveryRenderer;
 import org.elasticsearch.marvel.agent.renderer.indices.IndexStatsRenderer;
 import org.elasticsearch.marvel.agent.renderer.indices.IndicesStatsRenderer;
 import org.elasticsearch.marvel.agent.renderer.node.NodeStatsRenderer;
+import org.elasticsearch.marvel.agent.renderer.shards.ShardsRenderer;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -53,6 +55,9 @@ public class RendererModule extends AbstractModule {
         bind(ClusterStateRenderer.class).asEagerSingleton();
         mbinder.addBinding(ClusterStateCollector.TYPE).to(ClusterStateRenderer.class);
 
+        bind(ShardsRenderer.class).asEagerSingleton();
+        mbinder.addBinding(ShardsCollector.TYPE).to(ShardsRenderer.class);
+
         bind(NodeStatsRenderer.class).asEagerSingleton();
         mbinder.addBinding(NodeStatsCollector.TYPE).to(NodeStatsRenderer.class);
 
ClusterStateRenderer.java:

@@ -6,8 +6,6 @@
 package org.elasticsearch.marvel.agent.renderer.cluster;
 
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
@@ -15,7 +13,6 @@ import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
 import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.Locale;
 
 public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc> {
@@ -25,7 +22,6 @@ public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc
             "cluster_state.master_node",
             "cluster_state.status",
             "cluster_state.nodes",
-            "cluster_state.shards",
     };
 
     public ClusterStateRenderer() {
@@ -39,21 +35,7 @@ public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc
         ClusterState clusterState = marvelDoc.getClusterState();
         if (clusterState != null) {
             builder.field(Fields.STATUS, marvelDoc.getStatus().name().toLowerCase(Locale.ROOT));
 
             clusterState.toXContent(builder, params);
-
-            RoutingTable routingTable = clusterState.routingTable();
-            if (routingTable != null) {
-                List<ShardRouting> shards = routingTable.allShards();
-                if (shards != null) {
-
-                    builder.startArray(Fields.SHARDS);
-                    for (ShardRouting shard : shards) {
-                        shard.toXContent(builder, params);
-                    }
-                    builder.endArray();
-                }
-            }
         }
 
         builder.endObject();
@@ -62,6 +44,5 @@ public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc
     static final class Fields {
         static final XContentBuilderString CLUSTER_STATE = new XContentBuilderString("cluster_state");
         static final XContentBuilderString STATUS = new XContentBuilderString("status");
-        static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
     }
 }
ShardsRenderer.java (new file):

@@ -0,0 +1,49 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.shards;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.shards.ShardMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;

import java.io.IOException;

public class ShardsRenderer extends AbstractRenderer<ShardMarvelDoc> {

    public static final String[] FILTERS = {
            "state_uuid",
            "shard.state",
            "shard.primary",
            "shard.node",
            "shard.relocating_node",
            "shard.shard",
            "shard.index",
    };

    public ShardsRenderer() {
        super(FILTERS, true);
    }

    @Override
    protected void doRender(ShardMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.field(Fields.STATE_UUID, marvelDoc.getClusterStateUUID());

        ShardRouting shardRouting = marvelDoc.getShardRouting();
        if (shardRouting != null) {
            // ShardRouting is rendered inside a startObject() / endObject() but without a name,
            // so we must use XContentBuilder.field(String, ToXContent, ToXContent.Params) here
            builder.field(Fields.SHARD.underscore().toString(), shardRouting, params);
        }
    }

    static final class Fields {
        static final XContentBuilderString SHARD = new XContentBuilderString("shard");
        static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
    }
}
Marvel index template (mappings):

@@ -177,9 +177,6 @@
           },
           "nodes": {
             "type": "object"
-          },
-          "shards": {
-            "type": "object"
           }
         }
       }
@@ -233,6 +230,17 @@
             "type": "object"
           }
         }
-      }
+      },
+      "marvel_shards": {
+        "properties": {
+          "state_uuid": {
+            "type": "string",
+            "index": "not_analyzed"
+          },
+          "shard": {
+            "type": "object"
+          }
+        }
+      }
     }
   }
 }
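Since state_uuid is mapped as a not_analyzed string, all shard documents captured for one cluster state version can be retrieved with an exact term query. A minimal sketch using the Java client API of that era; the index pattern, class and method names, and page size are illustrative assumptions, not part of the commit.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class ShardDocsByStateUuid {

    // Fetches the marvel_shards documents collected for a given cluster state version.
    static SearchResponse shardsForStateUuid(Client client, String stateUuid) {
        return client.prepareSearch(".marvel-*")                              // Marvel data indices (assumed pattern)
                .setTypes("marvel_shards")                                    // ShardsCollector.TYPE
                .setQuery(QueryBuilders.termQuery("state_uuid", stateUuid))   // exact match on the not_analyzed field
                .setSize(100)                                                 // illustrative page size
                .get();
    }
}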
AbstractCollectorTestCase.java:

@@ -23,6 +23,7 @@ import org.elasticsearch.license.plugin.core.LicensesService;
 import org.elasticsearch.marvel.MarvelPlugin;
 import org.elasticsearch.marvel.agent.settings.MarvelSettings;
 import org.elasticsearch.marvel.license.LicenseService;
+import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -45,6 +46,15 @@ public class AbstractCollectorTestCase extends ESIntegTestCase {
         return nodePlugins();
     }
 
+    @Override
+    protected Settings nodeSettings(int nodeOrdinal) {
+        return Settings.builder()
+                .put(super.nodeSettings(nodeOrdinal))
+                .put(Node.HTTP_ENABLED, false)
+                .put(MarvelSettings.INTERVAL, "60m")
+                .build();
+    }
+
     @Before
     public void ensureLicenseIsEnabled() {
         enableLicense();
ShardsCollectorTests.java (new file):

@@ -0,0 +1,203 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.collector.shards;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
import org.elasticsearch.marvel.license.LicenseService;
import org.junit.Test;

import java.util.Collection;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.*;

public class ShardsCollectorTests extends AbstractCollectorTestCase {

    @Test
    public void testShardsCollectorNoIndices() throws Exception {
        Collection<MarvelDoc> results = newShardsCollector().doCollect();
        assertThat(results, hasSize(0));
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(MarvelSettings.INDICES, "test-shards*")
                .build();
    }

    @Test
    public void testShardsCollectorOneIndex() throws Exception {
        int nbDocs = randomIntBetween(1, 20);
        for (int i = 0; i < nbDocs; i++) {
            client().prepareIndex("test-shards", "test").setSource("num", i).get();
        }

        waitForRelocation();
        ensureGreen();
        refresh();

        assertHitCount(client().prepareCount().get(), nbDocs);

        Collection<MarvelDoc> results = newShardsCollector().doCollect();
        assertThat(results, hasSize(getNumShards("test-shards").totalNumShards));

        final ClusterState clusterState = client().admin().cluster().prepareState().setMetaData(true).get().getState();

        int primaries = 0;
        int replicas = 0;

        for (MarvelDoc marvelDoc : results) {
            assertNotNull(marvelDoc);
            assertThat(marvelDoc, instanceOf(ShardMarvelDoc.class));

            ShardMarvelDoc shardMarvelDoc = (ShardMarvelDoc) marvelDoc;
            assertThat(shardMarvelDoc.clusterUUID(), equalTo(clusterState.metaData().clusterUUID()));
            assertThat(shardMarvelDoc.timestamp(), greaterThan(0L));
            assertThat(shardMarvelDoc.type(), equalTo(ShardsCollector.TYPE));
            assertThat(shardMarvelDoc.id(), equalTo(ShardsCollector.id(clusterState.stateUUID(), ((ShardMarvelDoc) marvelDoc).getShardRouting())));
            assertThat(shardMarvelDoc.getClusterStateUUID(), equalTo(clusterState.stateUUID()));

            ShardRouting shardRouting = shardMarvelDoc.getShardRouting();
            assertNotNull(shardRouting);
            assertThat(shardMarvelDoc.getShardRouting().assignedToNode(), is(true));

            if (shardRouting.primary()) {
                primaries++;
            } else {
                replicas++;
            }
        }

        int expectedPrimaries = getNumShards("test-shards").numPrimaries;
        int expectedReplicas = expectedPrimaries * getNumShards("test-shards").numReplicas;
        assertThat(primaries, equalTo(expectedPrimaries));
        assertThat(replicas, equalTo(expectedReplicas));
    }

    @Test
    public void testShardsCollectorMultipleIndices() throws Exception {
        final String indexPrefix = "test-shards-";
        final int nbIndices = randomIntBetween(1, 3);
        final int[] nbShardsPerIndex = new int[nbIndices];
        final int[] nbReplicasPerIndex = new int[nbIndices];
        final int[] nbDocsPerIndex = new int[nbIndices];

        int totalShards = 0;
        for (int i = 0; i < nbIndices; i++) {
            nbShardsPerIndex[i] = randomIntBetween(1, 3);
            nbReplicasPerIndex[i] = randomIntBetween(0, Math.min(2, internalCluster().numDataNodes()));

            assertAcked(prepareCreate(indexPrefix + String.valueOf(i)).setSettings(Settings.settingsBuilder()
                    .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, nbShardsPerIndex[i])
                    .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, nbReplicasPerIndex[i])
                    .build()));

            totalShards = totalShards + nbShardsPerIndex[i] + (nbShardsPerIndex[i] * nbReplicasPerIndex[i]);

            nbDocsPerIndex[i] = randomIntBetween(1, 20);
            for (int j = 0; j < nbDocsPerIndex[i]; j++) {
                client().prepareIndex(indexPrefix + String.valueOf(i), "test").setSource("num", i).get();
            }
        }

        waitForRelocation();
        refresh();

        for (int i = 0; i < nbIndices; i++) {
            assertHitCount(client().prepareCount(indexPrefix + String.valueOf(i)).get(), nbDocsPerIndex[i]);
        }

        Collection<MarvelDoc> results = newShardsCollector().doCollect();
        assertThat(results, hasSize(totalShards));

        final ClusterState clusterState = client().admin().cluster().prepareState().setMetaData(true).get().getState();

        for (MarvelDoc marvelDoc : results) {
            assertNotNull(marvelDoc);
            assertThat(marvelDoc, instanceOf(ShardMarvelDoc.class));

            ShardMarvelDoc shardMarvelDoc = (ShardMarvelDoc) marvelDoc;
            assertThat(shardMarvelDoc.clusterUUID(), equalTo(clusterState.metaData().clusterUUID()));
            assertThat(shardMarvelDoc.timestamp(), greaterThan(0L));
            assertThat(shardMarvelDoc.type(), equalTo(ShardsCollector.TYPE));
            assertThat(shardMarvelDoc.id(), equalTo(ShardsCollector.id(clusterState.stateUUID(), ((ShardMarvelDoc) marvelDoc).getShardRouting())));
            assertThat(shardMarvelDoc.getClusterStateUUID(), equalTo(clusterState.stateUUID()));

            ShardRouting shardRouting = shardMarvelDoc.getShardRouting();
            assertNotNull(shardRouting);
        }

        // Checks that a correct number of ShardMarvelDoc documents has been created for each index
        int[] shards = new int[nbIndices];
        for (MarvelDoc marvelDoc : results) {
            ShardRouting routing = ((ShardMarvelDoc) marvelDoc).getShardRouting();
            int index = Integer.parseInt(routing.index().substring(indexPrefix.length()));
            shards[index]++;
        }

        for (int i = 0; i < nbIndices; i++) {
            int total = getNumShards(indexPrefix + String.valueOf(i)).totalNumShards;
            assertThat("expecting " + total + " shards marvel documents for index [" + indexPrefix + String.valueOf(i) + "]", shards[i], equalTo(total));
        }
    }

    @Test
    public void testShardsCollectorWithLicensing() {
        String[] nodes = internalCluster().getNodeNames();
        for (String node : nodes) {
            logger.debug("--> creating a new instance of the collector");
            ShardsCollector collector = newShardsCollector(node);
            assertNotNull(collector);

            logger.debug("--> enabling license and checks that the collector can collect data if node is master");
            enableLicense();
            if (node.equals(internalCluster().getMasterName())) {
                assertCanCollect(collector);
            } else {
                assertCannotCollect(collector);
            }

            logger.debug("--> starting graceful period and checks that the collector can still collect data if node is master");
            beginGracefulPeriod();
            if (node.equals(internalCluster().getMasterName())) {
                assertCanCollect(collector);
            } else {
                assertCannotCollect(collector);
            }

            logger.debug("--> ending graceful period and checks that the collector cannot collect data");
            endGracefulPeriod();
            assertCannotCollect(collector);

            logger.debug("--> disabling license and checks that the collector cannot collect data");
            disableLicense();
            assertCannotCollect(collector);
        }
    }

    private ShardsCollector newShardsCollector() {
        // This collector runs on master node only
        return newShardsCollector(internalCluster().getMasterName());
    }

    private ShardsCollector newShardsCollector(String nodeId) {
        assertNotNull(nodeId);
        return new ShardsCollector(internalCluster().getInstance(Settings.class, nodeId),
                internalCluster().getInstance(ClusterService.class, nodeId),
                internalCluster().getInstance(MarvelSettings.class, nodeId),
                internalCluster().getInstance(LicenseService.class, nodeId));
    }
}
AbstractRendererTestCase.java:

@@ -76,8 +76,8 @@ public abstract class AbstractRendererTestCase extends ESIntegTestCase {
      * it recurses to check if 'bar' exists in the sub-map.
      */
     protected void assertContains(String field, Map<String, Object> values) {
-        assertNotNull(field);
-        assertNotNull(values);
+        assertNotNull("field name should not be null", field);
+        assertNotNull("values map should not be null", values);
 
         int point = field.indexOf('.');
         if (point > -1) {
@@ -98,7 +98,7 @@ public abstract class AbstractRendererTestCase extends ESIntegTestCase {
                 assertFalse(value instanceof Map);
             }
         } else {
-            assertNotNull(values.get(field));
+            assertTrue("expecting field [" + field + "] to be present in marvel document", values.containsKey(field));
         }
     }
 
RendererTestUtils.java:

@@ -42,10 +42,10 @@ public class RendererTestUtils {
         assertNotNull(result);
         assertNotNull(expected);
 
-        try {
+        try (
             XContentParser resultParser = XContentFactory.xContent(result).createParser(result);
             XContentParser expectedParser = XContentFactory.xContent(expected).createParser(expected);
-
+        ) {
             while (true) {
                 XContentParser.Token token1 = resultParser.nextToken();
                 XContentParser.Token token2 = expectedParser.nextToken();
ClusterInfoIT.java:

@@ -33,11 +33,11 @@ public class ClusterInfoIT extends AbstractRendererTestCase {
     }
 
     @Test
-    public void testLicenses() throws Exception {
+    public void testClusterInfo() throws Exception {
         final String clusterUUID = client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID();
         assertTrue(Strings.hasText(clusterUUID));
 
-        logger.debug("--> waiting for licenses collector to collect data (ie, the trial marvel license)");
+        logger.debug("--> waiting for cluster info collector to collect data (ie, the trial marvel license)");
         GetResponse response = assertBusy(new Callable<GetResponse>() {
             @Override
             public GetResponse call() throws Exception {
ShardsIT.java (new file):

@@ -0,0 +1,52 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.shards;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
import org.elasticsearch.marvel.agent.renderer.AbstractRendererTestCase;
import org.elasticsearch.search.SearchHit;
import org.junit.Test;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;

import static org.hamcrest.Matchers.greaterThan;

public class ShardsIT extends AbstractRendererTestCase {

    @Override
    protected Collection<String> collectors() {
        return Collections.singletonList(ShardsCollector.NAME);
    }

    @Test
    public void testShards() throws Exception {
        logger.debug("--> creating some indices so that shards collector reports data");
        for (int i = 0; i < randomIntBetween(1, 5); i++) {
            client().prepareIndex("test-" + i, "foo").setRefresh(true).setSource("field1", "value1").get();
        }

        waitForMarvelDocs(ShardsCollector.TYPE);

        logger.debug("--> searching for marvel documents of type [{}]", ShardsCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ShardsCollector.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = ShardsRenderer.FILTERS;
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();

            for (String filter : filters) {
                assertContains(filter, fields);
            }
        }

        logger.debug("--> shards successfully collected");
    }
}
ShardsRendererTests.java (new file):

@@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.shards;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.shards.ShardMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.StreamsUtils;
import org.junit.Test;

import java.io.IOException;

public class ShardsRendererTests extends ESSingleNodeTestCase {

    private static final String SAMPLE_FILE = "/samples/marvel_shards.json";

    @Test
    public void testShardsRenderer() throws Exception {
        createIndex("my-index", Settings.settingsBuilder()
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .build());

        logger.debug("--> retrieving cluster state");
        ClusterState clusterState = getInstanceFromNode(ClusterService.class).state();

        logger.debug("--> creating the shard marvel document");
        ShardMarvelDoc marvelDoc = new ShardMarvelDoc("my-index", "marvel_shards", "my-id",
                clusterState.metaData().clusterUUID(), 1437580442979L, clusterState.routingTable().allShards().iterator().next(), clusterState.stateUUID());

        logger.debug("--> rendering the document");
        Renderer renderer = new ShardsRenderer();
        String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

        logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
        String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

        logger.debug("--> comparing both documents, they must have the same structure");
        RendererTestUtils.assertJSONStructure(result, expected);
    }

    @Test
    public void testNoShard() throws IOException {
        String result = RendererTestUtils.renderAsJSON(new ShardMarvelDoc("my-index", "marvel_shards", "my-id", "cluster-uuid", 1437580442979L, null, "my-state-uuid"), new ShardsRenderer());
        RendererTestUtils.assertJSONStructureAndValues(result, "{\"cluster_uuid\":\"my-cluster-uuid\",\"timestamp\":\"2015-07-22T15:54:02.979Z\",\"state_uuid\":\"my-state-uuid\"}");
    }
}
cluster_state sample JSON:

@@ -13,44 +13,6 @@
         "local": "true"
       }
     }
-  },
-  "shards": [
-    {
-      "state": "STARTED",
-      "primary": true,
-      "node": "__node_id__",
-      "relocating_node": null,
-      "shard": 0,
-      "index": "my-index",
-      "version": 2,
-      "allocation_id": {
-        "id": "p6c9fBsNSc6SBTq0E0jLZw"
-      }
-    },
-    {
-      "state": "STARTED",
-      "primary": true,
-      "node": "__node_id__",
-      "relocating_node": null,
-      "shard": 1,
-      "index": "my-index",
-      "version": 2,
-      "allocation_id": {
-        "id": "KBOkx9UmRNmlZ6LGsnqxJw"
-      }
-    },
-    {
-      "state": "STARTED",
-      "primary": true,
-      "node": "__node_id__",
-      "relocating_node": null,
-      "shard": 2,
-      "index": "my-index",
-      "version": 2,
-      "allocation_id": {
-        "id": "aHXocqcnRme-y6OJIZX-CQ"
-      }
-    }
-  ]
+  }
 }
samples/marvel_shards.json (new file):

@@ -0,0 +1,13 @@
{
  "cluster_uuid": "dsFPzYRyQCe6cq48a0wxkQ",
  "timestamp": "2015-07-22T15:54:02.979Z",
  "state_uuid": "YYvS4BSURvaqm2h7Kzqfwg",
  "shard": {
    "state": "STARTED",
    "primary": true,
    "node": "6MMNl9dXRV-kFRKh_fXxxA",
    "relocating_node": null,
    "shard": 0,
    "index": ".marvel-2015.09.03"
  }
}