Monitoring: Clean up and refactoring
This commit removes the various constructors of the monitoring documents and adds a single constructor that accepts a monitoring id and version. It also renames the *Renderer classes to *Resolver and centralizes the logic for resolving the index name, type name and id in one place. Exporters now use these resolvers to determine in which index a given document must be indexed. Original commit: elastic/x-pack-elasticsearch@c2349a95a6
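For context, a rough sketch of the new pattern; the class and method names below are taken from this diff, but the surrounding wiring (running inside a collector and an exporter) is only illustrative:

// Sketch only, not part of the commit. A collector now builds documents with
// just a monitoring id and version instead of index/type/id:
ClusterStatsMonitoringDoc doc = new ClusterStatsMonitoringDoc(monitoringId(), monitoringVersion());
doc.setClusterUUID(clusterUUID());
doc.setTimestamp(System.currentTimeMillis());
doc.setSourceNode(localNode());

// At export time, an exporter looks up a resolver and lets it decide where and
// how the document is indexed (as done in LocalBulk further down in this diff):
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc));
request.source(resolver.source(doc, XContentType.SMILE));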
This commit is contained in:
parent e015d7e501
commit a8e52eb520
@@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.marvel.agent.AgentService;
import org.elasticsearch.marvel.agent.collector.CollectorModule;
import org.elasticsearch.marvel.agent.exporter.ExporterModule;
import org.elasticsearch.marvel.agent.renderer.RendererModule;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.marvel.license.LicenseModule;
import org.elasticsearch.marvel.license.MarvelLicensee;
@@ -53,7 +52,6 @@ public class Marvel {
modules.add(new LicenseModule());
modules.add(new CollectorModule());
modules.add(new ExporterModule(settings));
modules.add(new RendererModule());
}
return Collections.unmodifiableList(modules);
}
@@ -22,8 +22,6 @@ import java.util.function.Function;

public class MarvelSettings extends AbstractComponent {

public static final String MONITORING_INDICES_PREFIX = ".monitoring-es-";
public static final String MONITORING_DATA_INDEX_PREFIX = ".monitoring-es-data-";
public static final String LEGACY_DATA_INDEX_NAME = ".marvel-es-data";

public static final String HISTORY_DURATION_SETTING_NAME = "history.duration";
@@ -0,0 +1,32 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel;

import java.util.Locale;

public enum MonitoringIds {

    ES("es");

    private final String id;

    MonitoringIds(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public static MonitoringIds fromId(String id) {
        switch (id.toLowerCase(Locale.ROOT)) {
            case "es":
                return ES;
            default:
                throw new IllegalArgumentException("Unknown monitoring id [" + id + "]");
        }
    }
}
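A short usage note for the new enum (hypothetical caller; the behavior follows directly from the code above):

// Hypothetical usage of MonitoringIds, not part of the commit.
String id = MonitoringIds.ES.getId();                   // "es"
MonitoringIds parsed = MonitoringIds.fromId("ES");      // lookup is case-insensitive
MonitoringIds unknown = MonitoringIds.fromId("kibana"); // throws IllegalArgumentException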
@@ -14,13 +14,13 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.Collector;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;

import java.util.Collection;
import java.util.Collections;
@@ -184,8 +184,9 @@ public class AgentService extends AbstractLifecycleComponent<AgentService> {
}
for (Collector collector : collectors) {
if (collecting) {
Collection<MarvelDoc> docs = collector.collect();
if (docs != null) {
Collection<MonitoringDoc> docs = collector.collect();

if ((docs != null) && (docs.size() > 0)) {
logger.trace("bulk [{}] - adding [{}] collected docs from [{}] collector", bulk, docs.size(),
collector.name());
bulk.add(docs);
@@ -6,19 +6,18 @@
package org.elasticsearch.marvel.agent.collector;

import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.exporter.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.license.MarvelLicensee;

import java.util.Collection;
import java.util.Objects;

public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T> implements Collector<T> {
@@ -27,7 +26,6 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
protected final ClusterService clusterService;
protected final MarvelSettings marvelSettings;
protected final MarvelLicensee licensee;
private final MonitoringIndexNameResolver dataIndexNameResolver;

@Inject
public AbstractCollector(Settings settings, String name, ClusterService clusterService,
@@ -37,7 +35,6 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
this.clusterService = clusterService;
this.marvelSettings = marvelSettings;
this.licensee = licensee;
this.dataIndexNameResolver = new DataIndexNameResolver(MarvelTemplateUtils.TEMPLATE_VERSION);
}

@Override
@@ -76,7 +73,7 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
}

@Override
public Collection<MarvelDoc> collect() {
public Collection<MonitoringDoc> collect() {
try {
if (shouldCollect()) {
logger.trace("collector [{}] - collecting data...", name());
@@ -90,7 +87,7 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
return null;
}

protected abstract Collection<MarvelDoc> doCollect() throws Exception;
protected abstract Collection<MonitoringDoc> doCollect() throws Exception;

@Override
public T stop() {
@@ -121,35 +118,13 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
return clusterService.localNode();
}

public String resolveDataIndexName(long timestamp) {
return dataIndexNameResolver.resolve(timestamp);
protected String monitoringId() {
// Collectors always collects data for Elasticsearch
return MonitoringIds.ES.getId();
}

/**
* Resolves monitoring's data index name
*/
public class DataIndexNameResolver implements MonitoringIndexNameResolver {

private final String index;

DataIndexNameResolver(Integer version) {
Objects.requireNonNull(version, "index version cannot be null");
this.index = MarvelSettings.MONITORING_DATA_INDEX_PREFIX + String.valueOf(version);
}

@Override
public String resolve(MarvelDoc doc) {
return index;
}

@Override
public String resolve(long timestamp) {
return index;
}

@Override
public String indexPattern() {
return index;
}
protected String monitoringVersion() {
// Collectors always collects data for the current version of Elasticsearch
return Version.CURRENT.toString();
}
}
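With the DataIndexNameResolver inner class removed, collectors no longer resolve index names at all; they only stamp documents with monitoringId() and monitoringVersion(), and resolution happens in the exporters. A minimal sketch of a doCollect() implementation under the new contract (hypothetical collector; the names are taken from this diff):

// Sketch only, not part of the commit.
@Override
protected Collection<MonitoringDoc> doCollect() throws Exception {
    List<MonitoringDoc> results = new ArrayList<>(1);
    ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(monitoringId(), monitoringVersion());
    doc.setClusterUUID(clusterUUID());
    doc.setTimestamp(System.currentTimeMillis());
    doc.setSourceNode(localNode());
    results.add(doc);
    return results;
}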
@@ -6,7 +6,7 @@
package org.elasticsearch.marvel.agent.collector;

import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;

import java.util.Collection;

@@ -14,5 +14,5 @@ public interface Collector<T> extends LifecycleComponent<T> {

String name();

Collection<MarvelDoc> collect();
Collection<MonitoringDoc> collect();
}
@@ -7,17 +7,17 @@ package org.elasticsearch.marvel.agent.collector.cluster;

import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.license.core.License;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;

public class ClusterInfoMarvelDoc extends MarvelDoc {
public class ClusterInfoMonitoringDoc extends MonitoringDoc {

private String clusterName;
private String version;
private License license;
private ClusterStatsResponse clusterStats;

public ClusterInfoMarvelDoc(String index, String type, String id) {
super(index, type, id);
public ClusterInfoMonitoringDoc(String monitoringId, String monitoringVersion) {
super(monitoringId, monitoringVersion);
}

public String getClusterName() {
@ -15,7 +15,7 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
|
||||
|
@ -27,14 +27,12 @@ import java.util.List;
|
|||
/**
|
||||
* Collector for cluster state.
|
||||
* <p>
|
||||
* This collector runs on the master node only and collects {@link ClusterStateMarvelDoc} document
|
||||
* This collector runs on the master node only and collects {@link ClusterStateMonitoringDoc} document
|
||||
* at a given frequency.
|
||||
*/
|
||||
public class ClusterStateCollector extends AbstractCollector<ClusterStateCollector> {
|
||||
|
||||
public static final String NAME = "cluster-state-collector";
|
||||
public static final String TYPE = "cluster_state";
|
||||
public static final String NODE_TYPE = "node";
|
||||
|
||||
private final Client client;
|
||||
|
||||
|
@ -51,8 +49,8 @@ public class ClusterStateCollector extends AbstractCollector<ClusterStateCollect
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
List<MarvelDoc> results = new ArrayList<>(3);
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
List<MonitoringDoc> results = new ArrayList<>(3);
|
||||
|
||||
ClusterState clusterState = clusterService.state();
|
||||
String clusterUUID = clusterState.metaData().clusterUUID();
|
||||
|
@ -63,9 +61,8 @@ public class ClusterStateCollector extends AbstractCollector<ClusterStateCollect
|
|||
ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().get(marvelSettings.clusterStateTimeout());
|
||||
|
||||
// Adds a cluster_state document with associated status
|
||||
ClusterStateMarvelDoc clusterStateDoc = new ClusterStateMarvelDoc();
|
||||
ClusterStateMonitoringDoc clusterStateDoc = new ClusterStateMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
clusterStateDoc.setClusterUUID(clusterUUID);
|
||||
clusterStateDoc.setType(TYPE);
|
||||
clusterStateDoc.setTimestamp(timestamp);
|
||||
clusterStateDoc.setSourceNode(sourceNode);
|
||||
clusterStateDoc.setClusterState(clusterState);
|
||||
|
@ -75,19 +72,17 @@ public class ClusterStateCollector extends AbstractCollector<ClusterStateCollect
|
|||
DiscoveryNodes nodes = clusterState.nodes();
|
||||
if (nodes != null) {
|
||||
for (DiscoveryNode node : nodes) {
|
||||
// Adds a document for every node in the monitoring timestamped index (type "nodes")
|
||||
ClusterStateNodeMarvelDoc clusterStateNodeDoc = new ClusterStateNodeMarvelDoc();
|
||||
// Adds a document for every node in the marvel timestamped index (type "nodes")
|
||||
ClusterStateNodeMonitoringDoc clusterStateNodeDoc = new ClusterStateNodeMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
clusterStateNodeDoc.setClusterUUID(clusterUUID);;
|
||||
clusterStateNodeDoc.setType(NODE_TYPE);
|
||||
clusterStateNodeDoc.setTimestamp(timestamp);
|
||||
clusterStateNodeDoc.setSourceNode(sourceNode);
|
||||
clusterStateNodeDoc.setStateUUID(stateUUID);
|
||||
clusterStateNodeDoc.setNodeId(node.getId());
|
||||
results.add(clusterStateNodeDoc);
|
||||
|
||||
// Adds a document for every node in the monitoring data index (type "node")
|
||||
DiscoveryNodeMarvelDoc discoveryNodeDoc = new DiscoveryNodeMarvelDoc(resolveDataIndexName(timestamp), NODE_TYPE,
|
||||
node.getId());
|
||||
// Adds a document for every node in the marvel data index (type "node")
|
||||
DiscoveryNodeMonitoringDoc discoveryNodeDoc = new DiscoveryNodeMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
discoveryNodeDoc.setClusterUUID(clusterUUID);
|
||||
discoveryNodeDoc.setTimestamp(timestamp);
|
||||
discoveryNodeDoc.setSourceNode(node);
|
||||
|
|
|
@ -7,13 +7,17 @@ package org.elasticsearch.marvel.agent.collector.cluster;
|
|||
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ClusterStateMarvelDoc extends MarvelDoc {
|
||||
public class ClusterStateMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private ClusterState clusterState;
|
||||
private ClusterHealthStatus status;
|
||||
|
||||
public ClusterStateMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public ClusterState getClusterState() {
|
||||
return clusterState;
|
||||
}
|
||||
|
@ -29,5 +33,4 @@ public class ClusterStateMarvelDoc extends MarvelDoc {
|
|||
public void setStatus(ClusterHealthStatus status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
}
|
|
@ -5,13 +5,17 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ClusterStateNodeMarvelDoc extends MarvelDoc {
|
||||
public class ClusterStateNodeMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private String stateUUID;
|
||||
private String nodeId;
|
||||
|
||||
public ClusterStateNodeMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public String getStateUUID() {
|
||||
return stateUUID;
|
||||
}
|
|
@ -18,7 +18,7 @@ import org.elasticsearch.license.plugin.core.LicenseUtils;
|
|||
import org.elasticsearch.license.plugin.core.LicensesManagerService;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
|
||||
|
@ -41,9 +41,6 @@ public class ClusterStatsCollector extends AbstractCollector<ClusterStatsCollect
|
|||
|
||||
public static final String NAME = "cluster-stats-collector";
|
||||
|
||||
public static final String CLUSTER_INFO_TYPE = "cluster_info";
|
||||
public static final String CLUSTER_STATS_TYPE = "cluster_stats";
|
||||
|
||||
private final ClusterName clusterName;
|
||||
private final LicensesManagerService licensesManagerService;
|
||||
private final Client client;
|
||||
|
@ -65,8 +62,8 @@ public class ClusterStatsCollector extends AbstractCollector<ClusterStatsCollect
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
List<MarvelDoc> results = new ArrayList<>(1);
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
List<MonitoringDoc> results = new ArrayList<>(1);
|
||||
|
||||
// Retrieves cluster stats
|
||||
ClusterStatsResponse clusterStats = null;
|
||||
|
@ -85,8 +82,7 @@ public class ClusterStatsCollector extends AbstractCollector<ClusterStatsCollect
|
|||
DiscoveryNode sourceNode = localNode();
|
||||
|
||||
// Adds a cluster info document
|
||||
String resolvedIndex = resolveDataIndexName(timestamp);
|
||||
ClusterInfoMarvelDoc clusterInfoDoc = new ClusterInfoMarvelDoc(resolvedIndex, CLUSTER_INFO_TYPE, clusterUUID);
|
||||
ClusterInfoMonitoringDoc clusterInfoDoc = new ClusterInfoMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
clusterInfoDoc.setClusterUUID(clusterUUID);
|
||||
clusterInfoDoc.setTimestamp(timestamp);
|
||||
clusterInfoDoc.setSourceNode(sourceNode);
|
||||
|
@ -98,9 +94,8 @@ public class ClusterStatsCollector extends AbstractCollector<ClusterStatsCollect
|
|||
|
||||
// Adds a cluster stats document
|
||||
if (super.shouldCollect()) {
|
||||
ClusterStatsMarvelDoc clusterStatsDoc = new ClusterStatsMarvelDoc();
|
||||
ClusterStatsMonitoringDoc clusterStatsDoc = new ClusterStatsMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
clusterStatsDoc.setClusterUUID(clusterUUID);
|
||||
clusterStatsDoc.setType(CLUSTER_STATS_TYPE);
|
||||
clusterStatsDoc.setTimestamp(timestamp);
|
||||
clusterStatsDoc.setSourceNode(sourceNode);
|
||||
clusterStatsDoc.setClusterStats(clusterStats);
|
||||
|
|
|
@ -6,12 +6,16 @@
|
|||
package org.elasticsearch.marvel.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ClusterStatsMarvelDoc extends MarvelDoc {
|
||||
public class ClusterStatsMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private ClusterStatsResponse clusterStats;
|
||||
|
||||
public ClusterStatsMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public ClusterStatsResponse getClusterStats() {
|
||||
return clusterStats;
|
||||
}
|
||||
|
@ -19,5 +23,4 @@ public class ClusterStatsMarvelDoc extends MarvelDoc {
|
|||
public void setClusterStats(ClusterStatsResponse clusterStats) {
|
||||
this.clusterStats = clusterStats;
|
||||
}
|
||||
|
||||
}
|
|
@ -6,14 +6,14 @@
|
|||
package org.elasticsearch.marvel.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class DiscoveryNodeMarvelDoc extends MarvelDoc {
|
||||
public class DiscoveryNodeMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private DiscoveryNode node;
|
||||
|
||||
public DiscoveryNodeMarvelDoc(String index, String type, String id) {
|
||||
super(index, type, id);
|
||||
public DiscoveryNodeMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public DiscoveryNode getNode() {
|
|
@ -15,7 +15,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
import org.elasticsearch.shield.Shield;
|
||||
|
@ -29,13 +29,12 @@ import java.util.List;
|
|||
/**
|
||||
* Collector for the Recovery API.
|
||||
* <p>
|
||||
* This collector runs on the master node only and collects a {@link IndexRecoveryMarvelDoc} document
|
||||
* This collector runs on the master node only and collects a {@link IndexRecoveryMonitoringDoc} document
|
||||
* for every index that has on-going shard recoveries.
|
||||
*/
|
||||
public class IndexRecoveryCollector extends AbstractCollector<IndexRecoveryCollector> {
|
||||
|
||||
public static final String NAME = "index-recovery-collector";
|
||||
public static final String TYPE = "index_recovery";
|
||||
|
||||
private final Client client;
|
||||
|
||||
|
@ -52,8 +51,8 @@ public class IndexRecoveryCollector extends AbstractCollector<IndexRecoveryColle
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
List<MarvelDoc> results = new ArrayList<>(1);
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
List<MonitoringDoc> results = new ArrayList<>(1);
|
||||
try {
|
||||
RecoveryResponse recoveryResponse = client.admin().indices().prepareRecoveries()
|
||||
.setIndices(marvelSettings.indices())
|
||||
|
@ -62,9 +61,8 @@ public class IndexRecoveryCollector extends AbstractCollector<IndexRecoveryColle
|
|||
.get(marvelSettings.recoveryTimeout());
|
||||
|
||||
if (recoveryResponse.hasRecoveries()) {
|
||||
IndexRecoveryMarvelDoc indexRecoveryDoc = new IndexRecoveryMarvelDoc();;
|
||||
IndexRecoveryMonitoringDoc indexRecoveryDoc = new IndexRecoveryMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
indexRecoveryDoc.setClusterUUID(clusterUUID());
|
||||
indexRecoveryDoc.setType(TYPE);
|
||||
indexRecoveryDoc.setTimestamp(System.currentTimeMillis());
|
||||
indexRecoveryDoc.setSourceNode(localNode());
|
||||
indexRecoveryDoc.setRecoveryResponse(recoveryResponse);
|
||||
|
|
|
@ -6,12 +6,16 @@
|
|||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class IndexRecoveryMarvelDoc extends MarvelDoc {
|
||||
public class IndexRecoveryMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private RecoveryResponse recoveryResponse;
|
||||
|
||||
public IndexRecoveryMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public RecoveryResponse getRecoveryResponse() {
|
||||
return recoveryResponse;
|
||||
}
|
|
@ -17,7 +17,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
import org.elasticsearch.shield.Shield;
|
||||
|
@ -31,13 +31,12 @@ import java.util.List;
|
|||
/**
|
||||
* Collector for indices statistics.
|
||||
* <p>
|
||||
* This collector runs on the master node only and collect a {@link IndexStatsMarvelDoc} document
|
||||
* This collector runs on the master node only and collect a {@link IndexStatsMonitoringDoc} document
|
||||
* for each existing index in the cluster.
|
||||
*/
|
||||
public class IndexStatsCollector extends AbstractCollector<IndexStatsCollector> {
|
||||
|
||||
public static final String NAME = "index-stats-collector";
|
||||
public static final String TYPE = "index_stats";
|
||||
|
||||
private final Client client;
|
||||
|
||||
|
@ -54,8 +53,8 @@ public class IndexStatsCollector extends AbstractCollector<IndexStatsCollector>
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
List<MarvelDoc> results = new ArrayList<>(1);
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
List<MonitoringDoc> results = new ArrayList<>();
|
||||
try {
|
||||
IndicesStatsResponse indicesStats = client.admin().indices().prepareStats()
|
||||
.setIndices(marvelSettings.indices())
|
||||
|
@ -76,9 +75,8 @@ public class IndexStatsCollector extends AbstractCollector<IndexStatsCollector>
|
|||
DiscoveryNode sourceNode = localNode();
|
||||
|
||||
for (IndexStats indexStats : indicesStats.getIndices().values()) {
|
||||
IndexStatsMarvelDoc indexStatsDoc = new IndexStatsMarvelDoc();
|
||||
IndexStatsMonitoringDoc indexStatsDoc = new IndexStatsMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
indexStatsDoc.setClusterUUID(clusterUUID);
|
||||
indexStatsDoc.setType(TYPE);
|
||||
indexStatsDoc.setTimestamp(timestamp);
|
||||
indexStatsDoc.setSourceNode(sourceNode);
|
||||
indexStatsDoc.setIndexStats(indexStats);
|
||||
|
|
|
@ -6,12 +6,16 @@
|
|||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.stats.IndexStats;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class IndexStatsMarvelDoc extends MarvelDoc {
|
||||
public class IndexStatsMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private IndexStats indexStats;
|
||||
|
||||
public IndexStatsMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public IndexStats getIndexStats() {
|
||||
return indexStats;
|
||||
}
|
||||
|
@ -19,5 +23,4 @@ public class IndexStatsMarvelDoc extends MarvelDoc {
|
|||
public void setIndexStats(IndexStats indexStats) {
|
||||
this.indexStats = indexStats;
|
||||
}
|
||||
|
||||
}
|
|
@ -15,7 +15,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
import org.elasticsearch.shield.Shield;
|
||||
|
@ -27,12 +27,11 @@ import java.util.Collections;
|
|||
/**
|
||||
* Collector for indices statistics.
|
||||
* <p>
|
||||
* This collector runs on the master node only and collect one {@link IndicesStatsMarvelDoc} document.
|
||||
* This collector runs on the master node only and collect one {@link IndicesStatsMonitoringDoc} document.
|
||||
*/
|
||||
public class IndicesStatsCollector extends AbstractCollector<IndicesStatsCollector> {
|
||||
|
||||
public static final String NAME = "indices-stats-collector";
|
||||
public static final String TYPE = "indices_stats";
|
||||
|
||||
private final Client client;
|
||||
|
||||
|
@ -49,7 +48,7 @@ public class IndicesStatsCollector extends AbstractCollector<IndicesStatsCollect
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
try {
|
||||
IndicesStatsResponse indicesStats = client.admin().indices().prepareStats()
|
||||
.setIndices(marvelSettings.indices())
|
||||
|
@ -61,9 +60,8 @@ public class IndicesStatsCollector extends AbstractCollector<IndicesStatsCollect
|
|||
.setStore(true)
|
||||
.get(marvelSettings.indicesStatsTimeout());
|
||||
|
||||
IndicesStatsMarvelDoc indicesStatsDoc = new IndicesStatsMarvelDoc();
|
||||
IndicesStatsMonitoringDoc indicesStatsDoc = new IndicesStatsMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
indicesStatsDoc.setClusterUUID(clusterUUID());
|
||||
indicesStatsDoc.setType(TYPE);
|
||||
indicesStatsDoc.setTimestamp(System.currentTimeMillis());
|
||||
indicesStatsDoc.setSourceNode(localNode());
|
||||
indicesStatsDoc.setIndicesStats(indicesStats);
|
||||
|
|
|
@ -6,12 +6,16 @@
|
|||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class IndicesStatsMarvelDoc extends MarvelDoc {
|
||||
public class IndicesStatsMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private IndicesStatsResponse indicesStats;
|
||||
|
||||
public IndicesStatsMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public IndicesStatsResponse getIndicesStats() {
|
||||
return indicesStats;
|
||||
}
|
||||
|
@ -19,5 +23,4 @@ public class IndicesStatsMarvelDoc extends MarvelDoc {
|
|||
public void setIndicesStats(IndicesStatsResponse indicesStats) {
|
||||
this.indicesStats = indicesStats;
|
||||
}
|
||||
|
||||
}
|
|
@ -19,7 +19,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
|
||||
|
@ -30,12 +30,11 @@ import java.util.Collections;
|
|||
* Collector for nodes statistics.
|
||||
* <p>
|
||||
* This collector runs on every non-client node and collect
|
||||
* a {@link NodeStatsMarvelDoc} document for each node of the cluster.
|
||||
* a {@link NodeStatsMonitoringDoc} document for each node of the cluster.
|
||||
*/
|
||||
public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {
|
||||
|
||||
public static final String NAME = "node-stats-collector";
|
||||
public static final String TYPE = "node_stats";
|
||||
|
||||
private final Client client;
|
||||
private final NodeEnvironment nodeEnvironment;
|
||||
|
@ -64,7 +63,7 @@ public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
NodesStatsRequest request = new NodesStatsRequest("_local");
|
||||
request.indices(CommonStatsFlags.ALL);
|
||||
request.os(true);
|
||||
|
@ -81,18 +80,17 @@ public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {
|
|||
|
||||
DiscoveryNode sourceNode = localNode();
|
||||
|
||||
NodeStatsMarvelDoc nodeStatsDoc = new NodeStatsMarvelDoc();
|
||||
NodeStatsMonitoringDoc nodeStatsDoc = new NodeStatsMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
nodeStatsDoc.setClusterUUID(clusterUUID());
|
||||
nodeStatsDoc.setType(TYPE);
|
||||
nodeStatsDoc.setTimestamp(System.currentTimeMillis());
|
||||
nodeStatsDoc.setSourceNode(sourceNode);
|
||||
nodeStatsDoc.setNodeId(sourceNode.getId());
|
||||
nodeStatsDoc.setNodeId(sourceNode.id());
|
||||
nodeStatsDoc.setNodeMaster(isLocalNodeMaster());
|
||||
nodeStatsDoc.setNodeStats(nodeStats);
|
||||
nodeStatsDoc.setMlockall(BootstrapInfo.isMemoryLocked());
|
||||
nodeStatsDoc.setDiskThresholdWaterMarkHigh(diskThresholdWatermarkHigh);
|
||||
nodeStatsDoc.setDiskThresholdDeciderEnabled(diskThresholdDeciderEnabled);
|
||||
|
||||
return Collections.singletonList((MarvelDoc) nodeStatsDoc);
|
||||
return Collections.singletonList(nodeStatsDoc);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
package org.elasticsearch.marvel.agent.collector.node;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class NodeStatsMarvelDoc extends MarvelDoc {
|
||||
public class NodeStatsMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private String nodeId;
|
||||
private boolean nodeMaster;
|
||||
|
@ -18,6 +18,10 @@ public class NodeStatsMarvelDoc extends MarvelDoc {
|
|||
private Double diskThresholdWaterMarkHigh;
|
||||
private boolean diskThresholdDeciderEnabled;
|
||||
|
||||
public NodeStatsMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public void setNodeId(String nodeId) {
|
||||
this.nodeId = nodeId;
|
||||
}
|
||||
|
@ -65,6 +69,5 @@ public class NodeStatsMarvelDoc extends MarvelDoc {
|
|||
public boolean isDiskThresholdDeciderEnabled() {
|
||||
return diskThresholdDeciderEnabled;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -6,15 +6,15 @@
|
|||
package org.elasticsearch.marvel.agent.collector.shards;
|
||||
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ShardMarvelDoc extends MarvelDoc {
|
||||
public class ShardMonitoringDoc extends MonitoringDoc {
|
||||
|
||||
private ShardRouting shardRouting;
|
||||
private String clusterStateUUID;
|
||||
|
||||
public ShardMarvelDoc(String index, String type, String id) {
|
||||
super(index, type, id);
|
||||
public ShardMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
}
|
||||
|
||||
public void setShardRouting(ShardRouting shardRouting) {
|
||||
|
@ -32,5 +32,4 @@ public class ShardMarvelDoc extends MarvelDoc {
|
|||
public String getClusterStateUUID() {
|
||||
return clusterStateUUID;
|
||||
}
|
||||
|
||||
}
|
|
@ -15,7 +15,7 @@ import org.elasticsearch.common.regex.Regex;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
@ -27,13 +27,12 @@ import java.util.List;
|
|||
/**
|
||||
* Collector for shards.
|
||||
* <p>
|
||||
* This collector runs on the master node only and collects the {@link ShardMarvelDoc} documents
|
||||
* This collector runs on the master node only and collects the {@link ShardMonitoringDoc} documents
|
||||
* for every index shard.
|
||||
*/
|
||||
public class ShardsCollector extends AbstractCollector<ShardsCollector> {
|
||||
|
||||
public static final String NAME = "shards-collector";
|
||||
public static final String TYPE = "shards";
|
||||
|
||||
@Inject
|
||||
public ShardsCollector(Settings settings, ClusterService clusterService,
|
||||
|
@ -47,8 +46,8 @@ public class ShardsCollector extends AbstractCollector<ShardsCollector> {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Collection<MarvelDoc> doCollect() throws Exception {
|
||||
List<MarvelDoc> results = new ArrayList<>(1);
|
||||
protected Collection<MonitoringDoc> doCollect() throws Exception {
|
||||
List<MonitoringDoc> results = new ArrayList<>(1);
|
||||
|
||||
ClusterState clusterState = clusterService.state();
|
||||
if (clusterState != null) {
|
||||
|
@ -62,7 +61,7 @@ public class ShardsCollector extends AbstractCollector<ShardsCollector> {
|
|||
|
||||
for (ShardRouting shard : shards) {
|
||||
if (match(shard.getIndexName())) {
|
||||
ShardMarvelDoc shardDoc = new ShardMarvelDoc(null, TYPE, id(stateUUID, shard));
|
||||
ShardMonitoringDoc shardDoc = new ShardMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
shardDoc.setClusterUUID(clusterUUID);
|
||||
shardDoc.setTimestamp(timestamp);
|
||||
if (shard.assignedToNode()) {
|
||||
|
@ -86,31 +85,4 @@ public class ShardsCollector extends AbstractCollector<ShardsCollector> {
|
|||
String[] indices = marvelSettings.indices();
|
||||
return IndexNameExpressionResolver.isAllIndices(Arrays.asList(marvelSettings.indices())) || Regex.simpleMatch(indices, indexName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute an id that has the format:
|
||||
* <p>
|
||||
* {state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'}
|
||||
*/
|
||||
static String id(String stateUUID, ShardRouting shardRouting) {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append(stateUUID);
|
||||
builder.append(':');
|
||||
if (shardRouting.assignedToNode()) {
|
||||
builder.append(shardRouting.currentNodeId());
|
||||
} else {
|
||||
builder.append("_na");
|
||||
}
|
||||
builder.append(':');
|
||||
builder.append(shardRouting.index());
|
||||
builder.append(':');
|
||||
builder.append(Integer.valueOf(shardRouting.id()));
|
||||
builder.append(':');
|
||||
if (shardRouting.primary()) {
|
||||
builder.append("p");
|
||||
} else {
|
||||
builder.append("r");
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.marvel.agent.exporter;
|
|||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
/**
|
||||
|
@ -21,16 +20,12 @@ public abstract class ExportBulk {
|
|||
this.name = name;
|
||||
}
|
||||
|
||||
public ExportBulk add(MarvelDoc... docs) throws Exception {
|
||||
return add(Arrays.asList(docs));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public abstract ExportBulk add(Collection<MarvelDoc> docs) throws Exception;
|
||||
public abstract ExportBulk add(Collection<MonitoringDoc> docs) throws Exception;
|
||||
|
||||
public abstract void flush() throws Exception;
|
||||
|
||||
|
@ -74,7 +69,7 @@ public abstract class ExportBulk {
|
|||
}
|
||||
|
||||
@Override
|
||||
public ExportBulk add(Collection<MarvelDoc> docs) throws Exception {
|
||||
public ExportBulk add(Collection<MonitoringDoc> docs) throws Exception {
|
||||
for (ExportBulk bulk : bulks) {
|
||||
bulk.add(docs);
|
||||
}
|
||||
|
|
|
@ -9,11 +9,8 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsException;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
|
@ -22,20 +19,17 @@ public abstract class Exporter {
|
|||
public static final String INDEX_NAME_TIME_FORMAT_SETTING = "index.name.time_format";
|
||||
public static final String BULK_TIMEOUT_SETTING = "bulk.timeout";
|
||||
|
||||
public static final String DEFAULT_INDEX_NAME_TIME_FORMAT = "YYYY.MM.dd";
|
||||
|
||||
protected final String type;
|
||||
protected final Config config;
|
||||
protected final ESLogger logger;
|
||||
protected final MonitoringIndexNameResolver indexNameResolver;
|
||||
|
||||
protected final @Nullable TimeValue bulkTimeout;
|
||||
|
||||
public Exporter(String type, Config config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
this.logger = config.logger(getClass());
|
||||
this.indexNameResolver = new DefaultIndexNameResolver(config.settings);
|
||||
bulkTimeout = config.settings().getAsTime(BULK_TIMEOUT_SETTING, null);
|
||||
this.bulkTimeout = config.settings().getAsTime(BULK_TIMEOUT_SETTING, null);
|
||||
}
|
||||
|
||||
public String type() {
|
||||
|
@ -46,10 +40,6 @@ public abstract class Exporter {
|
|||
return config.name;
|
||||
}
|
||||
|
||||
public MonitoringIndexNameResolver indexNameResolver() {
|
||||
return indexNameResolver;
|
||||
}
|
||||
|
||||
public boolean masterOnly() {
|
||||
return false;
|
||||
}
|
||||
|
@ -60,10 +50,10 @@ public abstract class Exporter {
|
|||
*/
|
||||
public abstract ExportBulk openBulk();
|
||||
|
||||
public void export(Collection<MarvelDoc> marvelDocs) throws Exception {
|
||||
public void export(Collection<MonitoringDoc> monitoringDocs) throws Exception {
|
||||
ExportBulk bulk = openBulk();
|
||||
if (bulk != null) {
|
||||
bulk.add(marvelDocs).flush();
|
||||
bulk.add(monitoringDocs).flush();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -124,46 +114,4 @@ public abstract class Exporter {
|
|||
|
||||
public abstract E create(Config config);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class DefaultIndexNameResolver implements MonitoringIndexNameResolver {
|
||||
|
||||
private final DateTimeFormatter indexTimeFormatter;
|
||||
|
||||
public DefaultIndexNameResolver(Settings settings) {
|
||||
String indexTimeFormat = settings.get(INDEX_NAME_TIME_FORMAT_SETTING, DEFAULT_INDEX_NAME_TIME_FORMAT);
|
||||
try {
|
||||
indexTimeFormatter = DateTimeFormat.forPattern(indexTimeFormat).withZoneUTC();
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new SettingsException("invalid monitoring index name time format [" + indexTimeFormat + "] set for [" +
|
||||
settingFQN(INDEX_NAME_TIME_FORMAT_SETTING) + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String resolve(MarvelDoc doc) {
|
||||
if (doc.getIndex() != null) {
|
||||
return doc.getIndex();
|
||||
}
|
||||
return resolve(doc.getTimestamp());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String resolve(long timestamp) {
|
||||
return MarvelSettings.MONITORING_INDICES_PREFIX + String.valueOf(MarvelTemplateUtils.TEMPLATE_VERSION) + "-" +
|
||||
indexTimeFormatter.print(timestamp);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String indexPattern() {
|
||||
return MarvelSettings.MONITORING_INDICES_PREFIX + String.valueOf(MarvelTemplateUtils.TEMPLATE_VERSION) + "-*";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return indexTimeFormatter.toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,8 @@ public final class MarvelTemplateUtils {
|
|||
static final String INDEX_TEMPLATE_FILE = "/monitoring-es.json";
|
||||
static final String INDEX_TEMPLATE_NAME_PREFIX = ".monitoring-es-";
|
||||
|
||||
static final String DATA_TEMPLATE_FILE = "/monitoring-es-data.json";
|
||||
static final String DATA_TEMPLATE_NAME_PREFIX = ".monitoring-es-data-";
|
||||
static final String DATA_TEMPLATE_FILE = "/monitoring-data.json";
|
||||
static final String DATA_TEMPLATE_NAME_PREFIX = ".monitoring-data-";
|
||||
|
||||
static final String PROPERTIES_FILE = "/monitoring.properties";
|
||||
static final String TEMPLATE_VERSION_PROPERTY = "template.version";
|
||||
|
|
|
@ -14,48 +14,21 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
public abstract class MarvelDoc {
|
||||
/**
|
||||
* Base class for all monitoring documents.
|
||||
*/
|
||||
public class MonitoringDoc {
|
||||
|
||||
private String index;
|
||||
private String type;
|
||||
private String id;
|
||||
private final String monitoringId;
|
||||
private final String monitoringVersion;
|
||||
|
||||
private String clusterUUID;
|
||||
private long timestamp;
|
||||
|
||||
private Node sourceNode;
|
||||
|
||||
public MarvelDoc() {
|
||||
}
|
||||
|
||||
public MarvelDoc(String index, String type, String id) {
|
||||
this.index = index;
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getIndex() {
|
||||
return index;
|
||||
}
|
||||
|
||||
public void setIndex(String index) {
|
||||
this.index = index;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(String type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
public MonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
this.monitoringId = monitoringId;
|
||||
this.monitoringVersion = monitoringVersion;
|
||||
}
|
||||
|
||||
public String getClusterUUID() {
|
||||
|
@ -66,12 +39,12 @@ public abstract class MarvelDoc {
|
|||
this.clusterUUID = clusterUUID;
|
||||
}
|
||||
|
||||
public long getTimestamp() {
|
||||
return timestamp;
|
||||
public String getMonitoringId() {
|
||||
return monitoringId;
|
||||
}
|
||||
|
||||
public void setTimestamp(long timestamp) {
|
||||
this.timestamp = timestamp;
|
||||
public String getMonitoringVersion() {
|
||||
return monitoringVersion;
|
||||
}
|
||||
|
||||
public Node getSourceNode() {
|
||||
|
@ -87,6 +60,22 @@ public abstract class MarvelDoc {
|
|||
node.getHostAddress(), node.getName(), node.getAttributes()));
|
||||
}
|
||||
|
||||
public long getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public void setTimestamp(long timestamp) {
|
||||
this.timestamp = timestamp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "marvel document [class=" + getClass().getName() +
|
||||
", monitoring id=" + getMonitoringId() +
|
||||
", monitoring version=" + getMonitoringVersion() +
|
||||
"]";
|
||||
}
|
||||
|
||||
public static class Node implements ToXContent {
|
||||
|
||||
private String uuid;
|
|
@ -1,24 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.marvel.agent.exporter;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public interface MonitoringIndexNameResolver {
|
||||
|
||||
String resolve(MarvelDoc doc);
|
||||
|
||||
String resolve(long timestamp);
|
||||
|
||||
/**
|
||||
* Returns the generic part of the index name (ie without any dynamic part like a timestamp) that can be used to match indices names.
|
||||
*
|
||||
* @return the index pattern
|
||||
*/
|
||||
String indexPattern();
|
||||
|
||||
}
|
|
@ -22,13 +22,12 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
|
||||
import org.elasticsearch.marvel.agent.exporter.Exporter;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
|
||||
import org.elasticsearch.marvel.agent.renderer.Renderer;
|
||||
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
|
||||
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;
|
||||
import org.elasticsearch.marvel.support.VersionUtils;
|
||||
|
||||
import javax.net.ssl.HostnameVerifier;
|
||||
|
@ -83,6 +82,8 @@ public class HttpExporter extends Exporter {
|
|||
/** Minimum supported version of the remote monitoring cluster **/
|
||||
public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_2_0_0_beta2;
|
||||
|
||||
private static final XContentType CONTENT_TYPE = XContentType.JSON;
|
||||
|
||||
volatile String[] hosts;
|
||||
final TimeValue connectionTimeout;
|
||||
final TimeValue connectionReadTimeout;
|
||||
|
@ -93,7 +94,7 @@ public class HttpExporter extends Exporter {
|
|||
final boolean hostnameVerification;
|
||||
|
||||
final Environment env;
|
||||
final RendererRegistry renderers;
|
||||
final ResolversRegistry resolvers;
|
||||
|
||||
final @Nullable TimeValue templateCheckTimeout;
|
||||
|
||||
|
@ -107,11 +108,10 @@ public class HttpExporter extends Exporter {
|
|||
final ConnectionKeepAliveWorker keepAliveWorker;
|
||||
Thread keepAliveThread;
|
||||
|
||||
public HttpExporter(Exporter.Config config, Environment env, RendererRegistry rendererRegistry) {
|
||||
public HttpExporter(Exporter.Config config, Environment env) {
|
||||
|
||||
super(TYPE, config);
|
||||
this.env = env;
|
||||
this.renderers = rendererRegistry;
|
||||
|
||||
hosts = config.settings().getAsArray(HOST_SETTING, Strings.EMPTY_ARRAY);
|
||||
if (hosts.length == 0) {
|
||||
|
@ -142,10 +142,13 @@ public class HttpExporter extends Exporter {
|
|||
if (templateVersion == null) {
|
||||
throw new IllegalStateException("unable to find built-in template version");
|
||||
}
|
||||
resolvers = new ResolversRegistry(config.settings());
|
||||
logger.debug("initialized with hosts [{}], index prefix [{}], template version [{}]",
|
||||
Strings.arrayToCommaDelimitedString(hosts), MonitoringIndexNameResolver.PREFIX, templateVersion);
|
||||
}
|
||||
|
||||
logger.debug("initialized with hosts [{}], index prefix [{}], index resolver [{}], template version [{}]",
|
||||
Strings.arrayToCommaDelimitedString(hosts),
|
||||
MarvelSettings.MONITORING_INDICES_PREFIX, indexNameResolver, templateVersion);
|
||||
ResolversRegistry getResolvers() {
|
||||
return resolvers;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -173,7 +176,7 @@ public class HttpExporter extends Exporter {
|
|||
if (bulkTimeout != null) {
|
||||
queryString = "?master_timeout=" + bulkTimeout;
|
||||
}
|
||||
HttpURLConnection conn = openAndValidateConnection("POST", "/_bulk" + queryString, XContentType.SMILE.mediaType());
|
||||
HttpURLConnection conn = openAndValidateConnection("POST", "/_bulk" + queryString, CONTENT_TYPE.mediaType());
|
||||
if (conn != null && (keepAliveThread == null || !keepAliveThread.isAlive())) {
|
||||
// start keep alive upon successful connection if not there.
|
||||
initKeepAliveThread();
|
||||
|
@ -181,46 +184,47 @@ public class HttpExporter extends Exporter {
|
|||
return conn;
|
||||
}
|
||||
|
||||
private void render(MarvelDoc marvelDoc, OutputStream out) throws IOException {
|
||||
final XContentType xContentType = XContentType.SMILE;
|
||||
private void render(MonitoringDoc doc, OutputStream out) throws IOException {
|
||||
try {
|
||||
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
|
||||
if (resolver != null) {
|
||||
String index = resolver.index(doc);
|
||||
String type = resolver.type(doc);
|
||||
String id = resolver.id(doc);
|
||||
|
||||
// Get the appropriate renderer in order to render the MarvelDoc
|
||||
Renderer renderer = renderers.getRenderer(marvelDoc);
|
||||
assert renderer != null :
|
||||
"unable to render monitoring document of type [" + marvelDoc.getType() + "]. no renderer found in registry";
|
||||
try (XContentBuilder builder = new XContentBuilder(CONTENT_TYPE.xContent(), out)) {
|
||||
// Builds the bulk action metadata line
|
||||
builder.startObject();
|
||||
builder.startObject("index");
|
||||
builder.field("_index", index);
|
||||
builder.field("_type", type);
|
||||
if (id != null) {
|
||||
builder.field("_id", id);
|
||||
}
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
if (renderer == null) {
|
||||
logger.warn("http exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
|
||||
name(), marvelDoc.getType());
|
||||
return;
|
||||
}
|
||||
// Adds action metadata line bulk separator
|
||||
out.write(CONTENT_TYPE.xContent().streamSeparator());
|
||||
|
||||
try (XContentBuilder builder = new XContentBuilder(xContentType.xContent(), out)) {
|
||||
// Render the monitoring document
|
||||
out.write(resolver.source(doc, CONTENT_TYPE).toBytes());
|
||||
|
||||
// Builds the bulk action metadata line
|
||||
builder.startObject();
|
||||
builder.startObject("index");
|
||||
// Adds final bulk separator
|
||||
out.write(CONTENT_TYPE.xContent().streamSeparator());
|
||||
|
||||
// we need the index to be based on the document timestamp and/or template version
|
||||
builder.field("_index", indexNameResolver.resolve(marvelDoc));
|
||||
|
||||
if (marvelDoc.getType() != null) {
|
||||
builder.field("_type", marvelDoc.getType());
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("http exporter [{}] - added index request [index={}, type={}, id={}]",
|
||||
name(), index, type, id);
|
||||
}
|
||||
} else {
|
||||
logger.warn("http exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
|
||||
name(), doc);
|
||||
}
|
||||
if (marvelDoc.getId() != null) {
|
||||
builder.field("_id", marvelDoc.getId());
|
||||
}
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
} catch (Exception e) {
|
||||
logger.warn("http exporter [{}] - failed to render document [{}], skipping it", e, name(), doc);
|
||||
}
|
||||
// Adds action metadata line bulk separator
|
||||
out.write(xContentType.xContent().streamSeparator());
|
||||
|
||||
// Render the MarvelDoc
|
||||
renderer.render(marvelDoc, xContentType, out);
|
||||
|
||||
// Adds final bulk separator
|
||||
out.write(xContentType.xContent().streamSeparator());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -234,7 +238,7 @@ public class HttpExporter extends Exporter {
|
|||
}
|
||||
|
||||
InputStream inputStream = conn.getInputStream();
|
||||
try (XContentParser parser = XContentType.SMILE.xContent().createParser(inputStream)) {
|
||||
try (XContentParser parser = CONTENT_TYPE.xContent().createParser(inputStream)) {
|
||||
Map<String, Object> response = parser.map();
|
||||
if (response.get("items") != null) {
|
||||
ArrayList<Object> list = (ArrayList<Object>) response.get("items");
|
||||
|
@ -312,7 +316,6 @@ public class HttpExporter extends Exporter {
|
|||
logger.error("could not connect to any configured elasticsearch instances [{}]", Strings.arrayToCommaDelimitedString(hosts));
|
||||
|
||||
return null;
|
||||
|
||||
}
|
||||
|
||||
/** open a connection to the given hosts, returning null when not successful * */
|
||||
|
@ -527,7 +530,6 @@ public class HttpExporter extends Exporter {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
static private void validateHosts(String[] hosts) {
|
||||
for (String host : hosts) {
|
||||
try {
|
||||
|
@ -692,7 +694,7 @@ public class HttpExporter extends Exporter {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Bulk add(Collection<MarvelDoc> docs) throws Exception {
|
||||
public Bulk add(Collection<MonitoringDoc> docs) throws Exception {
|
||||
if (connection == null) {
|
||||
connection = openExportingConnection();
|
||||
}
|
||||
|
@ -704,9 +706,9 @@ public class HttpExporter extends Exporter {
|
|||
// We need to use a buffer to render each monitoring document
|
||||
// because the renderer might close the outputstream (ex: XContentBuilder)
|
||||
try (BytesStreamOutput buffer = new BytesStreamOutput()) {
|
||||
for (MarvelDoc marvelDoc : docs) {
|
||||
for (MonitoringDoc monitoringDoc : docs) {
|
||||
try {
|
||||
render(marvelDoc, buffer);
|
||||
render(monitoringDoc, buffer);
|
||||
// write the result to the connection
|
||||
out.write(buffer.bytes().toBytes());
|
||||
} finally {
|
||||
|
@ -739,18 +741,16 @@ public class HttpExporter extends Exporter {
|
|||
public static class Factory extends Exporter.Factory<HttpExporter> {
|
||||
|
||||
private final Environment env;
|
||||
private final RendererRegistry rendererRegistry;
|
||||
|
||||
@Inject
|
||||
public Factory(Environment env, RendererRegistry rendererRegistry) {
|
||||
public Factory(Environment env) {
|
||||
super(TYPE, false);
|
||||
this.env = env;
|
||||
this.rendererRegistry = rendererRegistry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HttpExporter create(Config config) {
|
||||
return new HttpExporter(config, env, rendererRegistry);
|
||||
return new HttpExporter(config, env);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,16 +9,14 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
import org.elasticsearch.marvel.agent.exporter.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;

import java.io.IOException;
import java.util.Collection;

@ -31,27 +29,22 @@ public class LocalBulk extends ExportBulk {

private final ESLogger logger;
private final Client client;
private final MonitoringIndexNameResolver indexNameResolver;
private final RendererRegistry renderers;
private final ResolversRegistry resolvers;

private BytesStreamOutput buffer = null;
BulkRequestBuilder requestBuilder;

AtomicReference<State> state = new AtomicReference<>();

public LocalBulk(String name, ESLogger logger, Client client, MonitoringIndexNameResolver indexNameResolver,
RendererRegistry renderers) {
public LocalBulk(String name, ESLogger logger, Client client, ResolversRegistry resolvers) {
super(name);
this.logger = logger;
this.client = client;
this.indexNameResolver = indexNameResolver;
this.renderers = renderers;
this.resolvers = resolvers;
state.set(State.ACTIVE);
}

@Override
public synchronized ExportBulk add(Collection<MarvelDoc> docs) throws Exception {
for (MarvelDoc marvelDoc : docs) {
public synchronized ExportBulk add(Collection<MonitoringDoc> docs) throws Exception {
for (MonitoringDoc doc : docs) {
if (state.get() != State.ACTIVE) {
return this;
}

@ -59,38 +52,24 @@ public class LocalBulk extends ExportBulk {
requestBuilder = client.prepareBulk();
}

// Get the appropriate renderer in order to render the MarvelDoc
Renderer renderer = renderers.getRenderer(marvelDoc);
assert renderer != null : "unable to render monitoring document of type [" + marvelDoc.getType() + "]. no renderer registered";
try {
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
if (resolver != null) {
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc));
request.source(resolver.source(doc, XContentType.SMILE));
requestBuilder.add(request);

if (renderer == null) {
logger.warn("local exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
name, marvelDoc.getType());
continue;
if (logger.isTraceEnabled()) {
logger.trace("local exporter [{}] - added index request [index={}, type={}, id={}]",
name, request.index(), request.type(), request.id());
}
} else {
logger.warn("local exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
name, doc);
}
} catch (Exception e) {
logger.warn("local exporter [{}] - failed to add document [{}], skipping it", e, name, doc);
}

IndexRequestBuilder request = client.prepareIndex();

// we need the index to be based on the document timestamp and/or template version
request.setIndex(indexNameResolver.resolve(marvelDoc));

if (marvelDoc.getType() != null) {
request.setType(marvelDoc.getType());
}
if (marvelDoc.getId() != null) {
request.setId(marvelDoc.getId());
}

if (buffer == null) {
buffer = new BytesStreamOutput();
} else {
buffer.reset();
}

renderer.render(marvelDoc, XContentType.SMILE, buffer);
request.setSource(buffer.bytes().toBytes());

requestBuilder.add(request);
}
return this;
}

@ -108,9 +87,6 @@ public class LocalBulk extends ExportBulk {
}
} finally {
requestBuilder = null;
if (buffer != null) {
buffer.reset();
}
}
}

@ -118,7 +94,6 @@ public class LocalBulk extends ExportBulk {
state.set(State.TERMINATING);
synchronized (this) {
requestBuilder = null;
buffer = null;
state.compareAndSet(State.TERMINATING, State.TERMINATED);
}
}
@ -7,6 +7,7 @@ package org.elasticsearch.marvel.agent.exporter.local;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;

@ -25,10 +26,13 @@ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.shield.InternalClient;
import org.joda.time.DateTime;

@ -36,6 +40,8 @@ import org.joda.time.DateTimeZone;

import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString;

@ -48,7 +54,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle

private final Client client;
private final ClusterService clusterService;
private final RendererRegistry renderers;
private final ResolversRegistry resolvers;
private final CleanerService cleanerService;

private volatile LocalBulk bulk;

@ -57,12 +63,10 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
/** Version number of built-in templates **/
private final Integer templateVersion;

public LocalExporter(Exporter.Config config, Client client, ClusterService clusterService, RendererRegistry renderers,
CleanerService cleanerService) {
public LocalExporter(Exporter.Config config, Client client, ClusterService clusterService, CleanerService cleanerService) {
super(TYPE, config);
this.client = client;
this.clusterService = clusterService;
this.renderers = renderers;
this.cleanerService = cleanerService;

// Loads the current version number of built-in templates

@ -71,6 +75,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
throw new IllegalStateException("unable to find built-in template version");
}

resolvers = new ResolversRegistry(config.settings());
bulk = resolveBulk(clusterService.state(), bulk);
clusterService.add(this);
cleanerService.add(this);

@ -80,6 +85,10 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
return bulk;
}

ResolversRegistry getResolvers() {
return resolvers;
}

@Override
public void clusterChanged(ClusterChangedEvent event) {
LocalBulk currentBulk = bulk;

@ -152,7 +161,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle

// ok.. we have a compatible template... we can start
logger.debug("local exporter [{}] - started!", name());
return currentBulk != null ? currentBulk : new LocalBulk(name(), logger, client, indexNameResolver, renderers);
return currentBulk != null ? currentBulk : new LocalBulk(name(), logger, client, resolvers);
}

// we are on master

@ -182,7 +191,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
}

// ok.. we have a compatible templates... we can start
return currentBulk != null ? currentBulk : new LocalBulk(name(), logger, client, indexNameResolver, renderers);
return currentBulk != null ? currentBulk : new LocalBulk(name(), logger, client, resolvers);
}

/**

@ -266,22 +275,33 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
ClusterState clusterState = clusterService.state();
if (clusterState != null) {
long expirationTime = expiration.getMillis();
Set<String> indices = new HashSet<>();

// Only clean up indices that match the timestamped index pattern
String pattern = indexNameResolver().indexPattern();
// Get the list of monitoring index patterns
Set<String> monitoringPatterns = new HashSet<>();
for (MonitoringIndexNameResolver resolver : getResolvers()) {
monitoringPatterns.add(resolver.indexPattern());
}

String[] patterns = StreamSupport.stream(getResolvers().spliterator(), false)
.map(MonitoringIndexNameResolver::indexPattern)
.toArray(String[]::new);

MonitoringDoc monitoringDoc = new MonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
monitoringDoc.setTimestamp(System.currentTimeMillis());

// Get the names of the current monitoring indices
Set<String> currents = StreamSupport.stream(getResolvers().spliterator(), false)
.map(r -> r.index(monitoringDoc))
.collect(Collectors.toSet());

Set<String> indices = new HashSet<>();
for (ObjectObjectCursor<String, IndexMetaData> index : clusterState.getMetaData().indices()) {
String indexName = index.key;

if (Regex.simpleMatch(pattern, indexName)) {
// Should not happen, but just in case... we never delete the data indices
if (indexName.startsWith(MarvelSettings.MONITORING_DATA_INDEX_PREFIX)
|| indexName.equalsIgnoreCase(MarvelSettings.LEGACY_DATA_INDEX_NAME)) {
continue;
}
if (Regex.simpleMatch(patterns, indexName)) {

// Never delete the current timestamped index
if (indexName.equals(indexNameResolver().resolve(System.currentTimeMillis()))) {
// Never delete the data index or a current index
if (currents.contains(indexName)) {
continue;
}
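Taken together, the cleaner now derives everything from the resolvers: the wildcard patterns select candidate indices and the names the resolvers produce for "now" are excluded from deletion. A rough sketch of the decision, with illustrative index names (the exact prefix and version segment come from the resolvers, not from this sketch):

// Hypothetical illustration of the deletion rule, not part of the commit.
String[] patterns = { ".monitoring-data-1", ".monitoring-*-1-*" };            // from indexPattern()
Set<String> currents = new HashSet<>(Arrays.asList(
        ".monitoring-data-1", ".monitoring-es-1-2016.01.29"));                // from index(doc) at "now"

String candidate = ".monitoring-es-1-2015.12.01";
boolean deletable = Regex.simpleMatch(patterns, candidate) && currents.contains(candidate) == false;
// deletable == true: it matches a monitoring pattern and is neither the data index nor today's index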
@ -329,22 +349,20 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
public static class Factory extends Exporter.Factory<LocalExporter> {

private final InternalClient client;
private final RendererRegistry registry;
private final ClusterService clusterService;
private final CleanerService cleanerService;

@Inject
public Factory(InternalClient client, ClusterService clusterService, RendererRegistry registry, CleanerService cleanerService) {
public Factory(InternalClient client, ClusterService clusterService, CleanerService cleanerService) {
super(TYPE, true);
this.client = client;
this.clusterService = clusterService;
this.registry = registry;
this.cleanerService = cleanerService;
}

@Override
public LocalExporter create(Config config) {
return new LocalExporter(config, client, clusterService, registry, cleanerService);
return new LocalExporter(config, client, clusterService, cleanerService);
}
}
}
@ -1,90 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer;

import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
* Renders Monitoring documents using a XContentBuilder (potentially filtered)
*/
public abstract class AbstractRenderer<T extends MarvelDoc> implements Renderer<T> {

private static final String[] DEFAULT_FILTERS = {
Fields.CLUSTER_UUID.underscore().toString(),
Fields.TIMESTAMP.underscore().toString(),
Fields.SOURCE_NODE.underscore().toString(),
};

private final String[] filters;

public AbstractRenderer(String[] filters, boolean additive) {
if (!additive) {
this.filters = filters;
} else {
if (CollectionUtils.isEmpty(filters)) {
this.filters = DEFAULT_FILTERS;
} else {
Set<String> additions = new HashSet<>();
Collections.addAll(additions, DEFAULT_FILTERS);
Collections.addAll(additions, filters);
this.filters = additions.toArray(new String[additions.size()]);
}
}
}

@Override
public void render(T marvelDoc, XContentType xContentType, OutputStream os) throws IOException {
if (marvelDoc != null) {
try (XContentBuilder builder = new XContentBuilder(xContentType.xContent(), os, filters())) {
builder.startObject();

// Add fields common to all Monitoring documents
builder.field(Fields.CLUSTER_UUID, marvelDoc.getClusterUUID());
DateTime timestampDateTime = new DateTime(marvelDoc.getTimestamp(), DateTimeZone.UTC);
builder.field(Fields.TIMESTAMP, timestampDateTime.toString());

MarvelDoc.Node sourceNode = marvelDoc.getSourceNode();
if (sourceNode != null) {
builder.field(Fields.SOURCE_NODE, sourceNode);
}

// Render fields specific to the Monitoring document
doRender(marvelDoc, builder, ToXContent.EMPTY_PARAMS);

builder.endObject();
}
}
}

protected abstract void doRender(T marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException;

/**
* Returns the list of filters used when rendering the document. If null,
* no filtering is applied.
*/
protected String[] filters() {
return filters;
}

public static final class Fields {
public static final XContentBuilderString CLUSTER_UUID = new XContentBuilderString("cluster_uuid");
public static final XContentBuilderString TIMESTAMP = new XContentBuilderString("timestamp");
public static final XContentBuilderString SOURCE_NODE = new XContentBuilderString("source_node");
}
}
@ -1,32 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer;

import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.io.OutputStream;

/**
* {@code Renderer}s are used to render documents using a given OutputStream.
* <p>
* Each {@code Renderer} can be thought of as a generator of a unique document <em>type</em> within the resulting ES index. For example,
* there will be details about shards, which requires a unique document type and there will also be details about indices, which requires
* their own unique documents.
*
* @see AbstractRenderer
*/
public interface Renderer<T> {
/**
* Convert the given {@code document} type into something that can be sent to the monitoring cluster.
*
* @param document The arbitrary document (e.g., details about a shard)
* @param xContentType The rendered content type (e.g., JSON)
* @param os The buffer
* @throws IOException if any unexpected error occurs
*/
void render(T document, XContentType xContentType, OutputStream os) throws IOException;
}
@ -1,52 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMarvelDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMarvelDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMarvelDoc;
import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMarvelDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMarvelDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMarvelDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMarvelDoc;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMarvelDoc;
import org.elasticsearch.marvel.agent.collector.shards.ShardMarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.renderer.cluster.ClusterInfoRenderer;
import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStateNodeRenderer;
import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStateRenderer;
import org.elasticsearch.marvel.agent.renderer.cluster.ClusterStatsRenderer;
import org.elasticsearch.marvel.agent.renderer.cluster.DiscoveryNodeRenderer;
import org.elasticsearch.marvel.agent.renderer.indices.IndexRecoveryRenderer;
import org.elasticsearch.marvel.agent.renderer.indices.IndexStatsRenderer;
import org.elasticsearch.marvel.agent.renderer.indices.IndicesStatsRenderer;
import org.elasticsearch.marvel.agent.renderer.node.NodeStatsRenderer;
import org.elasticsearch.marvel.agent.renderer.shards.ShardsRenderer;

import java.util.HashMap;
import java.util.Map;

public class RendererModule extends AbstractModule {

protected void configure() {
Map<Class<? extends MarvelDoc>, Renderer> renderers = new HashMap<>();
renderers.put(IndicesStatsMarvelDoc.class, new IndicesStatsRenderer());
renderers.put(IndexStatsMarvelDoc.class, new IndexStatsRenderer());
renderers.put(ClusterInfoMarvelDoc.class, new ClusterInfoRenderer());
renderers.put(ClusterStatsMarvelDoc.class, new ClusterStatsRenderer());
renderers.put(ClusterStateMarvelDoc.class, new ClusterStateRenderer());
renderers.put(ClusterStateNodeMarvelDoc.class, new ClusterStateNodeRenderer());
renderers.put(DiscoveryNodeMarvelDoc.class, new DiscoveryNodeRenderer());
renderers.put(ShardMarvelDoc.class, new ShardsRenderer());
renderers.put(NodeStatsMarvelDoc.class, new NodeStatsRenderer());
renderers.put(IndexRecoveryMarvelDoc.class, new IndexRecoveryRenderer());

RendererRegistry registry = new RendererRegistry(renderers);
bind(RendererRegistry.class).toInstance(registry);
}
}
@ -1,26 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;

import java.util.Collections;
import java.util.Map;

public class RendererRegistry {

private final Map<Class<? extends MarvelDoc>, Renderer> renderers;

@Inject
public RendererRegistry(Map<Class<? extends MarvelDoc>, Renderer> renderers) {
this.renderers = Collections.unmodifiableMap(renderers);
}

public Renderer getRenderer(MarvelDoc marvelDoc) {
return renderers.get(marvelDoc.getClass());
}
}
@ -1,35 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;

import java.io.IOException;

public class ClusterStateNodeRenderer extends AbstractRenderer<ClusterStateNodeMarvelDoc> {

public ClusterStateNodeRenderer() {
super(null, false);
}

@Override
protected void doRender(ClusterStateNodeMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.STATE_UUID, marvelDoc.getStateUUID());
builder.startObject(Fields.NODE);
builder.field(Fields.ID, marvelDoc.getNodeId());
builder.endObject();
}

static final class Fields {
static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final XContentBuilderString ID = new XContentBuilderString("id");
}
}
@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;

import java.io.IOException;
import java.util.Locale;

public class ClusterStateRenderer extends AbstractRenderer<ClusterStateMarvelDoc> {

public static final String[] FILTERS = {
"cluster_state.version",
"cluster_state.master_node",
"cluster_state.state_uuid",
"cluster_state.status",
"cluster_state.nodes",
};

public ClusterStateRenderer() {
super(FILTERS, true);
}

@Override
protected void doRender(ClusterStateMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.CLUSTER_STATE);

ClusterState clusterState = marvelDoc.getClusterState();
if (clusterState != null) {
builder.field(Fields.STATUS, marvelDoc.getStatus().name().toLowerCase(Locale.ROOT));
clusterState.toXContent(builder, params);
}

builder.endObject();
}

static final class Fields {
static final XContentBuilderString CLUSTER_STATE = new XContentBuilderString("cluster_state");
static final XContentBuilderString STATUS = new XContentBuilderString("status");
}
}
@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.shards;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.shards.ShardMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;

import java.io.IOException;

public class ShardsRenderer extends AbstractRenderer<ShardMarvelDoc> {

public static final String[] FILTERS = {
"state_uuid",
"shard.state",
"shard.primary",
"shard.node",
"shard.relocating_node",
"shard.shard",
"shard.index",
};

public ShardsRenderer() {
super(FILTERS, true);
}

@Override
protected void doRender(ShardMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.STATE_UUID, marvelDoc.getClusterStateUUID());

ShardRouting shardRouting = marvelDoc.getShardRouting();
if (shardRouting != null) {
// ShardRouting is rendered inside a startObject() / endObject() but without a name,
// so we must use XContentBuilder.field(String, ToXContent, ToXContent.Params) here
builder.field(Fields.SHARD.underscore().toString(), shardRouting, params);
}
}

static final class Fields {
static final XContentBuilderString SHARD = new XContentBuilderString("shard");
static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
}
}
@ -0,0 +1,192 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

import java.io.IOException;
import java.util.function.Function;

/**
* A MonitoringIndexNameResolver is used to resolve the index name, type name
* and id of a {@link MonitoringDoc}.
*/
public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {

public static final String PREFIX = ".monitoring";
public static final String DELIMITER = "-";

private final int version;

protected MonitoringIndexNameResolver(int version) {
this.version = version;
}

/**
* Returns the name of the index in which the monitoring document must be indexed.
*
* @param document the monitoring document
* @return the name of the index
*/
public abstract String index(T document);

/**
* Returns the generic part of the index name (i.e. without any dynamic part) that can be
* used to match indices names.
*
* @return the index pattern
*/
public abstract String indexPattern();

/**
* Returns the document type under which the monitoring document must be indexed.
*
* @param document the monitoring document
* @return the type of the document
*/
public abstract String type(T document);

/**
* Returns the document id under which the monitoring document must be indexed.
*
* @param document the monitoring document
* @return the id of the document
*/
public abstract String id(T document);

/**
* Builds the source of the document in a given XContentType
*
* @param document the monitoring document
* @param xContentType the content type
* @return the source of the document as bytes
*/
public BytesReference source(T document, XContentType xContentType) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
try (XContentBuilder builder = new XContentBuilder(xContentType.xContent(), out, filters())) {
builder.startObject();

builder.field(Fields.CLUSTER_UUID, document.getClusterUUID());
DateTime timestampDateTime = new DateTime(document.getTimestamp(), DateTimeZone.UTC);
builder.field(Fields.TIMESTAMP, timestampDateTime.toString());

MonitoringDoc.Node sourceNode = document.getSourceNode();
if (sourceNode != null) {
builder.field(Fields.SOURCE_NODE, sourceNode);
}

buildXContent(document, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
}
return out.bytes();
}
}

int getVersion() {
return version;
}

/**
* @return the filters used when rendering the document.
* If null or empty, no filtering is applied.
*/
public String[] filters() {
// No filtering by default
return null;
}

protected abstract void buildXContent(T document, XContentBuilder builder, ToXContent.Params params) throws IOException;

public static final class Fields {
public static final XContentBuilderString CLUSTER_UUID = new XContentBuilderString("cluster_uuid");
public static final XContentBuilderString TIMESTAMP = new XContentBuilderString("timestamp");
public static final XContentBuilderString SOURCE_NODE = new XContentBuilderString("source_node");
}

/**
* Data index name resolvers are used to index documents in
* the monitoring data index (.monitoring-data-{VERSION})
*/
public static abstract class Data<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {

public static final String DATA = "data";

private final String index;

public Data(int version) {
super(version);
this.index = String.join(DELIMITER, PREFIX, DATA, String.valueOf(version));
}

@Override
public String index(T document) {
return index;
}

@Override
public String indexPattern() {
return index;
}
}

/**
* Timestamped index name resolvers are used to index documents in
* a timestamped index (.monitoring-{ID}-{VERSION}-YYYY.MM.dd)
*/
public static abstract class Timestamped<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {

public static final Setting<String> INDEX_NAME_TIME_FORMAT_SETTING = new Setting<>("index.name.time_format", "YYYY.MM.dd",
Function.identity(), false, Setting.Scope.CLUSTER);

private final String id;
private final DateTimeFormatter formatter;

public Timestamped(String id, int version, Settings settings) {
super(version);
this.id = id;
String format = INDEX_NAME_TIME_FORMAT_SETTING.get(settings);
try {
this.formatter = DateTimeFormat.forPattern(format).withZoneUTC();
} catch (IllegalArgumentException e) {
throw new SettingsException("invalid index name time format [" + format + "]", e);
}
}

@Override
public String index(T document) {
return String.join(DELIMITER, PREFIX, id, String.valueOf(getVersion()), formatter.print(document.getTimestamp()));
}

@Override
public String indexPattern() {
return String.join(DELIMITER, PREFIX, "*", String.valueOf(getVersion()), "*");
}

@Override
public String id(T document) {
// Documents in timestamped indices are usually indexed with auto-generated ids
return null;
}

String getId() {
return id;
}
}
}
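To make the naming scheme concrete, here is a minimal, hypothetical Timestamped resolver for a made-up "ping" document; the class, the type name and the version value are illustrative and not part of the commit (imports as in the real resolvers below):

// Hypothetical example only: a timestamped resolver for a fictitious document type.
public class PingResolver extends MonitoringIndexNameResolver.Timestamped<MonitoringDoc> {

    public PingResolver(Settings settings) {
        super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
    }

    @Override
    public String type(MonitoringDoc document) {
        return "ping";
    }

    @Override
    protected void buildXContent(MonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.field("ping", true);
    }
}

// Assuming a template version of 1, for a document timestamped on January 29th 2016:
// index(doc)     -> ".monitoring-es-1-2016.01.29"
// indexPattern() -> ".monitoring-*-1-*"
// id(doc)        -> null (the id is auto-generated by Elasticsearch)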
@ -0,0 +1,125 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterInfoResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterStateNodeResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterStateResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterStatsResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.DiscoveryNodeResolver;
import org.elasticsearch.marvel.agent.resolver.indices.IndexRecoveryResolver;
import org.elasticsearch.marvel.agent.resolver.indices.IndexStatsResolver;
import org.elasticsearch.marvel.agent.resolver.indices.IndicesStatsResolver;
import org.elasticsearch.marvel.agent.resolver.node.NodeStatsResolver;
import org.elasticsearch.marvel.agent.resolver.shards.ShardsResolver;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver> {

private List<Registration> registrations;

public ResolversRegistry(Settings settings) {
List<Registration> resolvers = new ArrayList<>();
registerElasticsearchResolvers(resolvers, settings);
this.registrations = Collections.unmodifiableList(resolvers);
}

/** Registers default Elasticsearch resolvers used by the Marvel plugin. **/
private void registerElasticsearchResolvers(List<Registration> resolvers, Settings settings) {
Map<Class<? extends MonitoringDoc>, MonitoringIndexNameResolver<? extends MonitoringDoc>> map = new HashMap<>();
map.put(ClusterInfoMonitoringDoc.class, new ClusterInfoResolver());
map.put(ClusterStateNodeMonitoringDoc.class, new ClusterStateNodeResolver(settings));
map.put(ClusterStateMonitoringDoc.class, new ClusterStateResolver(settings));
map.put(ClusterStatsMonitoringDoc.class, new ClusterStatsResolver(settings));
map.put(DiscoveryNodeMonitoringDoc.class, new DiscoveryNodeResolver());
map.put(IndexRecoveryMonitoringDoc.class, new IndexRecoveryResolver(settings));
map.put(IndexStatsMonitoringDoc.class, new IndexStatsResolver(settings));
map.put(IndicesStatsMonitoringDoc.class, new IndicesStatsResolver(settings));
map.put(NodeStatsMonitoringDoc.class, new NodeStatsResolver(settings));
map.put(ShardMonitoringDoc.class, new ShardsResolver(settings));

for (Map.Entry<Class<? extends MonitoringDoc>, MonitoringIndexNameResolver<? extends MonitoringDoc>> resolver : map.entrySet()) {
resolvers.add(new Registration(MonitoringIds.ES.getId(), Version.CURRENT.toString(), resolver.getKey(), resolver.getValue()));
}
}

public <T extends MonitoringDoc> MonitoringIndexNameResolver<MonitoringDoc> getResolver(T document) {
return getResolver(document.getClass(), document.getMonitoringId(), document.getMonitoringVersion());
}

<T extends MonitoringDoc> MonitoringIndexNameResolver<T> getResolver(Class<? extends MonitoringDoc> clazz, String id, String version) {
for (Registration registration : registrations) {
if (registration.support(id, version, clazz)) {
return (MonitoringIndexNameResolver<T>) registration.getResolver();
}
}
throw new IllegalArgumentException("No resolver found for monitoring document [class=" + clazz.getName()
+ ", id=" + id + ", version=" + version + "]");
}

@Override
public Iterator<MonitoringIndexNameResolver> iterator() {
List<MonitoringIndexNameResolver> resolvers = new ArrayList<>(registrations.size());
for (Registration registration : registrations) {
resolvers.add(registration.getResolver());
}
return Collections.unmodifiableList(resolvers).iterator();
}

private static class Registration {

private final String id;
private final String version;
private final Class<? extends MonitoringDoc> clazz;
private final MonitoringIndexNameResolver<? extends MonitoringDoc> resolver;

private Registration(String id, String version, Class<? extends MonitoringDoc> clazz,
MonitoringIndexNameResolver<? extends MonitoringDoc> resolver) {
this.id = Objects.requireNonNull(id);
this.version = Objects.requireNonNull(version);
this.clazz = Objects.requireNonNull(clazz);
this.resolver = Objects.requireNonNull(resolver);
}

public MonitoringIndexNameResolver<? extends MonitoringDoc> getResolver() {
return resolver;
}

public boolean support(String id, String version, Class<? extends MonitoringDoc> documentClass) {
return clazz.equals(documentClass) && this.id.equals(id) && this.version.equals(version);
}

@Override
public String toString() {
return "monitoring resolver [" + resolver.getClass().getName()
+ "] registered for [class=" + clazz.getName() +
", id=" + id +
", version=" + version +
"]";
}
}
}
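A short usage sketch of the registry as it is wired by this commit, assuming the single MonitoringDoc constructor taking a monitoring id and version that this commit introduces; the document class used here is one of the registered ones and IOException handling is elided:

// Illustrative only: how an exporter resolves index name, type, id and source for a document.
ResolversRegistry resolvers = new ResolversRegistry(Settings.EMPTY);

MonitoringDoc doc = new ClusterStatsMonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setTimestamp(System.currentTimeMillis());

MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc))
        .source(resolver.source(doc, XContentType.SMILE));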
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.common.collect.MapBuilder;

@ -12,24 +12,38 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.license.core.License;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;

public class ClusterInfoRenderer extends AbstractRenderer<ClusterInfoMarvelDoc> {
public ClusterInfoRenderer() {
super(null, false);
public class ClusterInfoResolver extends MonitoringIndexNameResolver.Data<ClusterInfoMonitoringDoc> {

public static final String TYPE = "cluster_info";

public ClusterInfoResolver() {
super(MarvelTemplateUtils.TEMPLATE_VERSION);
}

@Override
protected void doRender(ClusterInfoMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.CLUSTER_NAME, marvelDoc.getClusterName());
builder.field(Fields.VERSION, marvelDoc.getVersion());
public String type(ClusterInfoMonitoringDoc document) {
return TYPE;
}

License license = marvelDoc.getLicense();
@Override
public String id(ClusterInfoMonitoringDoc document) {
return document.getClusterUUID();
}

@Override
protected void buildXContent(ClusterInfoMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.CLUSTER_NAME, document.getClusterName());
builder.field(Fields.VERSION, document.getVersion());

License license = document.getLicense();
if (license != null) {
builder.startObject(Fields.LICENSE);
Map<String, String> extraParams = new MapBuilder<String, String>()

@ -37,12 +51,12 @@ public class ClusterInfoRenderer extends AbstractRenderer<ClusterInfoMarvelDoc>
.map();
params = new ToXContent.DelegatingMapParams(extraParams, params);
license.toInnerXContent(builder, params);
builder.field(Fields.HKEY, hash(license, marvelDoc.getClusterUUID()));
builder.field(Fields.HKEY, hash(license, document.getClusterUUID()));
builder.endObject();
}

builder.startObject(Fields.CLUSTER_STATS);
ClusterStatsResponse clusterStats = marvelDoc.getClusterStats();
ClusterStatsResponse clusterStats = document.getClusterStats();
if (clusterStats != null) {
clusterStats.toXContent(builder, params);
}
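Because this resolver extends the Data variant, cluster_info documents land in the single data index rather than a timestamped one. A rough sketch of what it yields (the exact version segment comes from MarvelTemplateUtils.TEMPLATE_VERSION; "1" below is illustrative):

// Illustrative only: resolution of a cluster_info document by ClusterInfoResolver.
ClusterInfoResolver resolver = new ClusterInfoResolver();
// resolver.index(doc)     -> ".monitoring-data-1"
// resolver.indexPattern() -> ".monitoring-data-1"   (no wildcard: the data index is not timestamped)
// resolver.type(doc)      -> "cluster_info"
// resolver.id(doc)        -> doc.getClusterUUID()   (one document per cluster, overwritten in place)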
@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class ClusterStateNodeResolver extends MonitoringIndexNameResolver.Timestamped<ClusterStateNodeMonitoringDoc> {

public static final String TYPE = "node";

public ClusterStateNodeResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
public String type(ClusterStateNodeMonitoringDoc document) {
return TYPE;
}

@Override
protected void buildXContent(ClusterStateNodeMonitoringDoc document,
XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.STATE_UUID, document.getStateUUID());
builder.startObject(Fields.NODE);
builder.field(Fields.ID, document.getNodeId());
builder.endObject();
}

static final class Fields {
static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final XContentBuilderString ID = new XContentBuilderString("id");
}
}
@ -0,0 +1,65 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;
import java.util.Locale;

public class ClusterStateResolver extends MonitoringIndexNameResolver.Timestamped<ClusterStateMonitoringDoc> {

public static final String TYPE = "cluster_state";

private static final String[] FILTERS = {
"cluster_uuid",
"timestamp",
"source_node",
"cluster_state.version",
"cluster_state.master_node",
"cluster_state.state_uuid",
"cluster_state.status",
"cluster_state.nodes",
};

public ClusterStateResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
public String type(ClusterStateMonitoringDoc document) {
return TYPE;
}

@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(ClusterStateMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.CLUSTER_STATE);
ClusterState clusterState = document.getClusterState();
if (clusterState != null) {
builder.field(Fields.STATUS, document.getStatus().name().toLowerCase(Locale.ROOT));
clusterState.toXContent(builder, params);
}
builder.endObject();
}

static final class Fields {
static final XContentBuilderString CLUSTER_STATE = new XContentBuilderString(TYPE);
static final XContentBuilderString STATUS = new XContentBuilderString("status");
}
}
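The FILTERS array above takes over the whitelisting role of the removed AbstractRenderer: only the listed dot-notation paths survive in the stored source. An illustrative before/after for a cluster_state document (field values made up, unrelated fields abridged):

// Unfiltered source:
{ "cluster_uuid": "abc", "timestamp": "...", "source_node": { "...": "..." },
  "cluster_state": { "version": 12, "master_node": "n1", "state_uuid": "u1", "status": "green",
                     "nodes": { "...": "..." }, "routing_table": { "...": "..." }, "metadata": { "...": "..." } } }

// Stored source after filtering with FILTERS (routing_table and metadata are dropped):
{ "cluster_uuid": "abc", "timestamp": "...", "source_node": { "...": "..." },
  "cluster_state": { "version": 12, "master_node": "n1", "state_uuid": "u1", "status": "green",
                     "nodes": { "...": "..." } } }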
@ -3,20 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class ClusterStatsRenderer extends AbstractRenderer<ClusterStatsMarvelDoc> {
public class ClusterStatsResolver extends MonitoringIndexNameResolver.Timestamped<ClusterStatsMonitoringDoc> {

public static final String[] FILTERS = {
public static final String TYPE = "cluster_stats";

private static final String[] FILTERS = {
"cluster_uuid",
"timestamp",
"source_node",
"cluster_stats.nodes.count.total",
"cluster_stats.indices.count",
"cluster_stats.indices.shards.total",

@ -32,23 +40,31 @@ public class ClusterStatsRenderer extends AbstractRenderer<ClusterStatsMarvelDoc
"cluster_stats.nodes.versions",
};

public ClusterStatsRenderer() {
super(FILTERS, true);
public ClusterStatsResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
protected void doRender(ClusterStatsMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.CLUSTER_STATS);
public String type(ClusterStatsMonitoringDoc document) {
return TYPE;
}

ClusterStatsResponse clusterStats = marvelDoc.getClusterStats();
@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(ClusterStatsMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.CLUSTER_STATS);
ClusterStatsResponse clusterStats = document.getClusterStats();
if (clusterStats != null) {
clusterStats.toXContent(builder, params);
}

builder.endObject();
}

static final class Fields {
static final XContentBuilderString CLUSTER_STATS = new XContentBuilderString("cluster_stats");
static final XContentBuilderString CLUSTER_STATS = new XContentBuilderString(TYPE);
}
}
@ -3,29 +3,43 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class DiscoveryNodeRenderer extends AbstractRenderer<DiscoveryNodeMarvelDoc> {
public class DiscoveryNodeResolver extends MonitoringIndexNameResolver.Data<DiscoveryNodeMonitoringDoc> {

public DiscoveryNodeRenderer() {
super(null, false);
public static final String TYPE = "node";

public DiscoveryNodeResolver() {
super(MarvelTemplateUtils.TEMPLATE_VERSION);
}

@Override
protected void doRender(DiscoveryNodeMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
public String type(DiscoveryNodeMonitoringDoc document) {
return TYPE;
}

@Override
public String id(DiscoveryNodeMonitoringDoc document) {
return document.getNode().getId();
}

@Override
protected void buildXContent(DiscoveryNodeMonitoringDoc document,
XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.NODE);

DiscoveryNode node = marvelDoc.getNode();
DiscoveryNode node = document.getNode();
if (node != null) {
builder.field(Fields.NAME, node.getName());
builder.field(Fields.TRANSPORT_ADDRESS, node.getAddress().toString());

@ -42,13 +56,10 @@ public class DiscoveryNodeRenderer extends AbstractRenderer<DiscoveryNodeMarvelD
}

static final class Fields {
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final XContentBuilderString NODE = new XContentBuilderString(TYPE);
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final XContentBuilderString TRANSPORT_ADDRESS = new XContentBuilderString("transport_address");
static final XContentBuilderString ATTRIBUTES = new XContentBuilderString("attributes");
static final XContentBuilderString ID = new XContentBuilderString("id");
}
}
@@ -3,31 +3,42 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public class IndexRecoveryRenderer extends AbstractRenderer<IndexRecoveryMarvelDoc> {
public class IndexRecoveryResolver extends MonitoringIndexNameResolver.Timestamped<IndexRecoveryMonitoringDoc> {

public IndexRecoveryRenderer() {
super(null, false);
public static final String TYPE = "index_recovery";

public IndexRecoveryResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
protected void doRender(IndexRecoveryMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
public String type(IndexRecoveryMonitoringDoc document) {
return TYPE;
}

@Override
protected void buildXContent(IndexRecoveryMonitoringDoc document,
XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.INDEX_RECOVERY);

RecoveryResponse recovery = marvelDoc.getRecoveryResponse();
RecoveryResponse recovery = document.getRecoveryResponse();
if (recovery != null) {
builder.startArray(Fields.SHARDS);
Map<String, List<RecoveryState>> shards = recovery.shardRecoveryStates();

@@ -52,7 +63,7 @@ public class IndexRecoveryRenderer extends AbstractRenderer<IndexRecoveryMarvelD
}

static final class Fields {
static final XContentBuilderString INDEX_RECOVERY = new XContentBuilderString("index_recovery");
static final XContentBuilderString INDEX_RECOVERY = new XContentBuilderString(TYPE);
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString INDEX_NAME = new XContentBuilderString("index_name");
}
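For readers following the refactoring, the Timestamped resolvers introduced above derive the target index name from the monitoring id, the template version and the document timestamp. The exact pattern is not shown in these hunks, so the standalone sketch below only illustrates the idea; the ".monitoring-{id}-{version}-{yyyy.MM.dd}" layout and the daily granularity are assumptions, and the real logic lives in MonitoringIndexNameResolver.Timestamped.

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

// Illustration only: a timestamped monitoring index name built from the
// monitoring id, template version and document timestamp. The pattern is an
// assumption for this sketch, not taken from the resolver source.
public class TimestampedIndexNameSketch {

    private static final DateTimeFormatter DAILY =
            DateTimeFormatter.ofPattern("yyyy.MM.dd").withZone(ZoneOffset.UTC);

    static String indexName(String monitoringId, int templateVersion, long timestampMillis) {
        return ".monitoring-" + monitoringId + "-" + templateVersion + "-"
                + DAILY.format(Instant.ofEpochMilli(timestampMillis));
    }

    public static void main(String[] args) {
        // prints ".monitoring-es-2-2016.01.04" (the version value 2 is only an example)
        System.out.println(indexName("es", 2, 1451865600000L));
    }
}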
@@ -3,20 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class IndexStatsRenderer extends AbstractRenderer<IndexStatsMarvelDoc> {
public class IndexStatsResolver extends MonitoringIndexNameResolver.Timestamped<IndexStatsMonitoringDoc> {

public static final String[] FILTERS = {
public static final String TYPE = "index_stats";

private static final String[] FILTERS = {
"cluster_uuid",
"timestamp",
"source_node",
"index_stats.index",
"index_stats.primaries.docs.count",
"index_stats.primaries.fielddata.memory_size_in_bytes",

@@ -44,15 +52,25 @@ public class IndexStatsRenderer extends AbstractRenderer<IndexStatsMarvelDoc> {
"index_stats.total.refresh.total_time_in_millis",
};

public IndexStatsRenderer() {
super(FILTERS, true);
public IndexStatsResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
protected void doRender(IndexStatsMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
public String type(IndexStatsMonitoringDoc document) {
return TYPE;
}

@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(IndexStatsMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.INDEX_STATS);

IndexStats indexStats = marvelDoc.getIndexStats();
IndexStats indexStats = document.getIndexStats();
if (indexStats != null) {
builder.field(Fields.INDEX, indexStats.getIndex());

@@ -73,7 +91,7 @@ public class IndexStatsRenderer extends AbstractRenderer<IndexStatsMarvelDoc> {
}

static final class Fields {
static final XContentBuilderString INDEX_STATS = new XContentBuilderString("index_stats");
static final XContentBuilderString INDEX_STATS = new XContentBuilderString(TYPE);
static final XContentBuilderString INDEX = new XContentBuilderString("index");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString PRIMARIES = new XContentBuilderString("primaries");
@@ -3,20 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class IndicesStatsRenderer extends AbstractRenderer<IndicesStatsMarvelDoc> {
public class IndicesStatsResolver extends MonitoringIndexNameResolver.Timestamped<IndicesStatsMonitoringDoc> {

public static final String[] FILTERS = {
public static final String TYPE = "indices_stats";

private static final String[] FILTERS = {
"cluster_uuid",
"timestamp",
"source_node",
"indices_stats._all.primaries.docs.count",
"indices_stats._all.primaries.indexing.index_time_in_millis",
"indices_stats._all.primaries.indexing.index_total",

@@ -35,23 +43,31 @@ public class IndicesStatsRenderer extends AbstractRenderer<IndicesStatsMarvelDoc
"indices_stats._all.total.store.size_in_bytes",
};

public IndicesStatsRenderer() {
super(FILTERS, true);
public IndicesStatsResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
protected void doRender(IndicesStatsMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.INDICES_STATS);
public String type(IndicesStatsMonitoringDoc document) {
return TYPE;
}

IndicesStatsResponse indicesStats = marvelDoc.getIndicesStats();
@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(IndicesStatsMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.INDICES_STATS);
IndicesStatsResponse indicesStats = document.getIndicesStats();
if (indicesStats != null) {
indicesStats.toXContent(builder, params);
}

builder.endObject();
}

static final class Fields {
static final XContentBuilderString INDICES_STATS = new XContentBuilderString("indices_stats");
static final XContentBuilderString INDICES_STATS = new XContentBuilderString(TYPE);
}
}
@@ -3,20 +3,29 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.node;
package org.elasticsearch.marvel.agent.resolver.node;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class NodeStatsRenderer extends AbstractRenderer<NodeStatsMarvelDoc> {
public class NodeStatsResolver extends MonitoringIndexNameResolver.Timestamped<NodeStatsMonitoringDoc> {

public static final String[] FILTERS = {
public static final String TYPE = "node_stats";

private static final String[] FILTERS = {
// Common information
"cluster_uuid",
"timestamp",
"source_node",
// Extra information
"node_stats.node_id",
"node_stats.node_master",

@@ -53,21 +62,31 @@ public class NodeStatsRenderer extends AbstractRenderer<NodeStatsMarvelDoc> {
"node_stats.thread_pool.bulk.rejected",
};

public NodeStatsRenderer() {
super(FILTERS, true);
public NodeStatsResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
protected void doRender(NodeStatsMarvelDoc marvelDoc, XContentBuilder builder, ToXContent.Params params) throws IOException {
public String type(NodeStatsMonitoringDoc document) {
return TYPE;
}

@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(NodeStatsMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.NODE_STATS);

builder.field(Fields.NODE_ID, marvelDoc.getNodeId());
builder.field(Fields.NODE_MASTER, marvelDoc.isNodeMaster());
builder.field(Fields.MLOCKALL, marvelDoc.isMlockall());
builder.field(Fields.DISK_THRESHOLD_ENABLED, marvelDoc.isDiskThresholdDeciderEnabled());
builder.field(Fields.DISK_THRESHOLD_WATERMARK_HIGH, marvelDoc.getDiskThresholdWaterMarkHigh());
builder.field(Fields.NODE_ID, document.getNodeId());
builder.field(Fields.NODE_MASTER, document.isNodeMaster());
builder.field(Fields.MLOCKALL, document.isMlockall());
builder.field(Fields.DISK_THRESHOLD_ENABLED, document.isDiskThresholdDeciderEnabled());
builder.field(Fields.DISK_THRESHOLD_WATERMARK_HIGH, document.getDiskThresholdWaterMarkHigh());

NodeStats nodeStats = marvelDoc.getNodeStats();
NodeStats nodeStats = document.getNodeStats();
if (nodeStats != null) {
nodeStats.toXContent(builder, params);
}

@@ -76,7 +95,7 @@ public class NodeStatsRenderer extends AbstractRenderer<NodeStatsMarvelDoc> {
}

static final class Fields {
static final XContentBuilderString NODE_STATS = new XContentBuilderString("node_stats");
static final XContentBuilderString NODE_STATS = new XContentBuilderString(TYPE);
static final XContentBuilderString NODE_ID = new XContentBuilderString("node_id");
static final XContentBuilderString NODE_MASTER = new XContentBuilderString("node_master");
static final XContentBuilderString MLOCKALL = new XContentBuilderString("mlockall");

@@ -84,6 +103,3 @@ public class NodeStatsRenderer extends AbstractRenderer<NodeStatsMarvelDoc> {
static final XContentBuilderString DISK_THRESHOLD_WATERMARK_HIGH = new XContentBuilderString("disk_threshold_watermark_high");
}
}
@@ -0,0 +1,99 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.shards;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;

import java.io.IOException;

public class ShardsResolver extends MonitoringIndexNameResolver.Timestamped<ShardMonitoringDoc> {

public static final String TYPE = "shards";

private static final String[] FILTERS = {
"cluster_uuid",
"timestamp",
"source_node",
"state_uuid",
"shard.state",
"shard.primary",
"shard.node",
"shard.relocating_node",
"shard.shard",
"shard.index",
};

public ShardsResolver(Settings settings) {
super(MonitoringIds.ES.getId(), MarvelTemplateUtils.TEMPLATE_VERSION, settings);
}

@Override
public String type(ShardMonitoringDoc document) {
return TYPE;
}

@Override
public String id(ShardMonitoringDoc document) {
return id(document.getClusterStateUUID(), document.getShardRouting());
}

@Override
public String[] filters() {
return FILTERS;
}

@Override
protected void buildXContent(ShardMonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.field(Fields.STATE_UUID, document.getClusterStateUUID());

ShardRouting shardRouting = document.getShardRouting();
if (shardRouting != null) {
// ShardRouting is rendered inside a startObject() / endObject() but without a name,
// so we must use XContentBuilder.field(String, ToXContent, ToXContent.Params) here
builder.field(Fields.SHARD.underscore().toString(), shardRouting, params);
}
}

static final class Fields {
static final XContentBuilderString SHARD = new XContentBuilderString("shard");
static final XContentBuilderString STATE_UUID = new XContentBuilderString("state_uuid");
}

/**
* Compute an id that has the format:
*
* {state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'}
*/
static String id(String stateUUID, ShardRouting shardRouting) {
StringBuilder builder = new StringBuilder();
builder.append(stateUUID);
builder.append(':');
if (shardRouting.assignedToNode()) {
builder.append(shardRouting.currentNodeId());
} else {
builder.append("_na");
}
builder.append(':');
builder.append(shardRouting.getIndexName());
builder.append(':');
builder.append(Integer.valueOf(shardRouting.id()));
builder.append(':');
if (shardRouting.primary()) {
builder.append("p");
} else {
builder.append("r");
}
return builder.toString();
}
}
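As a quick illustration of the id format documented in the javadoc above, here is a standalone sketch that rebuilds the same "{state_uuid}:{node_id || '_na'}:{index}:{shard}:{'p' || 'r'}" string; the sample values are invented and do not come from a real cluster.

// Standalone illustration of the shard monitoring document id format.
// All values below are placeholder sample data.
public class ShardIdFormatSketch {

    static String shardId(String stateUUID, String nodeId, String index, int shard, boolean primary) {
        StringBuilder builder = new StringBuilder();
        builder.append(stateUUID).append(':');
        builder.append(nodeId != null ? nodeId : "_na").append(':');   // "_na" when unassigned
        builder.append(index).append(':');
        builder.append(shard).append(':');
        builder.append(primary ? 'p' : 'r');
        return builder.toString();
    }

    public static void main(String[] args) {
        // prints "7bN9pOsXTbWbB1wBJyeTYg:node-0:test-shards-0:1:p"
        System.out.println(shardId("7bN9pOsXTbWbB1wBJyeTYg", "node-0", "test-shards-0", 1, true));
    }
}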
@@ -1,5 +1,5 @@
{
"template": ".monitoring-es-data-${monitoring.template.version}",
"template": ".monitoring-data-${monitoring.template.version}",
"settings": {
"index.xpack.version": "${project.version}",
"index.number_of_shards": 1,
@@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.action.admin.indices.stats;

import org.elasticsearch.action.ShardOperationFailedException;

import java.util.List;

public class IndicesStatsResponseTestUtils {

/**
* Gives access to package private IndicesStatsResponse constructor for test purpose.
**/
public static IndicesStatsResponse newIndicesStatsResponse(ShardStats[] shards, int totalShards, int successfulShards,
int failedShards, List<ShardOperationFailedException> shardFailures) {
return new IndicesStatsResponse(shards, totalShards, successfulShards, failedShards, shardFailures);
}
}
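The helper above lives in the same package as IndicesStatsResponse so tests elsewhere can fabricate a response without a running cluster. A hypothetical caller could look like the sketch below; the class name and values are placeholders, not part of this commit.

import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponseTestUtils;
import org.elasticsearch.action.admin.indices.stats.ShardStats;

import java.util.Collections;
import java.util.List;

// Hypothetical test caller: builds an empty IndicesStatsResponse via the
// package-private bridge defined above.
public class IndicesStatsResponseTestUtilsUsage {

    static IndicesStatsResponse emptyResponse() {
        List<ShardOperationFailedException> failures = Collections.emptyList();
        return IndicesStatsResponseTestUtils.newIndicesStatsResponse(
                new ShardStats[0], 0, 0, 0, failures);
    }
}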
@@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.cluster.routing;

public class ShardRoutingTestUtils {

/**
* Gives access to package private {@link ShardRouting#initialize(String, String, long)} method for test purpose.
**/
public static void initialize(ShardRouting shardRouting, String nodeId) {
shardRouting.initialize(nodeId, null, -1);
}

/**
* Gives access to package private {@link ShardRouting#moveToStarted()} method for test purpose.
**/
public static void moveToStarted(ShardRouting shardRouting) {
shardRouting.moveToStarted();
}

/**
* Gives access to package private {@link ShardRouting#relocate(String, long)} method for test purpose.
**/
public static void relocate(ShardRouting shardRouting, String nodeId) {
shardRouting.relocate(nodeId, -1);
}
}
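A hedged sketch of how a test might drive a shard through its lifecycle with these bridges; how the ShardRouting instance itself is created is left out, since that depends on the test framework in use, and the node ids are placeholders.

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingTestUtils;

// Hypothetical usage: walk an unassigned ShardRouting through
// initialize -> started -> relocating using the package-private bridges above.
public class ShardRoutingLifecycleSketch {

    static void assignStartAndRelocate(ShardRouting shardRouting) {
        ShardRoutingTestUtils.initialize(shardRouting, "node-0");   // assign to a node
        ShardRoutingTestUtils.moveToStarted(shardRouting);          // mark it started
        ShardRoutingTestUtils.relocate(shardRouting, "node-1");     // relocate to another node
    }
}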
@@ -35,6 +35,6 @@ public class MarvelPluginClientTests extends ESTestCase {
Marvel plugin = new Marvel(settings);
assertThat(plugin.isEnabled(), is(true));
Collection<Module> modules = plugin.nodeModules();
assertThat(modules.size(), is(5));
assertThat(modules.size(), is(4));
}
}
|
@ -5,14 +5,15 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
@ -82,18 +83,20 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {
|
|||
assertHitCount(client().prepareSearch("test-" + i).setSize(0).get(), docsPerIndex[i]);
|
||||
}
|
||||
|
||||
Collection<MarvelDoc> results = newClusterStateCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newClusterStateCollector().doCollect();
|
||||
assertMarvelDocs(results, nbShards);
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(ClusterStateMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(ClusterStateMonitoringDoc.class));
|
||||
|
||||
ClusterStateMarvelDoc clusterStateMarvelDoc = (ClusterStateMarvelDoc) marvelDoc;
|
||||
ClusterStateMonitoringDoc clusterStateMarvelDoc = (ClusterStateMonitoringDoc) monitoringDoc;
|
||||
|
||||
assertThat(clusterStateMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(clusterStateMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(clusterStateMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(clusterStateMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(clusterStateMarvelDoc.getType(), equalTo(ClusterStateCollector.TYPE));
|
||||
assertThat(clusterStateMarvelDoc.getSourceNode(), notNullValue());
|
||||
assertNotNull(clusterStateMarvelDoc.getClusterState());
|
||||
|
||||
|
@ -155,44 +158,43 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {
|
|||
securedClient(nodeId));
|
||||
}
|
||||
|
||||
private void assertMarvelDocs(Collection<MarvelDoc> results, final int nbShards) {
|
||||
private void assertMarvelDocs(Collection<MonitoringDoc> results, final int nbShards) {
|
||||
assertThat("expecting 1 document for cluster state and 2 documents per node", results, hasSize(1 + internalCluster().size() * 2));
|
||||
|
||||
final ClusterState clusterState = securedClient().admin().cluster().prepareState().get().getState();
|
||||
final String clusterUUID = clusterState.getMetaData().clusterUUID();
|
||||
final String stateUUID = clusterState.stateUUID();
|
||||
|
||||
List<ClusterStateNodeMarvelDoc> clusterStateNodes = new ArrayList<>();
|
||||
List<DiscoveryNodeMarvelDoc> discoveryNodes = new ArrayList<>();
|
||||
List<ClusterStateNodeMonitoringDoc> clusterStateNodes = new ArrayList<>();
|
||||
List<DiscoveryNodeMonitoringDoc> discoveryNodes = new ArrayList<>();
|
||||
|
||||
for (MarvelDoc marvelDoc : results) {
|
||||
assertThat(marvelDoc.getClusterUUID(), equalTo(clusterUUID));
|
||||
assertThat(marvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(marvelDoc.getSourceNode(), notNullValue());
|
||||
assertThat(marvelDoc, anyOf(instanceOf(ClusterStateMarvelDoc.class), instanceOf(ClusterStateNodeMarvelDoc.class),
|
||||
instanceOf(DiscoveryNodeMarvelDoc.class)));
|
||||
for (MonitoringDoc doc : results) {
|
||||
assertThat(doc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(doc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(doc.getClusterUUID(), equalTo(clusterUUID));
|
||||
assertThat(doc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(doc.getSourceNode(), notNullValue());
|
||||
assertThat(doc, anyOf(instanceOf(ClusterStateMonitoringDoc.class),
|
||||
instanceOf(ClusterStateNodeMonitoringDoc.class), instanceOf(DiscoveryNodeMonitoringDoc.class)));
|
||||
|
||||
if (marvelDoc instanceof ClusterStateMarvelDoc) {
|
||||
ClusterStateMarvelDoc clusterStateMarvelDoc = (ClusterStateMarvelDoc) marvelDoc;
|
||||
if (doc instanceof ClusterStateMonitoringDoc) {
|
||||
ClusterStateMonitoringDoc clusterStateMarvelDoc = (ClusterStateMonitoringDoc) doc;
|
||||
assertThat(clusterStateMarvelDoc.getClusterState().getRoutingTable().allShards(), hasSize(nbShards));
|
||||
assertThat(clusterStateMarvelDoc.getClusterState().getNodes().getSize(), equalTo(internalCluster().size()));
|
||||
|
||||
} else if (marvelDoc instanceof ClusterStateNodeMarvelDoc) {
|
||||
ClusterStateNodeMarvelDoc clusterStateNodeMarvelDoc = (ClusterStateNodeMarvelDoc) marvelDoc;
|
||||
} else if (doc instanceof ClusterStateNodeMonitoringDoc) {
|
||||
ClusterStateNodeMonitoringDoc clusterStateNodeMarvelDoc = (ClusterStateNodeMonitoringDoc) doc;
|
||||
assertThat(clusterStateNodeMarvelDoc.getStateUUID(), equalTo(stateUUID));
|
||||
assertThat(clusterStateNodeMarvelDoc.getNodeId(), not(isEmptyOrNullString()));
|
||||
clusterStateNodes.add(clusterStateNodeMarvelDoc);
|
||||
|
||||
} else if (marvelDoc instanceof DiscoveryNodeMarvelDoc) {
|
||||
DiscoveryNodeMarvelDoc discoveryNodeMarvelDoc = (DiscoveryNodeMarvelDoc) marvelDoc;
|
||||
assertThat(discoveryNodeMarvelDoc.getIndex(),
|
||||
equalTo(MarvelSettings.MONITORING_DATA_INDEX_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION));
|
||||
assertThat(discoveryNodeMarvelDoc.getId(), not(isEmptyOrNullString()));
|
||||
} else if (doc instanceof DiscoveryNodeMonitoringDoc) {
|
||||
DiscoveryNodeMonitoringDoc discoveryNodeMarvelDoc = (DiscoveryNodeMonitoringDoc) doc;
|
||||
assertNotNull(discoveryNodeMarvelDoc.getNode());
|
||||
discoveryNodes.add(discoveryNodeMarvelDoc);
|
||||
|
||||
} else {
|
||||
fail("unknown monitoring document type " + marvelDoc.getType());
|
||||
fail("unknown monitoring document type " + doc);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -203,7 +205,7 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {
|
|||
final String nodeId = internalCluster().clusterService(nodeName).localNode().getId();
|
||||
|
||||
boolean found = false;
|
||||
for (ClusterStateNodeMarvelDoc doc : clusterStateNodes) {
|
||||
for (ClusterStateNodeMonitoringDoc doc : clusterStateNodes) {
|
||||
if (nodeId.equals(doc.getNodeId())) {
|
||||
found = true;
|
||||
break;
|
||||
|
@ -212,7 +214,7 @@ public class ClusterStateCollectorTests extends AbstractCollectorTestCase {
|
|||
assertTrue("Could not find node id [" + nodeName + "]", found);
|
||||
|
||||
found = false;
|
||||
for (DiscoveryNodeMarvelDoc doc : discoveryNodes) {
|
||||
for (DiscoveryNodeMonitoringDoc doc : discoveryNodes) {
|
||||
if (nodeName.equals(doc.getNode().getName())) {
|
||||
found = true;
|
||||
break;
|
||||
|
|
|
@ -6,14 +6,16 @@
|
|||
package org.elasticsearch.marvel.agent.collector.cluster;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase.BadApple;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.plugin.core.LicensesManagerService;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
|
||||
import java.util.Collection;
|
||||
|
@ -29,19 +31,20 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
||||
|
||||
public void testClusterStatsCollector() throws Exception {
|
||||
Collection<MarvelDoc> results = newClusterStatsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newClusterStatsCollector().doCollect();
|
||||
assertThat(results, hasSize(2));
|
||||
|
||||
// Check cluster info document
|
||||
MarvelDoc marvelDoc = results.stream().filter(o -> o instanceof ClusterInfoMarvelDoc).findFirst().get();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(ClusterInfoMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.stream().filter(o -> o instanceof ClusterInfoMonitoringDoc).findFirst().get();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(ClusterInfoMonitoringDoc.class));
|
||||
|
||||
ClusterInfoMarvelDoc clusterInfoMarvelDoc = (ClusterInfoMarvelDoc) marvelDoc;
|
||||
ClusterInfoMonitoringDoc clusterInfoMarvelDoc = (ClusterInfoMonitoringDoc) monitoringDoc;
|
||||
assertThat(clusterInfoMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(clusterInfoMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(clusterInfoMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(clusterInfoMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(clusterInfoMarvelDoc.getType(), equalTo(ClusterStatsCollector.CLUSTER_INFO_TYPE));
|
||||
assertThat(clusterInfoMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
assertThat(clusterInfoMarvelDoc.getClusterName(),
|
||||
|
@ -56,15 +59,16 @@ public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
equalTo(internalCluster().getNodeNames().length));
|
||||
|
||||
// Check cluster stats document
|
||||
marvelDoc = results.stream().filter(o -> o instanceof ClusterStatsMarvelDoc).findFirst().get();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(ClusterStatsMarvelDoc.class));
|
||||
monitoringDoc = results.stream().filter(o -> o instanceof ClusterStatsMonitoringDoc).findFirst().get();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(ClusterStatsMonitoringDoc.class));
|
||||
|
||||
ClusterStatsMarvelDoc clusterStatsMarvelDoc = (ClusterStatsMarvelDoc) marvelDoc;
|
||||
ClusterStatsMonitoringDoc clusterStatsMarvelDoc = (ClusterStatsMonitoringDoc) monitoringDoc;
|
||||
assertThat(clusterStatsMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(clusterStatsMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(clusterStatsMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(clusterStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(clusterStatsMarvelDoc.getType(), equalTo(ClusterStatsCollector.CLUSTER_STATS_TYPE));
|
||||
assertThat(clusterStatsMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
assertNotNull(clusterStatsMarvelDoc.getClusterStats());
|
||||
|
@ -83,7 +87,7 @@ public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
logger.debug("--> enabling license and checks that the collector can collect data if node is master");
|
||||
enableLicense();
|
||||
if (node.equals(internalCluster().getMasterName())) {
|
||||
assertCanCollect(collector, ClusterInfoMarvelDoc.class, ClusterStatsMarvelDoc.class);
|
||||
assertCanCollect(collector, ClusterInfoMonitoringDoc.class, ClusterStatsMonitoringDoc.class);
|
||||
} else {
|
||||
assertCannotCollect(collector);
|
||||
}
|
||||
|
@ -91,7 +95,7 @@ public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
logger.debug("--> starting graceful period and checks that the collector can still collect data if node is master");
|
||||
beginGracefulPeriod();
|
||||
if (node.equals(internalCluster().getMasterName())) {
|
||||
assertCanCollect(collector, ClusterInfoMarvelDoc.class, ClusterStatsMarvelDoc.class);
|
||||
assertCanCollect(collector, ClusterInfoMonitoringDoc.class, ClusterStatsMonitoringDoc.class);
|
||||
} else {
|
||||
assertCannotCollect(collector);
|
||||
}
|
||||
|
@ -99,7 +103,7 @@ public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
logger.debug("--> ending graceful period and checks that the collector can still collect data (if node is master)");
|
||||
endGracefulPeriod();
|
||||
if (node.equals(internalCluster().getMasterName())) {
|
||||
assertCanCollect(collector, ClusterInfoMarvelDoc.class);
|
||||
assertCanCollect(collector, ClusterInfoMonitoringDoc.class);
|
||||
} else {
|
||||
assertCannotCollect(collector);
|
||||
}
|
||||
|
@ -107,7 +111,7 @@ public class ClusterStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
logger.debug("--> disabling license and checks that the collector can still collect data (if node is master)");
|
||||
disableLicense();
|
||||
if (node.equals(internalCluster().getMasterName())) {
|
||||
assertCanCollect(collector, ClusterInfoMarvelDoc.class);
|
||||
assertCanCollect(collector, ClusterInfoMonitoringDoc.class);
|
||||
} else {
|
||||
assertCannotCollect(collector);
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
|
@ -13,8 +14,9 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.indices.recovery.RecoveryState;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
||||
|
@ -57,7 +59,7 @@ public class IndexRecoveryCollectorTests extends AbstractCollectorTestCase {
|
|||
waitForNoBlocksOnNode(node1);
|
||||
|
||||
logger.info("--> collect index recovery data");
|
||||
Collection<MarvelDoc> results = newIndexRecoveryCollector(node1).doCollect();
|
||||
Collection<MonitoringDoc> results = newIndexRecoveryCollector(node1).doCollect();
|
||||
|
||||
logger.info("--> no indices created, expecting 0 monitoring documents");
|
||||
assertNotNull(results);
|
||||
|
@ -95,15 +97,16 @@ public class IndexRecoveryCollectorTests extends AbstractCollectorTestCase {
|
|||
assertNotNull(results);
|
||||
assertThat(results, hasSize(1));
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(IndexRecoveryMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(IndexRecoveryMonitoringDoc.class));
|
||||
|
||||
IndexRecoveryMarvelDoc indexRecoveryMarvelDoc = (IndexRecoveryMarvelDoc) marvelDoc;
|
||||
IndexRecoveryMonitoringDoc indexRecoveryMarvelDoc = (IndexRecoveryMonitoringDoc) monitoringDoc;
|
||||
assertThat(indexRecoveryMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(indexRecoveryMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(indexRecoveryMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(indexRecoveryMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(indexRecoveryMarvelDoc.getType(), equalTo(IndexRecoveryCollector.TYPE));
|
||||
assertThat(indexRecoveryMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
RecoveryResponse recovery = indexRecoveryMarvelDoc.getRecoveryResponse();
|
||||
|
|
|
@ -5,14 +5,16 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndexStats;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
||||
|
@ -89,18 +91,19 @@ public class IndexStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
|
||||
assertHitCount(client().prepareSearch().setSize(0).get(), nbDocs);
|
||||
|
||||
Collection<MarvelDoc> results = newIndexStatsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newIndexStatsCollector().doCollect();
|
||||
assertThat(results, hasSize(1));
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(IndexStatsMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(IndexStatsMonitoringDoc.class));
|
||||
|
||||
IndexStatsMarvelDoc indexStatsMarvelDoc = (IndexStatsMarvelDoc) marvelDoc;
|
||||
IndexStatsMonitoringDoc indexStatsMarvelDoc = (IndexStatsMonitoringDoc) monitoringDoc;
|
||||
assertThat(indexStatsMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(indexStatsMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(indexStatsMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(indexStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(indexStatsMarvelDoc.getType(), equalTo(IndexStatsCollector.TYPE));
|
||||
assertThat(indexStatsMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
IndexStats indexStats = indexStatsMarvelDoc.getIndexStats();
|
||||
|
@ -143,26 +146,25 @@ public class IndexStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
|
||||
String clusterUUID = client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID();
|
||||
|
||||
Collection<MarvelDoc> results = newIndexStatsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newIndexStatsCollector().doCollect();
|
||||
assertThat(results, hasSize(nbIndices));
|
||||
|
||||
for (int i = 0; i < nbIndices; i++) {
|
||||
String indexName = indexPrefix + i;
|
||||
boolean found = false;
|
||||
|
||||
Iterator<MarvelDoc> it = results.iterator();
|
||||
Iterator<MonitoringDoc> it = results.iterator();
|
||||
while (!found && it.hasNext()) {
|
||||
MarvelDoc marvelDoc = it.next();
|
||||
assertThat(marvelDoc, instanceOf(IndexStatsMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = it.next();
|
||||
assertThat(monitoringDoc, instanceOf(IndexStatsMonitoringDoc.class));
|
||||
|
||||
IndexStatsMarvelDoc indexStatsMarvelDoc = (IndexStatsMarvelDoc) marvelDoc;
|
||||
IndexStatsMonitoringDoc indexStatsMarvelDoc = (IndexStatsMonitoringDoc) monitoringDoc;
|
||||
IndexStats indexStats = indexStatsMarvelDoc.getIndexStats();
|
||||
assertNotNull(indexStats);
|
||||
|
||||
if (indexStats.getIndex().equals(indexPrefix + i)) {
|
||||
assertThat(indexStatsMarvelDoc.getClusterUUID(), equalTo(clusterUUID));
|
||||
assertThat(indexStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(indexStatsMarvelDoc.getType(), equalTo(IndexStatsCollector.TYPE));
|
||||
assertThat(indexStatsMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
assertThat(indexStats.getIndex(), equalTo(indexName));
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.indices;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndexStats;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
|
@ -13,8 +14,9 @@ import org.elasticsearch.common.Strings;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
||||
|
@ -89,17 +91,16 @@ public class IndicesStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
|
||||
assertHitCount(client().prepareSearch().setSize(0).get(), nbDocs);
|
||||
|
||||
Collection<MarvelDoc> results = newIndicesStatsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newIndicesStatsCollector().doCollect();
|
||||
assertThat(results, hasSize(1));
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertThat(marvelDoc, instanceOf(IndicesStatsMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertThat(monitoringDoc, instanceOf(IndicesStatsMonitoringDoc.class));
|
||||
|
||||
IndicesStatsMarvelDoc indicesStatsMarvelDoc = (IndicesStatsMarvelDoc) marvelDoc;
|
||||
IndicesStatsMonitoringDoc indicesStatsMarvelDoc = (IndicesStatsMonitoringDoc) monitoringDoc;
|
||||
assertThat(indicesStatsMarvelDoc.getClusterUUID(), equalTo(client().admin().cluster().
|
||||
prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(indicesStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(indicesStatsMarvelDoc.getType(), equalTo(IndicesStatsCollector.TYPE));
|
||||
assertThat(indicesStatsMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
IndicesStatsResponse indicesStats = indicesStatsMarvelDoc.getIndicesStats();
|
||||
|
@ -136,13 +137,19 @@ public class IndicesStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
assertHitCount(client().prepareSearch(indexPrefix + i).setSize(0).get(), docsPerIndex[i]);
|
||||
}
|
||||
|
||||
Collection<MarvelDoc> results = newIndicesStatsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newIndicesStatsCollector().doCollect();
|
||||
assertThat(results, hasSize(1));
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertThat(marvelDoc, instanceOf(IndicesStatsMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertThat(monitoringDoc, instanceOf(IndicesStatsMonitoringDoc.class));
|
||||
|
||||
IndicesStatsMonitoringDoc indicesStatsMarvelDoc = (IndicesStatsMonitoringDoc) monitoringDoc;
|
||||
assertThat(indicesStatsMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(indicesStatsMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(indicesStatsMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(indicesStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
|
||||
IndicesStatsMarvelDoc indicesStatsMarvelDoc = (IndicesStatsMarvelDoc) marvelDoc;
|
||||
IndicesStatsResponse indicesStats = indicesStatsMarvelDoc.getIndicesStats();
|
||||
assertNotNull(indicesStats);
|
||||
assertThat(indicesStats.getIndices().keySet(), hasSize(nbIndices));
|
||||
|
|
|
@ -5,14 +5,16 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.node;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.bootstrap.BootstrapInfo;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.shield.InternalClient;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
@ -30,22 +32,24 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
// such files.
|
||||
@ClusterScope(numClientNodes = 0)
|
||||
public class NodeStatsCollectorTests extends AbstractCollectorTestCase {
|
||||
|
||||
public void testNodeStatsCollector() throws Exception {
|
||||
String[] nodes = internalCluster().getNodeNames();
|
||||
for (String node : nodes) {
|
||||
logger.info("--> collecting node stats on node [{}]", node);
|
||||
Collection<MarvelDoc> results = newNodeStatsCollector(node).doCollect();
|
||||
Collection<MonitoringDoc> results = newNodeStatsCollector(node).doCollect();
|
||||
assertThat(results, hasSize(1));
|
||||
|
||||
MarvelDoc marvelDoc = results.iterator().next();
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(NodeStatsMarvelDoc.class));
|
||||
MonitoringDoc monitoringDoc = results.iterator().next();
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(NodeStatsMonitoringDoc.class));
|
||||
|
||||
NodeStatsMarvelDoc nodeStatsMarvelDoc = (NodeStatsMarvelDoc) marvelDoc;
|
||||
NodeStatsMonitoringDoc nodeStatsMarvelDoc = (NodeStatsMonitoringDoc) monitoringDoc;
|
||||
assertThat(nodeStatsMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(nodeStatsMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(nodeStatsMarvelDoc.getClusterUUID(),
|
||||
equalTo(client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID()));
|
||||
assertThat(nodeStatsMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(nodeStatsMarvelDoc.getType(), equalTo(NodeStatsCollector.TYPE));
|
||||
assertThat(nodeStatsMarvelDoc.getSourceNode(), notNullValue());
|
||||
|
||||
assertThat(nodeStatsMarvelDoc.getNodeId(),
|
||||
|
|
|
@ -5,13 +5,15 @@
|
|||
*/
|
||||
package org.elasticsearch.marvel.agent.collector.shards;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.marvel.MarvelSettings;
|
||||
import org.elasticsearch.marvel.MonitoringIds;
|
||||
import org.elasticsearch.marvel.agent.collector.AbstractCollectorTestCase;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
|
||||
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
|
||||
import java.util.Collection;
|
||||
|
@ -28,7 +30,7 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
public class ShardsCollectorTests extends AbstractCollectorTestCase {
|
||||
|
||||
public void testShardsCollectorNoIndices() throws Exception {
|
||||
Collection<MarvelDoc> results = newShardsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newShardsCollector().doCollect();
|
||||
assertThat(results, hasSize(0));
|
||||
}
|
||||
|
||||
|
@ -52,7 +54,7 @@ public class ShardsCollectorTests extends AbstractCollectorTestCase {
|
|||
|
||||
assertHitCount(client().prepareSearch().setSize(0).get(), nbDocs);
|
||||
|
||||
Collection<MarvelDoc> results = newShardsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newShardsCollector().doCollect();
|
||||
assertThat(results, hasSize(getNumShards("test-shards").totalNumShards));
|
||||
|
||||
final ClusterState clusterState = client().admin().cluster().prepareState().setMetaData(true).get().getState();
|
||||
|
@ -60,16 +62,15 @@ public class ShardsCollectorTests extends AbstractCollectorTestCase {
|
|||
int primaries = 0;
|
||||
int replicas = 0;
|
||||
|
||||
for (MarvelDoc marvelDoc : results) {
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(ShardMarvelDoc.class));
|
||||
for (MonitoringDoc monitoringDoc : results) {
|
||||
assertNotNull(monitoringDoc);
|
||||
assertThat(monitoringDoc, instanceOf(ShardMonitoringDoc.class));
|
||||
|
||||
ShardMarvelDoc shardMarvelDoc = (ShardMarvelDoc) marvelDoc;
|
||||
ShardMonitoringDoc shardMarvelDoc = (ShardMonitoringDoc) monitoringDoc;
|
||||
assertThat(shardMarvelDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(shardMarvelDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(shardMarvelDoc.getClusterUUID(), equalTo(clusterState.metaData().clusterUUID()));
|
||||
assertThat(shardMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(shardMarvelDoc.getType(), equalTo(ShardsCollector.TYPE));
|
||||
assertThat(shardMarvelDoc.getId(),
|
||||
equalTo(ShardsCollector.id(clusterState.stateUUID(), ((ShardMarvelDoc) marvelDoc).getShardRouting())));
|
||||
assertThat(shardMarvelDoc.getSourceNode(), notNullValue());
|
||||
assertThat(shardMarvelDoc.getClusterStateUUID(), equalTo(clusterState.stateUUID()));
|
||||
|
||||
|
@ -117,31 +118,31 @@ public class ShardsCollectorTests extends AbstractCollectorTestCase {
|
|||
totalShards += getNumShards(index).totalNumShards;
|
||||
}
|
||||
|
||||
Collection<MarvelDoc> results = newShardsCollector().doCollect();
|
||||
Collection<MonitoringDoc> results = newShardsCollector().doCollect();
|
||||
assertThat(results, hasSize(totalShards));
|
||||
|
||||
final ClusterState clusterState = client().admin().cluster().prepareState().setMetaData(true).get().getState();
|
||||
|
||||
for (MarvelDoc marvelDoc : results) {
|
||||
assertNotNull(marvelDoc);
|
||||
assertThat(marvelDoc, instanceOf(ShardMarvelDoc.class));
|
||||
for (MonitoringDoc doc : results) {
|
||||
assertNotNull(doc);
|
||||
assertThat(doc, instanceOf(ShardMonitoringDoc.class));
|
||||
|
||||
ShardMarvelDoc shardMarvelDoc = (ShardMarvelDoc) marvelDoc;
|
||||
assertThat(shardMarvelDoc.getClusterUUID(), equalTo(clusterState.metaData().clusterUUID()));
|
||||
assertThat(shardMarvelDoc.getTimestamp(), greaterThan(0L));
|
||||
assertThat(shardMarvelDoc.getType(), equalTo(ShardsCollector.TYPE));
|
||||
assertThat(shardMarvelDoc.getId(),
|
||||
equalTo(ShardsCollector.id(clusterState.stateUUID(), ((ShardMarvelDoc) marvelDoc).getShardRouting())));
|
||||
assertThat(shardMarvelDoc.getClusterStateUUID(), equalTo(clusterState.stateUUID()));
|
||||
ShardMonitoringDoc shardDoc = (ShardMonitoringDoc) doc;
|
||||
assertThat(shardDoc.getMonitoringId(), equalTo(MonitoringIds.ES.getId()));
|
||||
assertThat(shardDoc.getMonitoringVersion(), equalTo(Version.CURRENT.toString()));
|
||||
assertThat(shardDoc.getClusterUUID(), equalTo(clusterState.metaData().clusterUUID()));
|
||||
assertThat(shardDoc.getTimestamp(), greaterThan(0L));
assertThat(shardDoc.getSourceNode(), notNullValue());
assertThat(shardDoc.getClusterStateUUID(), equalTo(clusterState.stateUUID()));

ShardRouting shardRouting = shardMarvelDoc.getShardRouting();
ShardRouting shardRouting = shardDoc.getShardRouting();
assertNotNull(shardRouting);
}

// Checks that a correct number of ShardMarvelDoc documents has been created for each index
int[] shards = new int[nbIndices];
for (MarvelDoc marvelDoc : results) {
ShardRouting routing = ((ShardMarvelDoc) marvelDoc).getShardRouting();
for (MonitoringDoc monitoringDoc : results) {
ShardRouting routing = ((ShardMonitoringDoc) monitoringDoc).getShardRouting();
int index = Integer.parseInt(routing.getIndexName().substring(indexPrefix.length()));
shards[index]++;
}

@@ -5,11 +5,13 @@
*/
package org.elasticsearch.marvel.agent.exporter;

import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.Collector;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -163,11 +165,17 @@ public abstract class AbstractExporterTemplateTestCase extends MarvelIntegTestCa
}

private String currentDataIndexName() {
return ".monitoring-es-data-" + String.valueOf(currentVersion);
MockDataIndexNameResolver resolver = new MockDataIndexNameResolver(currentVersion);
return resolver.index(null);
}

private String currentTimestampedIndexName() {
return exporter().indexNameResolver().resolve(System.currentTimeMillis());
MonitoringDoc doc = new MonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setTimestamp(System.currentTimeMillis());

MockTimestampedIndexNameResolver resolver = new MockTimestampedIndexNameResolver(MonitoringIds.ES.getId(),
currentVersion, exporterSettings());
return resolver.index(doc);
}

/** Generates a basic template **/

@@ -11,9 +11,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.shield.InternalClient;
import org.elasticsearch.test.ESTestCase;

@@ -58,7 +57,7 @@ public class ExportersTests extends ESTestCase {

// we always need to have the local exporter as it serves as the default one
factories.put(LocalExporter.TYPE, new LocalExporter.Factory(new InternalClient.Insecure(client), clusterService,
mock(RendererRegistry.class), mock(CleanerService.class)));
mock(CleanerService.class)));
clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(MarvelSettings.COLLECTORS,
MarvelSettings.INTERVAL, MarvelSettings.EXPORTERS_SETTINGS)));
exporters = new Exporters(Settings.EMPTY, factories, clusterService, clusterSettings);

@@ -270,7 +269,7 @@ public class ExportersTests extends ESTestCase {
}

@Override
public void export(Collection<MarvelDoc> marvelDocs) throws Exception {
public void export(Collection<MonitoringDoc> monitoringDocs) throws Exception {
}

@Override

@@ -51,6 +51,6 @@ public class MarvelTemplateUtilsTests extends ESTestCase {
assertThat(MarvelTemplateUtils.dataTemplateName(),
equalTo(MarvelTemplateUtils.DATA_TEMPLATE_NAME_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION));
int version = randomIntBetween(1, 100);
assertThat(MarvelTemplateUtils.dataTemplateName(version), equalTo(".monitoring-es-data-" + version));
assertThat(MarvelTemplateUtils.dataTemplateName(version), equalTo(".monitoring-data-" + version));
}
}

@@ -99,7 +99,6 @@ public class HttpExporterTemplateTests extends AbstractExporterTemplateTestCase

class MockServerDispatcher extends Dispatcher {

private final MockResponse OK = newResponse(200, "");
private final MockResponse NOT_FOUND = newResponse(404, "");

private final Set<String> requests = new HashSet<>();

@@ -130,7 +129,7 @@ public class HttpExporterTemplateTests extends AbstractExporterTemplateTestCase
} catch (Exception e) {
return newResponse(500, e.getMessage());
}
return OK;
return newResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");
default:
String[] paths = Strings.splitStringToArray(request.getPath(), '/');

@@ -19,17 +19,18 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMarvelDoc;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.Scope;

@@ -91,7 +92,7 @@ public class HttpExporterTests extends MarvelIntegTestCase {
enqueueResponse(201, "template for timestamped indices created");
enqueueResponse(404, "template for data index does not exist");
enqueueResponse(201, "template for data index created");
enqueueResponse(200, "successful bulk request ");
enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

Settings.Builder builder = Settings.builder()
.put(MarvelSettings.INTERVAL.getKey(), "-1")

@@ -177,7 +178,7 @@ public class HttpExporterTests extends MarvelIntegTestCase {
enqueueResponse(201, "template for timestamped indices created");
enqueueResponse(404, "template for data index does not exist");
enqueueResponse(201, "template for data index created");
enqueueResponse(200, "successful bulk request ");
enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

String agentNode = internalCluster().startNode(builder);

@@ -246,7 +247,7 @@ public class HttpExporterTests extends MarvelIntegTestCase {
enqueueResponse(secondWebServer, 404, "template for timestamped indices does not exist");
enqueueResponse(secondWebServer, 201, "template for timestamped indices created");
enqueueResponse(secondWebServer, 200, "template for data index exist");
enqueueResponse(secondWebServer, 200, "successful bulk request ");
enqueueResponse(secondWebServer, 200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

logger.info("--> exporting a second event");
exporter.export(Collections.singletonList(newRandomMarvelDoc()));

@@ -325,11 +326,11 @@ public class HttpExporterTests extends MarvelIntegTestCase {
enqueueResponse(201, "template for timestamped indices created");
enqueueResponse(404, "template for data index does not exist");
enqueueResponse(201, "template for data index created");
enqueueResponse(200, "successful bulk request ");
enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

HttpExporter exporter = getExporter(agentNode);

MarvelDoc doc = newRandomMarvelDoc();
MonitoringDoc doc = newRandomMarvelDoc();
exporter.export(Collections.singletonList(doc));

assertThat(webServer.getRequestCount(), equalTo(6));

@@ -360,7 +361,7 @@ public class HttpExporterTests extends MarvelIntegTestCase {
assertThat(recordedRequest.getMethod(), equalTo("POST"));
assertThat(recordedRequest.getPath(), equalTo("/_bulk"));

String indexName = exporter.indexNameResolver().resolve(doc);
String indexName = exporter.getResolvers().getResolver(doc).index(doc);
logger.info("--> checks that the document in the bulk request is indexed in [{}]", indexName);

byte[] bytes = recordedRequest.getBody().readByteArray();

@@ -379,13 +380,13 @@ public class HttpExporterTests extends MarvelIntegTestCase {
enqueueGetClusterVersionResponse(Version.CURRENT);
enqueueResponse(200, "template for timestamped indices exist");
enqueueResponse(200, "template for data index exist");
enqueueResponse(200, "successful bulk request ");
enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

doc = newRandomMarvelDoc();
exporter = getExporter(agentNode);
exporter.export(Collections.singletonList(doc));

String expectedMarvelIndex = MarvelSettings.MONITORING_INDICES_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION + "-"
String expectedMarvelIndex = ".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-"
+ DateTimeFormat.forPattern(newTimeFormat).withZoneUTC().print(doc.getTimestamp());

assertThat(webServer.getRequestCount(), equalTo(6 + 4));

@@ -442,27 +443,27 @@ public class HttpExporterTests extends MarvelIntegTestCase {
return (HttpExporter) exporters.iterator().next();
}

private MarvelDoc newRandomMarvelDoc() {
private MonitoringDoc newRandomMarvelDoc() {
if (randomBoolean()) {
IndexRecoveryMarvelDoc doc = new IndexRecoveryMarvelDoc();
IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setClusterUUID(internalCluster().getClusterName());
doc.setType(IndexRecoveryCollector.TYPE);
doc.setTimestamp(System.currentTimeMillis());
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setRecoveryResponse(new RecoveryResponse());
return doc;
} else {
ClusterStateMarvelDoc doc = new ClusterStateMarvelDoc();
ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setClusterUUID(internalCluster().getClusterName());
doc.setType(ClusterStateCollector.TYPE);
doc.setTimestamp(System.currentTimeMillis());
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setClusterState(ClusterState.PROTO);
doc.setStatus(ClusterHealthStatus.GREEN);
return doc;
}
}

private List<MarvelDoc> newRandomMarvelDocs(int nb) {
List<MarvelDoc> docs = new ArrayList<>(nb);
private List<MonitoringDoc> newRandomMarvelDocs(int nb) {
List<MonitoringDoc> docs = new ArrayList<>(nb);
for (int i = 0; i < nb; i++) {
docs.add(newRandomMarvelDoc());
}
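Taken together, the hunks above replace the old exporter.indexNameResolver().resolve(doc) call with a lookup through the new resolver registry. As a rough sketch of the pattern the exporters follow after this change (the method names are the ones exercised by the tests above; the "resolvers" variable is only illustrative and stands for whatever object exporter.getResolvers() returns, not the commit's exact field):

// Sketch only: resolve index, type, id and source for one monitoring document.
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc))
        .source(resolver.source(doc, XContentType.JSON));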
@@ -6,20 +6,22 @@
package org.elasticsearch.marvel.agent.exporter.local;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMarvelDoc;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -77,20 +79,21 @@ public class LocalExporterTests extends MarvelIntegTestCase {

deleteMarvelIndices();

final List<MarvelDoc> marvelDocs = new ArrayList<>();
final List<MonitoringDoc> monitoringDocs = new ArrayList<>();
for (int i=0; i < randomIntBetween(2, 50); i++) {
marvelDocs.add(newRandomMarvelDoc());
monitoringDocs.add(newRandomMarvelDoc());
}

logger.debug("--> exporting {} monitoring docs", marvelDocs.size());
exporter.export(marvelDocs);
awaitMarvelDocsCount(is((long) marvelDocs.size()));
logger.debug("--> exporting {} monitoring docs", monitoringDocs.size());
exporter.export(monitoringDocs);
awaitMarvelDocsCount(is((long) monitoringDocs.size()));

SearchResponse response = client().prepareSearch(MarvelSettings.MONITORING_INDICES_PREFIX + "*").get();
SearchResponse response = client().prepareSearch(MONITORING_INDICES_PREFIX + "*").get();
for (SearchHit hit : response.getHits().hits()) {
Map<String, Object> source = hit.sourceAsMap();
assertNotNull(source.get("cluster_uuid"));
assertNotNull(source.get("timestamp"));
assertNotNull(source.get("source_node"));
}
}

@@ -109,7 +112,6 @@ public class LocalExporterTests extends MarvelIntegTestCase {
}

public void testIndexTimestampFormat() throws Exception {
long time = System.currentTimeMillis();
String timeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM");

internalCluster().startNode(Settings.builder()

@@ -120,16 +122,11 @@ public class LocalExporterTests extends MarvelIntegTestCase {

LocalExporter exporter = getLocalExporter("_local");

// first lets test that the index resolver works with time
String indexName = MarvelSettings.MONITORING_INDICES_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION + "-" +
DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(time);
assertThat(exporter.indexNameResolver().resolve(time), equalTo(indexName));

// now lets test that the index name resolver works with a doc
MarvelDoc doc = newRandomMarvelDoc();
indexName = MarvelSettings.MONITORING_INDICES_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION + "-" +
DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(doc.getTimestamp());
assertThat(exporter.indexNameResolver().resolve(doc), equalTo(indexName));
MonitoringDoc doc = newRandomMarvelDoc();
String indexName = ".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-"
+ DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(doc.getTimestamp());
assertThat(exporter.getResolvers().getResolver(doc).index(doc), equalTo(indexName));

logger.debug("--> exporting a random monitoring document");
exporter.export(Collections.singletonList(doc));

@@ -139,9 +136,9 @@ public class LocalExporterTests extends MarvelIntegTestCase {
timeFormat = randomFrom("dd", "dd.MM.YYYY", "dd.MM");
updateClusterSettings(Settings.builder().put("xpack.monitoring.agent.exporters._local.index.name.time_format", timeFormat));
exporter = getLocalExporter("_local"); // we need to get it again.. as it was rebuilt
indexName = MarvelSettings.MONITORING_INDICES_PREFIX + MarvelTemplateUtils.TEMPLATE_VERSION + "-" +
DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(doc.getTimestamp());
assertThat(exporter.indexNameResolver().resolve(doc), equalTo(indexName));
indexName = ".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-"
+ DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(doc.getTimestamp());
assertThat(exporter.getResolvers().getResolver(doc).index(doc), equalTo(indexName));

logger.debug("--> exporting the document again (this time with the the new index name time format [{}], expecting index name [{}]",
timeFormat, indexName);

@@ -164,7 +161,7 @@ public class LocalExporterTests extends MarvelIntegTestCase {
assertNull(exporter.getBulk().requestBuilder);

logger.debug("--> closing monitoring indices");
assertAcked(client().admin().indices().prepareClose(MarvelSettings.MONITORING_INDICES_PREFIX + "*").get());
assertAcked(client().admin().indices().prepareClose(MONITORING_INDICES_PREFIX + "*").get());

try {
logger.debug("--> exporting a second monitoring doc");

@@ -188,19 +185,19 @@ public class LocalExporterTests extends MarvelIntegTestCase {
return (LocalExporter) exporter;
}

private MarvelDoc newRandomMarvelDoc() {
private MonitoringDoc newRandomMarvelDoc() {
if (randomBoolean()) {
IndexRecoveryMarvelDoc doc = new IndexRecoveryMarvelDoc();
IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setClusterUUID(internalCluster().getClusterName());
doc.setType(IndexRecoveryCollector.TYPE);
doc.setTimestamp(System.currentTimeMillis());
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setRecoveryResponse(new RecoveryResponse());
return doc;
} else {
ClusterStateMarvelDoc doc = new ClusterStateMarvelDoc();
ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoringIds.ES.getId(), Version.CURRENT.toString());
doc.setClusterUUID(internalCluster().getClusterName());
doc.setType(ClusterStateCollector.TYPE);
doc.setTimestamp(System.currentTimeMillis());
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setClusterState(ClusterState.PROTO);
doc.setStatus(ClusterHealthStatus.GREEN);
return doc;
@@ -1,93 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.exporter.MarvelDoc;
import org.hamcrest.Matchers;

import java.io.IOException;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

public class RendererTestUtils {

/**
* Verifies that two JSON documents have the same structure/fields once parsed.
* This method does not verify that fields values are identical.
*/
public static void assertJSONStructure(String result, String expected) {
assertContent(result, expected, false);
}

/**
* Verifies that two JSON documents are identical once parsed.
* This method verifies that fields values are the same.
*/
public static void assertJSONStructureAndValues(String result, String expected) {
assertContent(result, expected, false);
}

private static void assertContent(String result, String expected, boolean verifyValues) {
assertNotNull(result);
assertNotNull(expected);

try (
XContentParser resultParser = XContentFactory.xContent(result).createParser(result);
XContentParser expectedParser = XContentFactory.xContent(expected).createParser(expected);
) {
while (true) {
XContentParser.Token token1 = resultParser.nextToken();
XContentParser.Token token2 = expectedParser.nextToken();
if (token1 == null) {
assertThat(token2, nullValue());
return;
}
assertThat(token1, Matchers.equalTo(token2));
switch (token1) {
case FIELD_NAME:
assertThat("field name for property '" + resultParser.currentName() + "' must be identical",
resultParser.currentName(), Matchers.equalTo(expectedParser.currentName()));
break;
case VALUE_STRING:
if (verifyValues) {
assertThat("string value for property '" + resultParser.currentName() + "' must be identical",
resultParser.text(), Matchers.equalTo(expectedParser.text()));
} else {
assertThat("string value for property '" + resultParser.currentName() + "' must be empty or non empty",
Strings.hasLength(resultParser.text()), Matchers.equalTo(Strings.hasLength(expectedParser.text())));
}
break;
case VALUE_NUMBER:
if (verifyValues) {
assertThat("numeric value for property '" + resultParser.currentName() + "' must be identical",
resultParser.numberValue(), Matchers.equalTo(expectedParser.numberValue()));
}
break;
}
}
} catch (Exception e) {
fail("Fail to verify the result of the renderer: " + e.getMessage());
}
}

public static String renderAsJSON(MarvelDoc marvelDoc, Renderer renderer) throws IOException {
assertNotNull(marvelDoc);
assertNotNull(renderer);

try (BytesStreamOutput os = new BytesStreamOutput()) {
renderer.render(marvelDoc, XContentType.JSON, os);
return os.bytes().toUtf8();
}
}
}
@@ -1,57 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.StreamsUtils;

public class ClusterStateRendererTests extends ESSingleNodeTestCase {
private static final String SAMPLE_FILE = "/samples/cluster_state.json";

public void testClusterStateRenderer() throws Exception {
createIndex("my-index", Settings.settingsBuilder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.build());

logger.debug("--> retrieving cluster state");
ClusterState clusterState = getInstanceFromNode(ClusterService.class).state();

logger.debug("--> retrieving cluster health");
ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().get();

logger.debug("--> creating the cluster state monitoring document");
ClusterStateMarvelDoc marvelDoc = new ClusterStateMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("cluster_state");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setClusterState(clusterState);
marvelDoc.setStatus(clusterHealth.getStatus());

logger.debug("--> rendering the document");
Renderer renderer = new ClusterStateRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

String nodeId = clusterState.getNodes().getLocalNodeId();
logger.debug("--> replace the local node id in sample document with {}", nodeId);
expected = Strings.replace(expected, "__node_id__", nodeId);

logger.debug("--> comparing both documents, they must have the same structure");
RendererTestUtils.assertJSONStructure(result, expected);
}
}
@@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.cluster;

import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.StreamsUtils;

public class ClusterStatsRendererTests extends ESSingleNodeTestCase {
private static final String SAMPLE_FILE = "/samples/cluster_stats.json";

public void testClusterStatsRenderer() throws Exception {
createIndex("index-0");

logger.debug("--> retrieving cluster stats response");
ClusterStatsResponse clusterStats = client().admin().cluster().prepareClusterStats().get();

logger.debug("--> creating the cluster stats monitoring document");
ClusterStatsMarvelDoc marvelDoc = new ClusterStatsMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("cluster_stats");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setClusterStats(clusterStats);

logger.debug("--> rendering the document");
Renderer renderer = new ClusterStatsRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

logger.debug("--> comparing both documents, they must have the same structure");
RendererTestUtils.assertJSONStructure(result, expected);
}
}
@@ -1,67 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IndexRecoveryRendererTests extends ESTestCase {
private static final String SAMPLE_FILE = "/samples/index_recovery.json";

public void testIndexRecoveryRenderer() throws Exception {
logger.debug("--> creating the index recovery monitoring document");
String indexName = "index-0";

DiscoveryNode source = new DiscoveryNode("node-src", DummyTransportAddress.INSTANCE, Version.CURRENT);
DiscoveryNode target = new DiscoveryNode("node-tgt", DummyTransportAddress.INSTANCE, Version.CURRENT);

List<RecoveryState> shards = new ArrayList<>();

// Shard 0
RecoveryState shard0 = new RecoveryState(new ShardId(indexName, "testUUID", 0), true, RecoveryState.Type.PRIMARY_RELOCATION,
source, target);
shards.add(shard0);

// Shard 1
RecoveryState shard1 = new RecoveryState(new ShardId(indexName, "testUUID", 1), true, RecoveryState.Type.STORE, source, target);
shards.add(shard1);

Map<String, List<RecoveryState>> shardResponses = new HashMap<>(1);
shardResponses.put(indexName, shards);

RecoveryResponse recoveryResponse = new RecoveryResponse(2, 2, 2, false, shardResponses, null);

IndexRecoveryMarvelDoc marvelDoc = new IndexRecoveryMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("index_recovery");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setRecoveryResponse(recoveryResponse);

logger.debug("--> rendering the document");
Renderer renderer = new IndexRecoveryRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

logger.debug("--> comparing both documents, they must be identical");
RendererTestUtils.assertJSONStructureAndValues(result, expected);
}
}
@@ -1,81 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;

import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;

public class IndexStatsRendererTests extends ESTestCase {
private static final String SAMPLE_FILE = "/samples/index_stats.json";

public void testIndexStatsRenderer() throws Exception {
logger.debug("--> creating the index stats monitoring document");
IndexStatsMarvelDoc marvelDoc = new IndexStatsMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("index_stats");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setIndexStats(
new IndexStats("index-0", new ShardStats[0]) {
@Override
public CommonStats getTotal() {
CommonStats stats = new CommonStats();
stats.docs = new DocsStats(345678L, 123L);
stats.store = new StoreStats(5761573L, 0L);
stats.indexing = new IndexingStats(new IndexingStats.Stats(3L, 71L, 0L, 0L, 0L, 0L, 0L, 0L, true, 302L), null);
stats.search = new SearchStats(new SearchStats.Stats(1L, 7L, 0L, 0L, 0L, 0L, 0L, 0L, 0L), 0L, null);
stats.merge = new MergeStats();
stats.merge.add(0L, 0L, 0L, 42L, 0L, 0L, 0L, 0L, 0L, 0L);
stats.refresh = new RefreshStats(0L, 978L);
stats.fieldData = new FieldDataStats(123456L, 0L, null);
stats.segments = new SegmentsStats();
stats.segments.add(0, 87965412L);
return stats;
}

@Override
public CommonStats getPrimaries() {
// Primaries will be filtered out by the renderer
CommonStats stats = new CommonStats();
stats.docs = new DocsStats(345678L, randomLong());
stats.store = new StoreStats(randomLong(), randomLong());
stats.indexing = new IndexingStats(new IndexingStats.Stats(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, true,
randomLong()), null);
stats.search = new SearchStats(new SearchStats.Stats(1L, 7L, 0L, 0L, 0L, 0L, 0L, 0L, 0L), 0L, null);
stats.merge = new MergeStats();
stats.merge.add(0L, 0L, 0L, 42L, 0L, 0L, 0L, 0L, 0L, 0L);
stats.refresh = new RefreshStats(0L, 978L);
stats.fieldData = new FieldDataStats(123456L, 0L, null);
stats.segments = new SegmentsStats();
stats.segments.add(0, 87965412L);
return stats;
}
});

logger.debug("--> rendering the document");
Renderer renderer = new IndexStatsRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

logger.debug("--> comparing both documents, they must be identical");
RendererTestUtils.assertJSONStructureAndValues(result, expected);
}
}
@@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.indices;

import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.StreamsUtils;

public class IndicesStatsRendererTests extends ESSingleNodeTestCase {
private static final String SAMPLE_FILE = "/samples/indices_stats.json";

public void testIndexStatsRenderer() throws Exception {
createIndex("index-0");

logger.debug("--> retrieving indices stats response");
IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().get();

logger.debug("--> creating the indices stats monitoring document");
IndicesStatsMarvelDoc marvelDoc = new IndicesStatsMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("indices_stats");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setIndicesStats(indicesStats);

logger.debug("--> rendering the document");
Renderer renderer = new IndicesStatsRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

logger.debug("--> comparing both documents, they must have the same structure");
RendererTestUtils.assertJSONStructure(result, expected);
}
}
@@ -1,56 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.node;

import org.apache.lucene.util.Constants;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMarvelDoc;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.Map;

public class NodeStatsRendererTests extends ESSingleNodeTestCase {

public void testNodeStatsRenderer() throws Exception {
createIndex("index-0");

logger.debug("--> retrieving node stats");
NodeStats nodeStats = getInstanceFromNode(NodeService.class).stats();

logger.debug("--> creating the node stats monitoring document");
NodeStatsMarvelDoc marvelDoc = new NodeStatsMarvelDoc();
marvelDoc.setClusterUUID("test");
marvelDoc.setType("node_stats");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setNodeId("node-0");
marvelDoc.setNodeMaster(true);
marvelDoc.setNodeStats(nodeStats);
marvelDoc.setMlockall(false);
marvelDoc.setDiskThresholdWaterMarkHigh(90.0);
marvelDoc.setDiskThresholdDeciderEnabled(true);

logger.debug("--> rendering the document");
try (BytesStreamOutput os = new BytesStreamOutput()) {
new NodeStatsRenderer().render(marvelDoc, XContentType.JSON, os);
Map<String, Object> result = XContentHelper.convertToMap(os.bytes(), false).v2();

for (String field : NodeStatsRenderer.FILTERS) {
if (Constants.WINDOWS) {
// load average is unavailable on Windows
if ("node_stats.os.cpu.load_average.1m".equals(field)) {
continue;
}
}
assertNotNull("expecting field to be present:" + field, XContentMapValues.extractValue(field, result));
}
}
}
}
@@ -1,62 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.renderer.shards;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.shards.ShardMarvelDoc;
import org.elasticsearch.marvel.agent.renderer.Renderer;
import org.elasticsearch.marvel.agent.renderer.RendererTestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.StreamsUtils;

import java.io.IOException;

public class ShardsRendererTests extends ESSingleNodeTestCase {
private static final String SAMPLE_FILE = "/samples/shards.json";

public void testShardsRenderer() throws Exception {
createIndex("my-index", Settings.settingsBuilder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.build());

logger.debug("--> retrieving cluster state");
ClusterState clusterState = getInstanceFromNode(ClusterService.class).state();

logger.debug("--> creating the shard monitoring document");
ShardMarvelDoc marvelDoc = new ShardMarvelDoc(null, "shards", "my-id");
marvelDoc.setClusterUUID("test");
marvelDoc.setClusterUUID(clusterState.metaData().clusterUUID());
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setShardRouting(clusterState.routingTable().allShards().iterator().next());
marvelDoc.setClusterStateUUID(clusterState.stateUUID());

logger.debug("--> rendering the document");
Renderer renderer = new ShardsRenderer();
String result = RendererTestUtils.renderAsJSON(marvelDoc, renderer);

logger.debug("--> loading sample document from file {}", SAMPLE_FILE);
String expected = StreamsUtils.copyToStringFromClasspath(SAMPLE_FILE);

logger.debug("--> comparing both documents, they must have the same structure");
RendererTestUtils.assertJSONStructure(result, expected);
}

public void testNoShard() throws IOException {
ShardMarvelDoc marvelDoc = new ShardMarvelDoc(null, "shards", "my-id");
marvelDoc.setClusterUUID("cluster-uuid");
marvelDoc.setTimestamp(1437580442979L);
marvelDoc.setShardRouting(null);
marvelDoc.setClusterStateUUID("my-state-uuid");

String result = RendererTestUtils.renderAsJSON(marvelDoc, new ShardsRenderer());
RendererTestUtils.assertJSONStructureAndValues(result, "{\"cluster_uuid\":\"my-cluster-uuid\",\"timestamp\":\"2015-07-22T15:54:02" +
".979Z\",\"state_uuid\":\"my-state-uuid\"}");
}
}
@@ -0,0 +1,74 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;

public class DataResolverTests extends MonitoringIndexNameResolverTestCase {

private int randomVersion = randomIntBetween(0, 100);

@Override
protected MonitoringIndexNameResolver<MonitoringDoc> newResolver() {
return newDataResolver(randomVersion);
}

@Override
protected MonitoringDoc newMarvelDoc() {
MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
doc.setClusterUUID(randomAsciiOfLength(5));
doc.setTimestamp(Math.abs(randomLong()));
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
return doc;
}

@Override
protected boolean checkResolvedType() {
return false;
}

@Override
protected boolean checkResolvedId() {
return false;
}

@Override
protected boolean checkFilters() {
return false;
}

public void testDataResolver() {
assertThat(newDataResolver(randomVersion).index(newMarvelDoc()), equalTo(".monitoring-data-" + randomVersion));
assertThat(newDataResolver(42).index(newMarvelDoc()), equalTo(".monitoring-data-42"));
}

private MonitoringIndexNameResolver.Data<MonitoringDoc> newDataResolver(int randomVersion) {
return new MonitoringIndexNameResolver.Data<MonitoringDoc>(randomVersion) {
@Override
public String type(MonitoringDoc document) {
return null;
}

@Override
public String id(MonitoringDoc document) {
return null;
}

@Override
protected void buildXContent(MonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
}
};
}
}
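Worth noting from the test above: the data resolver ignores the document entirely when building the index name (the template test earlier in this commit even calls resolver.index(null)), so every document routed through it lands in the same fixed index. A minimal sketch of the naming rule the assertions pin down:

// PREFIX + DELIMITER + "data" + DELIMITER + version, with no timestamp involved
String dataIndex = ".monitoring-data-" + version;   // e.g. version 42 -> ".monitoring-data-42"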
@@ -0,0 +1,196 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.DELIMITER;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.Fields.CLUSTER_UUID;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.Fields.SOURCE_NODE;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.Fields.TIMESTAMP;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.PREFIX;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;

public abstract class MonitoringIndexNameResolverTestCase<M extends MonitoringDoc, R extends MonitoringIndexNameResolver<M>>
extends ESTestCase {

/**
* @return the {@link MonitoringIndexNameResolver} to test
*/
protected abstract R newResolver();

/**
* @return a new randomly created {@link MonitoringDoc} instance to use in tests
*/
protected abstract M newMarvelDoc();

public void testMarvelDoc() {
MonitoringDoc doc = newMarvelDoc();
assertThat(doc.getMonitoringId(), equalTo(MonitoringIds.fromId(doc.getMonitoringId()).getId()));
assertThat(doc.getMonitoringVersion(), not(isEmptyOrNullString()));
assertThat(doc.getClusterUUID(), not(isEmptyOrNullString()));
assertThat(doc.getTimestamp(), greaterThan(0L));
}

protected boolean checkResolvedIndex() {
return true;
}

@SuppressWarnings("unchecked")
public void testIndex() {
if (checkResolvedIndex()) {
assertThat(newResolver().index(newMarvelDoc()), not(isEmptyOrNullString()));
} else {
assertThat(newResolver().index(newMarvelDoc()), is(nullValue()));
}
}

protected boolean checkResolvedType() {
return true;
}

@SuppressWarnings("unchecked")
public void testType() {
if (checkResolvedType()) {
assertThat(newResolver().type(newMarvelDoc()), not(isEmptyOrNullString()));
} else {
assertThat(newResolver().type(newMarvelDoc()), is(nullValue()));
}
}

protected boolean checkResolvedId() {
return true;
}

@SuppressWarnings("unchecked")
public void testId() {
if (checkResolvedId()) {
assertThat(newResolver().id(newMarvelDoc()), not(isEmptyOrNullString()));
} else {
assertThat(newResolver().id(newMarvelDoc()), is(nullValue()));
}
}

protected boolean checkFilters() {
return true;
}

public void testFilters() {
if (checkFilters()) {
assertThat(newResolver().filters(), allOf(not(emptyArray()), notNullValue()));
} else {
assertThat(newResolver().filters(), anyOf(emptyArray(), nullValue()));
}
}

@SuppressWarnings("unchecked")
public void testSource() throws IOException {
MonitoringIndexNameResolver resolver = newResolver();
BytesReference source = resolver.source(newMarvelDoc(), randomFrom(XContentType.values()));

assertNotNull(source);
assertThat(source.length(), greaterThan(0));
assertSource(source, resolver.filters());
}

@SuppressWarnings("unchecked")
public void testResolver() {
MonitoringIndexNameResolver resolver = newResolver();
assertThat(resolver, notNullValue());
assertThat(resolver.getVersion(), greaterThanOrEqualTo(0));

if (resolver instanceof MonitoringIndexNameResolver.Timestamped) {
MonitoringIndexNameResolver.Timestamped timestamped = (MonitoringIndexNameResolver.Timestamped) resolver;
assertThat(resolver.index(newMarvelDoc()),
startsWith(PREFIX + DELIMITER + timestamped.getId() + DELIMITER + timestamped.getVersion() + DELIMITER));
}

if (resolver instanceof MonitoringIndexNameResolver.Data) {
MonitoringIndexNameResolver.Data data = (MonitoringIndexNameResolver.Data) resolver;
assertThat(resolver.index(newMarvelDoc()),
equalTo(PREFIX + DELIMITER + MonitoringIndexNameResolver.Data.DATA + DELIMITER + String.valueOf(data.getVersion())));
}
}

protected void assertSource(BytesReference source, String... fields) {
Map<String, Object> sourceFields = XContentHelper.convertToMap(source, false).v2();
assertNotNull(sourceFields);

String[] commons = new String[]{
CLUSTER_UUID.underscore().getValue(),
TIMESTAMP.underscore().getValue(),
SOURCE_NODE.underscore().getValue(),
};
assertThat("source must contains default fields", sourceFields.keySet(), hasItems(commons));

if (fields != null) {
for (String field : fields) {
assertNotNull("source must contain field [" + field + "] with a non-null value",
XContentMapValues.extractValue(field, sourceFields));
}

// Checks that no extra fields are present
Set<String> extra = new HashSet<>();
for (String key : flattenMapKeys(new HashSet<>(), null, sourceFields)) {
boolean found = false;
for (String field : fields) {
if (key.equalsIgnoreCase(field) || key.startsWith(field + ".")) {
found = true;
break;
}
}
if (!found) {
extra.add(key);
}
}
assertThat("found extra field(s) " + extra, extra.size(), equalTo(0));
}
}

protected static String randomMonitoringId() {
return randomFrom(MonitoringIds.values()).getId();
}

@SuppressWarnings("unchecked")
private static Set<String> flattenMapKeys(final Set<String> keys, String path, Map<String, Object> map) {
if (map == null || map.isEmpty()) {
return keys;
}

for (Map.Entry<String, Object> entry : map.entrySet()) {
String p = path != null ? path + "." + entry.getKey() : entry.getKey();
if (entry.getValue() instanceof Map) {
flattenMapKeys(keys, p, (Map)entry.getValue());
} else {
keys.add(p);
}
}
return keys;
}
}
@@ -0,0 +1,96 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.joda.time.format.DateTimeFormat;

import java.io.IOException;
import java.util.Arrays;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.DELIMITER;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.PREFIX;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class TimestampedResolverTests extends MonitoringIndexNameResolverTestCase {

private String randomId = randomFrom(MonitoringIds.values()).getId();
private int randomVersion = randomIntBetween(0, 100);

@Override
protected MonitoringIndexNameResolver<MonitoringDoc> newResolver() {
return newTimestampedResolver(randomId, randomVersion, Settings.EMPTY);
}

@Override
protected MonitoringDoc newMarvelDoc() {
MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
doc.setClusterUUID(randomAsciiOfLength(5));
doc.setTimestamp(Math.abs(randomLong()));
doc.setSourceNode(new DiscoveryNode(randomAsciiOfLength(5), DummyTransportAddress.INSTANCE, Version.CURRENT));
return doc;
}

@Override
protected boolean checkResolvedType() {
return false;
}

@Override
protected boolean checkResolvedId() {
return false;
}

@Override
protected boolean checkFilters() {
return false;
}

public void testTimestampedResolver() {
final MonitoringDoc doc = newMarvelDoc();
doc.setTimestamp(1437580442979L); // "2015-07-22T15:54:02.979Z"

for (String format : Arrays.asList("YYYY", "YYYY.MM", "YYYY.MM.dd", "YYYY.MM.dd-HH", "YYYY.MM.dd-HH.mm", "YYYY.MM.dd-HH.mm.SS")) {
Settings settings = Settings.EMPTY;
if (format != null) {
settings = settingsBuilder()
.put(MonitoringIndexNameResolver.Timestamped.INDEX_NAME_TIME_FORMAT_SETTING.getKey(), format)
.build();
}

MonitoringIndexNameResolver.Timestamped resolver = newTimestampedResolver(randomId, randomVersion, settings);
assertThat(resolver, notNullValue());
assertThat(resolver.getId(), equalTo(randomId));
assertThat(resolver.getVersion(), equalTo(randomVersion));
assertThat(resolver.index(doc),
equalTo(PREFIX + DELIMITER + resolver.getId() + DELIMITER + String.valueOf(resolver.getVersion())
+ DELIMITER + DateTimeFormat.forPattern(format).withZoneUTC().print(1437580442979L)));
}
}

private MonitoringIndexNameResolver.Timestamped<MonitoringDoc> newTimestampedResolver(String id, int version, Settings settings) {
return new MonitoringIndexNameResolver.Timestamped<MonitoringDoc>(id, version, settings) {
@Override
public String type(MonitoringDoc document) {
return null;
}

@Override
protected void buildXContent(MonitoringDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
return;
}
};
}
}
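The timestamped resolver, by contrast, appends a date-formatted suffix derived from the document timestamp. A minimal sketch of the index name computed by the loop above, using the "YYYY.MM.dd" pattern as an assumed example (the default time format is not shown in this diff):

long timestamp = 1437580442979L;   // 2015-07-22T15:54:02.979Z, the value used in the test
String index = ".monitoring" + "-" + "es" + "-" + 1 + "-"
        + DateTimeFormat.forPattern("YYYY.MM.dd").withZoneUTC().print(timestamp);
// -> ".monitoring-es-1-2015.07.22"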
@@ -0,0 +1,84 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsNodeResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;

public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCase<ClusterInfoMonitoringDoc, ClusterInfoResolver> {

@Override
protected ClusterInfoResolver newResolver() {
return new ClusterInfoResolver();
}

@Override
protected ClusterInfoMonitoringDoc newMarvelDoc() {
try {
License.Builder licenseBuilder = License.builder()
.uid(UUID.randomUUID().toString())
.type("trial")
.issuer("elasticsearch")
.issuedTo("customer")
.maxNodes(5)
.issueDate(1437580442979L)
.expiryDate(1437580442979L + TimeValue.timeValueHours(2).getMillis());

ClusterInfoMonitoringDoc doc = new ClusterInfoMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
doc.setClusterUUID(randomAsciiOfLength(5));
doc.setTimestamp(Math.abs(randomLong()));
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setVersion(randomFrom(Version.V_2_0_0, Version.CURRENT).toString());
doc.setLicense(licenseBuilder.build());
doc.setClusterName(randomAsciiOfLength(5));
doc.setClusterStats(new ClusterStatsResponse(Math.abs(randomLong()), ClusterName.DEFAULT,
randomAsciiOfLength(5), new ClusterStatsNodeResponse[]{}));
return doc;
} catch (Exception e) {
throw new IllegalStateException("Failed to generated random ClusterInfoMarvelDoc", e);
}
}

@Override
protected boolean checkFilters() {
return false;
}

public void testClusterInfoResolver() throws Exception {
String clusterUUID = UUID.randomUUID().toString();

ClusterInfoMonitoringDoc doc = newMarvelDoc();
doc.setClusterUUID(clusterUUID);

ClusterInfoResolver resolver = newResolver();
assertThat(resolver.index(doc), equalTo(".monitoring-data-" + MarvelTemplateUtils.TEMPLATE_VERSION));
assertThat(resolver.type(doc), equalTo(ClusterInfoResolver.TYPE));
assertThat(resolver.id(doc), equalTo(clusterUUID));

assertSource(resolver.source(doc, XContentType.JSON),
"cluster_uuid",
"timestamp",
"source_node",
"cluster_name",
"version",
"license",
"cluster_stats");
}
}
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.action.get.GetResponse;

@@ -11,9 +11,10 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.license.core.License;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.junit.After;

@@ -31,10 +32,11 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.Matchers.notNullValue;

@ClusterScope(scope = TEST)
public class ClusterInfoTests extends MarvelIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()

@@ -62,35 +64,38 @@ public class ClusterInfoTests extends MarvelIntegTestCase {
        assertTrue(Strings.hasText(clusterUUID));

        logger.debug("--> waiting for the monitoring data index to be created (it should have been created by the ClusterInfoCollector)");
        String dataIndex = ".monitoring-es-data-" + MarvelTemplateUtils.TEMPLATE_VERSION;
        String dataIndex = ".monitoring-data-" + MarvelTemplateUtils.TEMPLATE_VERSION;
        awaitIndexExists(dataIndex);

        logger.debug("--> waiting for cluster info collector to collect data");
        awaitMarvelDocsCount(equalTo(1L), ClusterStatsCollector.CLUSTER_INFO_TYPE);
        awaitMarvelDocsCount(equalTo(1L), ClusterInfoResolver.TYPE);

        logger.debug("--> retrieving cluster info document");
        GetResponse response = client().prepareGet(dataIndex, ClusterStatsCollector.CLUSTER_INFO_TYPE, clusterUUID).get();
        GetResponse response = client().prepareGet(dataIndex, ClusterInfoResolver.TYPE, clusterUUID).get();
        assertTrue("cluster_info document does not exist in data index", response.isExists());

        logger.debug("--> checking that the document contains license information");
        assertThat(response.getIndex(), equalTo(dataIndex));
        assertThat(response.getType(), equalTo(ClusterStatsCollector.CLUSTER_INFO_TYPE));
        assertThat(response.getType(), equalTo(ClusterInfoResolver.TYPE));
        assertThat(response.getId(), equalTo(clusterUUID));

        Map<String, Object> source = response.getSource();
        assertThat(source.get(ClusterInfoRenderer.Fields.CLUSTER_NAME.underscore().toString()), is(cluster().getClusterName()));
        assertThat(source.get(ClusterInfoRenderer.Fields.VERSION.underscore().toString()), is(Version.CURRENT.toString()));
        assertThat(source.get(MonitoringIndexNameResolver.Fields.CLUSTER_UUID.underscore().toString()), notNullValue());
        assertThat(source.get(MonitoringIndexNameResolver.Fields.TIMESTAMP.underscore().toString()), notNullValue());
        assertThat(source.get(MonitoringIndexNameResolver.Fields.SOURCE_NODE.underscore().toString()), notNullValue());
        assertThat(source.get(ClusterInfoResolver.Fields.CLUSTER_NAME.underscore().toString()), equalTo(cluster().getClusterName()));
        assertThat(source.get(ClusterInfoResolver.Fields.VERSION.underscore().toString()), equalTo(Version.CURRENT.toString()));

        Object licenseObj = source.get(ClusterInfoRenderer.Fields.LICENSE.underscore().toString());
        logger.debug("--> checking that the document contains license information");
        Object licenseObj = source.get(ClusterInfoResolver.Fields.LICENSE.underscore().toString());
        assertThat(licenseObj, instanceOf(Map.class));
        Map license = (Map) licenseObj;

        assertThat(license, instanceOf(Map.class));

        String uid = (String) license.get(ClusterInfoRenderer.Fields.UID.underscore().toString());
        String uid = (String) license.get(ClusterInfoResolver.Fields.UID.underscore().toString());
        assertThat(uid, not(isEmptyOrNullString()));

        String type = (String) license.get(ClusterInfoRenderer.Fields.TYPE.underscore().toString());
        String type = (String) license.get(ClusterInfoResolver.Fields.TYPE.underscore().toString());
        assertThat(type, not(isEmptyOrNullString()));

        String status = (String) license.get(License.XFields.STATUS.underscore().toString());

@@ -100,8 +105,8 @@ public class ClusterInfoTests extends MarvelIntegTestCase {
        assertThat(expiryDate, greaterThan(0L));

        // We basically recompute the hash here
        String hkey = (String) license.get(ClusterInfoRenderer.Fields.HKEY.underscore().toString());
        String recalculated = ClusterInfoRenderer.hash(status, uid, type, String.valueOf(expiryDate), clusterUUID);
        String hkey = (String) license.get(ClusterInfoResolver.Fields.HKEY.underscore().toString());
        String recalculated = ClusterInfoResolver.hash(status, uid, type, String.valueOf(expiryDate), clusterUUID);
        assertThat(hkey, equalTo(recalculated));

        assertThat((String) license.get(License.XFields.ISSUER.underscore().toString()), not(isEmptyOrNullString()));

@@ -109,7 +114,7 @@ public class ClusterInfoTests extends MarvelIntegTestCase {
        assertThat((Long) license.get(License.XFields.ISSUE_DATE_IN_MILLIS.underscore().toString()), greaterThan(0L));
        assertThat((Integer) license.get(License.XFields.MAX_NODES.underscore().toString()), greaterThan(0));

        Object clusterStats = source.get(ClusterStatsRenderer.Fields.CLUSTER_STATS.underscore().toString());
        Object clusterStats = source.get(ClusterInfoResolver.Fields.CLUSTER_STATS.underscore().toString());
        assertNotNull(clusterStats);
        assertThat(clusterStats, instanceOf(Map.class));
        assertThat(((Map) clusterStats).size(), greaterThan(0));

@@ -123,13 +128,13 @@ public class ClusterInfoTests extends MarvelIntegTestCase {

        assertHitCount(client().prepareSearch().setSize(0)
                .setIndices(dataIndex)
                .setTypes(ClusterStatsCollector.CLUSTER_INFO_TYPE)
                .setTypes(ClusterInfoResolver.TYPE)
                .setQuery(
                        QueryBuilders.boolQuery()
                                .should(QueryBuilders.matchQuery(License.XFields.STATUS.underscore().toString(), License.Status.ACTIVE.label()))
                                .should(QueryBuilders.matchQuery(License.XFields.STATUS.underscore().toString(), License.Status.INVALID.label()))
                                .should(QueryBuilders.matchQuery(License.XFields.STATUS.underscore().toString(), License.Status.EXPIRED.label()))
                                .should(QueryBuilders.matchQuery(ClusterInfoRenderer.Fields.CLUSTER_NAME.underscore().toString(),
                                .should(QueryBuilders.matchQuery(ClusterInfoResolver.Fields.CLUSTER_NAME.underscore().toString(),
                                        cluster().getClusterName()))
                                .minimumNumberShouldMatch(1)
                ).get(), 0L);

@@ -0,0 +1,72 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class ClusterStateNodeResolverTests extends
        MonitoringIndexNameResolverTestCase<ClusterStateNodeMonitoringDoc, ClusterStateNodeResolver> {

    @Override
    protected ClusterStateNodeResolver newResolver() {
        return new ClusterStateNodeResolver(Settings.EMPTY);
    }

    @Override
    protected ClusterStateNodeMonitoringDoc newMarvelDoc() {
        ClusterStateNodeMonitoringDoc doc = new ClusterStateNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setNodeId(UUID.randomUUID().toString());
        doc.setStateUUID(UUID.randomUUID().toString());
        return doc;
    }

    @Override
    protected boolean checkFilters() {
        return false;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testClusterStateNodeResolver() throws Exception {
        final String nodeId = UUID.randomUUID().toString();
        final String stateUUID = UUID.randomUUID().toString();

        ClusterStateNodeMonitoringDoc doc = newMarvelDoc();
        doc.setNodeId(nodeId);
        doc.setStateUUID(stateUUID);
        doc.setTimestamp(1437580442979L);

        ClusterStateNodeResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(ClusterStateNodeResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "state_uuid",
                "node.id");
    }
}

@@ -0,0 +1,70 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class ClusterStateResolverTests extends MonitoringIndexNameResolverTestCase<ClusterStateMonitoringDoc, ClusterStateResolver> {

    @Override
    protected ClusterStateResolver newResolver() {
        return new ClusterStateResolver(Settings.EMPTY);
    }

    @Override
    protected ClusterStateMonitoringDoc newMarvelDoc() {
        ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setStatus(randomFrom(ClusterHealthStatus.values()));

        DiscoveryNode masterNode = new DiscoveryNode("master", new LocalTransportAddress("master"), Version.CURRENT);
        DiscoveryNode otherNode = new DiscoveryNode("other", new LocalTransportAddress("other"), Version.CURRENT);
        DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(masterNode).put(otherNode).masterNodeId(masterNode.id()).build();
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
        doc.setClusterState(clusterState);
        return doc;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testClusterStateResolver() throws IOException {
        ClusterStateMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        ClusterStateResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(ClusterStateResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "cluster_state");
    }
}

@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.action.search.SearchResponse;

@@ -12,8 +12,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateCollector;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -25,8 +24,8 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.marvel.MarvelSettings.MONITORING_INDICES_PREFIX;
import static org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils.TEMPLATE_VERSION;
import static org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver.Fields.SOURCE_NODE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;

@@ -64,14 +63,14 @@ public class ClusterStateTests extends MarvelIntegTestCase {

    public void testClusterState() throws Exception {
        logger.debug("--> waiting for documents to be collected");
        awaitMarvelDocsCount(greaterThan(0L), ClusterStateCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), ClusterStateResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ClusterStateCollector.TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ClusterStateResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = ClusterStateRenderer.FILTERS;
        String[] filters = new ClusterStateResolver(Settings.EMPTY).filters();
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();

@@ -89,17 +88,17 @@ public class ClusterStateTests extends MarvelIntegTestCase {
     */
    public void testNoNodesIndexing() throws Exception {
        logger.debug("--> waiting for documents to be collected");
        awaitMarvelDocsCount(greaterThan(0L), ClusterStateCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), ClusterStateResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ClusterStateCollector.TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ClusterStateResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        DiscoveryNodes nodes = client().admin().cluster().prepareState().clear().setNodes(true).get().getState().nodes();

        logger.debug("--> ensure that the 'nodes' attributes of the cluster state document is not indexed");
        assertHitCount(client().prepareSearch().setSize(0)
                .setTypes(ClusterStateCollector.TYPE)
                .setTypes(ClusterStateResolver.TYPE)
                .setQuery(matchQuery("cluster_state.nodes." + nodes.masterNodeId() + ".name", nodes.masterNode().name())).get(), 0L);
    }

@@ -107,22 +106,21 @@ public class ClusterStateTests extends MarvelIntegTestCase {
        final long nbNodes = internalCluster().size();

        logger.debug("--> waiting for documents to be collected");
        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), ClusterStateCollector.NODE_TYPE);
        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), ClusterStateNodeResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateCollector.NODE_TYPE);
        SearchResponse response = client().prepareSearch(MONITORING_INDICES_PREFIX + TEMPLATE_VERSION + "-*")
                .setTypes(ClusterStateCollector.NODE_TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateNodeResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ClusterStateNodeResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(nbNodes));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = {
                AbstractRenderer.Fields.CLUSTER_UUID.underscore().toString(),
                AbstractRenderer.Fields.TIMESTAMP.underscore().toString(),
                AbstractRenderer.Fields.SOURCE_NODE.underscore().toString(),
                ClusterStateNodeRenderer.Fields.STATE_UUID.underscore().toString(),
                ClusterStateNodeRenderer.Fields.NODE.underscore().toString(),
                ClusterStateNodeRenderer.Fields.NODE.underscore().toString() + "." +
                        ClusterStateNodeRenderer.Fields.ID.underscore().toString(),
                MonitoringIndexNameResolver.Fields.CLUSTER_UUID.underscore().toString(),
                MonitoringIndexNameResolver.Fields.TIMESTAMP.underscore().toString(),
                SOURCE_NODE.underscore().toString(),
                ClusterStateNodeResolver.Fields.STATE_UUID.underscore().toString(),
                ClusterStateNodeResolver.Fields.NODE.underscore().toString(),
                ClusterStateNodeResolver.Fields.NODE.underscore().toString() + "."
                        + ClusterStateNodeResolver.Fields.ID.underscore().toString(),
        };

        for (SearchHit searchHit : response.getHits().getHits()) {

@@ -136,9 +134,8 @@ public class ClusterStateTests extends MarvelIntegTestCase {
        logger.debug("--> check that node attributes are indexed");
        assertThat(client().prepareSearch().setSize(0)
                .setIndices(MONITORING_INDICES_PREFIX + TEMPLATE_VERSION + "-*")
                .setTypes(ClusterStateCollector.NODE_TYPE)
                .setQuery(QueryBuilders.matchQuery(AbstractRenderer.Fields.SOURCE_NODE.underscore().toString() + ".attributes.custom",
                        randomInt)
                .setTypes(ClusterStateNodeResolver.TYPE)
                .setQuery(QueryBuilders.matchQuery(SOURCE_NODE.underscore().toString() + ".attributes.custom", randomInt)
                ).get().getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> cluster state nodes successfully collected");

@@ -147,28 +144,33 @@ public class ClusterStateTests extends MarvelIntegTestCase {
    public void testDiscoveryNodes() throws Exception {
        final long nbNodes = internalCluster().size();

        String dataIndex = ".monitoring-es-data-" + MarvelTemplateUtils.TEMPLATE_VERSION;
        MonitoringIndexNameResolver.Data dataResolver = internalCluster().getInstance(MonitoringIndexNameResolver.Data.class);
        assertNotNull(dataResolver);

        String dataIndex = dataResolver.indexPattern();
        awaitIndexExists(dataIndex);

        logger.debug("--> waiting for documents to be collected");
        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), ClusterStateCollector.NODE_TYPE);
        awaitMarvelDocsCount(greaterThanOrEqualTo(nbNodes), DiscoveryNodeResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", ClusterStateCollector.NODE_TYPE);
        SearchResponse response = client().prepareSearch(MarvelSettings.MONITORING_DATA_INDEX_PREFIX + "*")
                .setTypes(ClusterStateCollector.NODE_TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", DiscoveryNodeResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(DiscoveryNodeResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(nbNodes));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = {
                AbstractRenderer.Fields.CLUSTER_UUID.underscore().toString(),
                AbstractRenderer.Fields.TIMESTAMP.underscore().toString(),
                DiscoveryNodeRenderer.Fields.NODE.underscore().toString(),
                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.ID.underscore().toString(),
                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." + DiscoveryNodeRenderer.Fields.NAME.underscore().toString(),
                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." +
                        DiscoveryNodeRenderer.Fields.ATTRIBUTES.underscore().toString(),
                DiscoveryNodeRenderer.Fields.NODE.underscore().toString() + "." +
                        DiscoveryNodeRenderer.Fields.TRANSPORT_ADDRESS.underscore().toString(),
                MonitoringIndexNameResolver.Fields.CLUSTER_UUID.underscore().toString(),
                MonitoringIndexNameResolver.Fields.TIMESTAMP.underscore().toString(),
                MonitoringIndexNameResolver.Fields.SOURCE_NODE.underscore().toString(),
                DiscoveryNodeResolver.Fields.NODE.underscore().toString(),
                DiscoveryNodeResolver.Fields.NODE.underscore().toString() + "."
                        + DiscoveryNodeResolver.Fields.ID.underscore().toString(),
                DiscoveryNodeResolver.Fields.NODE.underscore().toString() + "."
                        + DiscoveryNodeResolver.Fields.NAME.underscore().toString(),
                DiscoveryNodeResolver.Fields.NODE.underscore().toString() + "."
                        + DiscoveryNodeResolver.Fields.ATTRIBUTES.underscore().toString(),
                DiscoveryNodeResolver.Fields.NODE.underscore().toString() + "."
                        + DiscoveryNodeResolver.Fields.TRANSPORT_ADDRESS.underscore().toString(),
        };

        for (SearchHit searchHit : response.getHits().getHits()) {

@@ -183,28 +185,16 @@ public class ClusterStateTests extends MarvelIntegTestCase {
            final String nodeId = internalCluster().clusterService(nodeName).localNode().getId();

            logger.debug("--> getting monitoring document for node id [{}]", nodeId);
            assertThat(client().prepareGet(dataIndex, ClusterStateCollector.NODE_TYPE, nodeId).get().isExists(), is(true));
            assertThat(client().prepareGet(dataIndex, DiscoveryNodeResolver.TYPE, nodeId).get().isExists(), is(true));

            // checks that document is not indexed
            assertHitCount(client().prepareSearch().setSize(0)
                    .setIndices(dataIndex)
                    .setTypes(ClusterStateCollector.NODE_TYPE)
                    .setTypes(DiscoveryNodeResolver.TYPE)
                    .setQuery(QueryBuilders.boolQuery()
                            .should(matchQuery("node.id", nodeId))
                            .should(matchQuery("node.name", nodeName))).get(), 0);
        }

        logger.debug("--> check that node documents are not indexed");
        securedFlush();
        securedRefresh();

        assertHitCount(client().prepareSearch().setSize(0)
                .setIndices(dataIndex)
                .setTypes(ClusterStateCollector.NODE_TYPE)
                .setQuery(QueryBuilders.matchQuery(ClusterInfoRenderer.Fields.CLUSTER_NAME.underscore().toString(),
                        cluster().getClusterName())
                ).get(), 0L);

        logger.debug("--> cluster state nodes successfully collected");
    }
}

@@ -0,0 +1,149 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsNodeResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.indices.NodeIndicesStats;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.DummyOsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;

import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCase<ClusterStatsMonitoringDoc, ClusterStatsResolver> {

    @Override
    protected ClusterStatsResolver newResolver() {
        return new ClusterStatsResolver(Settings.EMPTY);
    }

    @Override
    protected ClusterStatsMonitoringDoc newMarvelDoc() {
        ClusterStatsMonitoringDoc doc = new ClusterStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setClusterStats(randomClusterStats());
        return doc;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testClusterStatsResolver() throws Exception {
        ClusterStatsMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        ClusterStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(ClusterStatsResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "cluster_stats");
    }

    /**
     * @return a testing {@link ClusterStatsResponse} used to resolve a marvel document.
     */
    private ClusterStatsResponse randomClusterStats() {
        ClusterStatsNodeResponse[] responses = {
                new ClusterStatsNodeResponse(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT),
                        ClusterHealthStatus.GREEN, randomNodeInfo(), randomNodeStats(), randomShardStats())
        };
        return new ClusterStatsResponse(Math.abs(randomLong()), ClusterName.DEFAULT, UUID.randomUUID().toString(), responses);
    }

    /**
     * @return a random {@link NodeInfo} used to resolve a marvel document.
     */
    private NodeInfo randomNodeInfo() {
        BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE},
                DummyTransportAddress.INSTANCE);
        return new NodeInfo(Version.CURRENT, org.elasticsearch.Build.CURRENT,
                new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT), Collections.emptyMap(),
                Settings.EMPTY, DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean()), JvmInfo.jvmInfo(),
                new ThreadPoolInfo(Collections.singletonList(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5))),
                new TransportInfo(transportAddress, Collections.emptyMap()), new HttpInfo(transportAddress, randomLong()),
                new PluginsAndModules());

    }

    /**
     * @return a random {@link NodeStats} used to resolve a marvel document.
     */
    private NodeStats randomNodeStats() {
        Index index = new Index("test", UUID.randomUUID().toString());
        FsInfo.Path[] pathInfo = new FsInfo.Path[]{
                new FsInfo.Path("/test", "/dev/sda", 10, -8, 0),
        };
        Map<Index, List<IndexShardStats>> statsByShard = new HashMap<>();
        statsByShard.put(index, Collections.singletonList(new IndexShardStats(new ShardId(index, 0), randomShardStats())));
        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
                new NodeIndicesStats(new CommonStats(), statsByShard), null, null, null, null,
                new FsInfo(0, pathInfo), null, null, null, null, null);
    }

    /**
     * @return a random ShardStats[] used to resolve a marvel document.
     */
    private ShardStats[] randomShardStats() {
        Index index = new Index("test", UUID.randomUUID().toString());
        Path shardPath = createTempDir().resolve("indices").resolve("test").resolve("0");
        ShardRouting shardRouting = ShardRouting.newUnassigned(index, 0, null, false,
                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
        CommonStats shardCommonStats = new CommonStats();
        shardCommonStats.fieldData = new FieldDataStats();
        shardCommonStats.queryCache = new QueryCacheStats();
        return new ShardStats[]{
                new ShardStats(shardRouting, new ShardPath(false, shardPath, shardPath, "", new ShardId(index, 0)), shardCommonStats, null)
        };
    }
}

@@ -3,13 +3,13 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.cluster;
package org.elasticsearch.marvel.agent.resolver.cluster;

import org.elasticsearch.action.admin.cluster.stats.ClusterStatsNodes;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -25,6 +25,7 @@ import static org.hamcrest.Matchers.greaterThan;

@ClusterScope(scope = Scope.TEST, numClientNodes = 0)
public class ClusterStatsTests extends MarvelIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()

@@ -61,14 +62,14 @@ public class ClusterStatsTests extends MarvelIntegTestCase {
        // ok.. we'll start collecting now...
        updateMarvelInterval(3L, TimeUnit.SECONDS);

        awaitMarvelDocsCount(greaterThan(0L), ClusterStatsCollector.CLUSTER_STATS_TYPE);
        awaitMarvelDocsCount(greaterThan(0L), ClusterStatsResolver.TYPE);

        assertBusy(new Runnable() {
            @Override
            public void run() {
                logger.debug("--> checking that every document contains the expected fields");
                SearchResponse response = client().prepareSearch().setTypes(ClusterStatsCollector.CLUSTER_STATS_TYPE).get();
                String[] filters = ClusterStatsRenderer.FILTERS;
                SearchResponse response = client().prepareSearch().setTypes(ClusterStatsResolver.TYPE).get();
                String[] filters = new ClusterStatsResolver(Settings.EMPTY).filters();
                for (SearchHit searchHit : response.getHits().getHits()) {
                    Map<String, Object> fields = searchHit.sourceAsMap();

@@ -0,0 +1,74 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class IndexRecoveryResolverTests extends MonitoringIndexNameResolverTestCase<IndexRecoveryMonitoringDoc, IndexRecoveryResolver> {

    @Override
    protected IndexRecoveryResolver newResolver() {
        return new IndexRecoveryResolver(Settings.EMPTY);
    }

    @Override
    protected IndexRecoveryMonitoringDoc newMarvelDoc() {
        IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));

        DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
        Map<String, java.util.List<RecoveryState>> shardRecoveryStates = new HashMap<>();
        shardRecoveryStates.put("test", Collections.singletonList(new RecoveryState(new ShardId("test", "uuid", 0), true,
                RecoveryState.Type.STORE, localNode, localNode)));
        doc.setRecoveryResponse(new RecoveryResponse(10, 10, 0, false, shardRecoveryStates, Collections.emptyList()));
        return doc;
    }

    @Override
    protected boolean checkFilters() {
        return false;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testIndexRecoveryResolver() throws Exception {
        IndexRecoveryMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        IndexRecoveryResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(IndexRecoveryResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "index_recovery");
    }
}

@@ -3,15 +3,15 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
import org.elasticsearch.marvel.agent.renderer.AbstractRenderer;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryCollector;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -28,6 +28,7 @@ import static org.hamcrest.Matchers.is;

@ClusterScope(scope = TEST)
public class IndexRecoveryTests extends MarvelIntegTestCase {

    private static final String INDEX_PREFIX = "test-index-recovery-";

    @Override

@@ -68,29 +69,27 @@ public class IndexRecoveryTests extends MarvelIntegTestCase {
        updateMarvelInterval(3L, TimeUnit.SECONDS);
        waitForMarvelIndices();

        awaitMarvelDocsCount(greaterThan(0L), IndexRecoveryCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), IndexRecoveryResolver.TYPE);

        String clusterUUID = client().admin().cluster().prepareState().setMetaData(true).get().getState().metaData().clusterUUID();
        assertTrue(Strings.hasText(clusterUUID));

        logger.debug("--> searching for monitoring documents of type [{}]", IndexRecoveryCollector.TYPE);
        SearchResponse response = client().prepareSearch(MarvelSettings.MONITORING_INDICES_PREFIX + "*")
                .setTypes(IndexRecoveryCollector.TYPE)
                .get();
        logger.debug("--> searching for monitoring documents of type [{}]", IndexRecoveryResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(IndexRecoveryResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = {
                AbstractRenderer.Fields.CLUSTER_UUID.underscore().toString(),
                AbstractRenderer.Fields.TIMESTAMP.underscore().toString(),
                IndexRecoveryRenderer.Fields.INDEX_RECOVERY.underscore().toString(),
                IndexRecoveryRenderer.Fields.INDEX_RECOVERY.underscore().toString() + "." +
                        IndexRecoveryRenderer.Fields.SHARDS.underscore().toString(),
                MonitoringIndexNameResolver.Fields.CLUSTER_UUID.underscore().toString(),
                MonitoringIndexNameResolver.Fields.TIMESTAMP.underscore().toString(),
                MonitoringIndexNameResolver.Fields.SOURCE_NODE.underscore().toString(),
                IndexRecoveryResolver.Fields.INDEX_RECOVERY.underscore().toString(),
                IndexRecoveryResolver.Fields.INDEX_RECOVERY.underscore().toString() + "."
                        + IndexRecoveryResolver.Fields.SHARDS.underscore().toString(),
        };

        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();

            for (String filter : filters) {
                assertContains(filter, fields);
            }

@@ -100,11 +99,11 @@ public class IndexRecoveryTests extends MarvelIntegTestCase {
        securedRefresh();

        logger.debug("--> checking that cluster_uuid field is correctly indexed");
        response = client().prepareSearch().setTypes(IndexRecoveryCollector.TYPE).setSize(0).setQuery(existsQuery("cluster_uuid")).get();
        response = client().prepareSearch().setTypes(IndexRecoveryResolver.TYPE).setSize(0).setQuery(existsQuery("cluster_uuid")).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that timestamp field is correctly indexed");
        response = client().prepareSearch().setTypes(IndexRecoveryCollector.TYPE).setSize(0).setQuery(existsQuery("timestamp")).get();
        response = client().prepareSearch().setTypes(IndexRecoveryResolver.TYPE).setSize(0).setQuery(existsQuery("timestamp")).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that other fields are not indexed");

@@ -118,7 +117,7 @@ public class IndexRecoveryTests extends MarvelIntegTestCase {
        };

        for (String field : fields) {
            response = client().prepareSearch().setTypes(IndexRecoveryCollector.TYPE).setSize(0).setQuery(existsQuery(field)).get();
            response = client().prepareSearch().setTypes(IndexRecoveryResolver.TYPE).setSize(0).setQuery(existsQuery(field)).get();
            assertHitCount(response, 0L);
        }

@@ -0,0 +1,104 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingTestUtils;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.nio.file.Path;
import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase<IndexStatsMonitoringDoc, IndexStatsResolver> {

    @Override
    protected IndexStatsResolver newResolver() {
        return new IndexStatsResolver(Settings.EMPTY);
    }

    @Override
    protected IndexStatsMonitoringDoc newMarvelDoc() {
        IndexStatsMonitoringDoc doc = new IndexStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setIndexStats(randomIndexStats());
        return doc;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testIndexStatsResolver() throws Exception {
        IndexStatsMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));

        IndexStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(IndexStatsResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "index_stats");
    }

    /**
     * @return a random {@link IndexStats} object.
     */
    private IndexStats randomIndexStats() {
        Index index = new Index("test-" + randomIntBetween(0, 5), UUID.randomUUID().toString());
        ShardId shardId = new ShardId(index, 0);
        Path path = createTempDir().resolve("indices").resolve(index.getName()).resolve("0");
        ShardRouting shardRouting = ShardRouting.newUnassigned(index, 0, null, true,
                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        ShardRoutingTestUtils.initialize(shardRouting, "node-0");
        ShardRoutingTestUtils.moveToStarted(shardRouting);
        CommonStats stats = new CommonStats();
        stats.fieldData = new FieldDataStats();
        stats.queryCache = new QueryCacheStats();
        stats.docs = new DocsStats();
        stats.store = new StoreStats();
        stats.indexing = new IndexingStats();
        stats.search = new SearchStats();
        stats.segments = new SegmentsStats();
        stats.merge = new MergeStats();
        stats.refresh = new RefreshStats();
        ShardStats shardStats = new ShardStats(shardRouting, new ShardPath(false, path, path, null, shardId), stats, null);
        return new IndexStats(index.getName(), new ShardStats[]{shardStats});
    }
}

@@ -3,13 +3,14 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.indices.IndexStatsCollector;
import org.elasticsearch.marvel.agent.resolver.indices.IndexStatsResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -23,6 +24,7 @@ import static org.hamcrest.Matchers.greaterThan;

@ClusterScope(scope = Scope.TEST, numClientNodes = 0)
public class IndexStatsTests extends MarvelIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()

@@ -63,7 +65,7 @@ public class IndexStatsTests extends MarvelIntegTestCase {
        updateMarvelInterval(3L, TimeUnit.SECONDS);
        waitForMarvelIndices();

        awaitMarvelDocsCount(greaterThan(0L), IndexStatsCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), IndexStatsResolver.TYPE);

        logger.debug("--> wait for index stats collector to collect stat for each index");
        assertBusy(new Runnable() {

@@ -74,7 +76,7 @@ public class IndexStatsTests extends MarvelIntegTestCase {
                for (int i = 0; i < nbIndices; i++) {
                    SearchResponse count = client().prepareSearch()
                            .setSize(0)
                            .setTypes(IndexStatsCollector.TYPE)
                            .setTypes(IndexStatsResolver.TYPE)
                            .setQuery(QueryBuilders.termQuery("index_stats.index", indices[i]))
                            .get();
                    assertThat(count.getHits().totalHits(), greaterThan(0L));

@@ -82,12 +84,12 @@ public class IndexStatsTests extends MarvelIntegTestCase {
            }
        });

        logger.debug("--> searching for monitoring documents of type [{}]", IndexStatsCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(IndexStatsCollector.TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", IndexStatsResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(IndexStatsResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = IndexStatsRenderer.FILTERS;
        String[] filters = new IndexStatsResolver(Settings.EMPTY).filters();
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();

@@ -0,0 +1,114 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponseTestUtils;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingTestUtils;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCase<IndicesStatsMonitoringDoc, IndicesStatsResolver> {

    @Override
    protected IndicesStatsResolver newResolver() {
        return new IndicesStatsResolver(Settings.EMPTY);
    }

    @Override
    protected IndicesStatsMonitoringDoc newMarvelDoc() {
        IndicesStatsMonitoringDoc doc = new IndicesStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setIndicesStats(randomIndicesStats());
        return doc;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testIndicesStatsResolver() throws Exception {
        IndicesStatsMonitoringDoc doc = newMarvelDoc();
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(1437580442979L);
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));

        IndicesStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(IndicesStatsResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "indices_stats");
    }

    /**
     * @return a random {@link IndicesStatsResponse} object.
     */
    private IndicesStatsResponse randomIndicesStats() {
        Index index = new Index("test", UUID.randomUUID().toString());

        List<ShardStats> shardStats = new ArrayList<>();
        for (int i=0; i < randomIntBetween(2, 5); i++) {
            ShardId shardId = new ShardId(index, i);
            Path path = createTempDir().resolve("indices").resolve(index.getName()).resolve(String.valueOf(i));
            ShardRouting shardRouting = ShardRouting.newUnassigned(index, i, null, true,
                    new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
            ShardRoutingTestUtils.initialize(shardRouting, "node-0");
            ShardRoutingTestUtils.moveToStarted(shardRouting);
            CommonStats stats = new CommonStats();
            stats.fieldData = new FieldDataStats();
            stats.queryCache = new QueryCacheStats();
            stats.docs = new DocsStats();
            stats.store = new StoreStats();
            stats.indexing = new IndexingStats();
            stats.search = new SearchStats();
            stats.segments = new SegmentsStats();
            stats.merge = new MergeStats();
            stats.refresh = new RefreshStats();
            shardStats.add(new ShardStats(shardRouting, new ShardPath(false, path, path, null, shardId), stats, null));
        }
        return IndicesStatsResponseTestUtils.newIndicesStatsResponse(shardStats.toArray(new ShardStats[shardStats.size()]),
                shardStats.size(), shardStats.size(), 0, emptyList());
    }
}
@@ -3,13 +3,14 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.indices;
package org.elasticsearch.marvel.agent.resolver.indices;

import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.indices.IndicesStatsCollector;
import org.elasticsearch.marvel.agent.resolver.indices.IndicesStatsResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -23,6 +24,7 @@ import static org.hamcrest.Matchers.greaterThan;

@ClusterScope(scope = Scope.TEST, numClientNodes = 0)
public class IndicesStatsTests extends MarvelIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()

@@ -72,14 +74,14 @@ public class IndicesStatsTests extends MarvelIntegTestCase {
        waitForMarvelIndices();

        logger.debug("--> wait for indices stats collector to collect global stat");
        awaitMarvelDocsCount(greaterThan(0L), IndicesStatsCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), IndicesStatsResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", IndicesStatsCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(IndicesStatsCollector.TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", IndicesStatsResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(IndicesStatsResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = IndicesStatsRenderer.FILTERS;
        String[] filters = new IndicesStatsResolver(Settings.EMPTY).filters();
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();
@@ -0,0 +1,65 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.node;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.collector.cluster.DiscoveryNodeMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;
import org.elasticsearch.marvel.agent.resolver.cluster.DiscoveryNodeResolver;
import org.elasticsearch.test.VersionUtils;

import java.util.UUID;

import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;

public class DiscoveryNodeResolverTests extends MonitoringIndexNameResolverTestCase<DiscoveryNodeMonitoringDoc, DiscoveryNodeResolver> {

    @Override
    protected DiscoveryNodeResolver newResolver() {
        return new DiscoveryNodeResolver();
    }

    @Override
    protected DiscoveryNodeMonitoringDoc newMarvelDoc() {
        DiscoveryNodeMonitoringDoc doc = new DiscoveryNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setNode(new DiscoveryNode(randomAsciiOfLength(3), UUID.randomUUID().toString(),
                DummyTransportAddress.INSTANCE, emptyMap(),
                VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT)));
        return doc;
    }

    @Override
    protected boolean checkFilters() {
        return false;
    }

    public void testDiscoveryNodeResolver() throws Exception {
        DiscoveryNodeMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        DiscoveryNodeResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-data-" + MarvelTemplateUtils.TEMPLATE_VERSION));
        assertThat(resolver.type(doc), equalTo(DiscoveryNodeResolver.TYPE));
        assertThat(resolver.id(doc), equalTo(doc.getNode().getId()));

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "node.id",
                "node.name",
                "node.transport_address",
                "node.attributes");
    }
}
@@ -3,12 +3,11 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.node;
package org.elasticsearch.marvel.agent.resolver.node;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.aggregations.Aggregation;

@@ -86,13 +85,13 @@ public class MultiNodesStatsTests extends MarvelIntegTestCase {

        logger.debug("--> checking that every node correctly reported its own node stats");
        assertBusy(() -> {
            String indices = MarvelSettings.MONITORING_INDICES_PREFIX + "*";
            String indices = MONITORING_INDICES_PREFIX + "*";
            securedFlush(indices);
            securedRefresh();

            try {
                SearchResponse response = client().prepareSearch(indices)
                        .setTypes(NodeStatsCollector.TYPE)
                        .setTypes(NodeStatsResolver.TYPE)
                        .setSize(0)
                        .addAggregation(AggregationBuilders.terms("nodes_ids").field("node_stats.node_id"))
                        .get();
@@ -0,0 +1,131 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.node;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingTestUtils;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.indices.NodeIndicesStats;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmStats;
import org.elasticsearch.monitor.os.OsProbe;
import org.elasticsearch.monitor.process.ProcessProbe;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolStats;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<NodeStatsMonitoringDoc, NodeStatsResolver> {

    @Override
    protected NodeStatsResolver newResolver() {
        return new NodeStatsResolver(Settings.EMPTY);
    }

    @Override
    protected NodeStatsMonitoringDoc newMarvelDoc() {
        NodeStatsMonitoringDoc doc = new NodeStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
        doc.setNodeMaster(randomBoolean());
        doc.setNodeId(UUID.randomUUID().toString());
        doc.setDiskThresholdDeciderEnabled(randomBoolean());
        doc.setDiskThresholdWaterMarkHigh(randomDouble());
        doc.setMlockall(randomBoolean());
        doc.setNodeStats(randomNodeStats());
        return doc;
    }

    @Override
    protected boolean checkResolvedId() {
        return false;
    }

    public void testNodeStatsResolver() throws IOException {
        NodeStatsMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        NodeStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(NodeStatsResolver.TYPE));
        assertThat(resolver.id(doc), nullValue());

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "node_stats");
    }

    /**
     * @return a random {@link NodeStats} object.
     */
    private NodeStats randomNodeStats() {
        Index index = new Index("test-" + randomIntBetween(0, 5), UUID.randomUUID().toString());
        ShardId shardId = new ShardId(index, 0);
        Path path = createTempDir().resolve("indices").resolve(index.getName()).resolve("0");
        ShardRouting shardRouting = ShardRouting.newUnassigned(index, 0, null, true,
                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        ShardRoutingTestUtils.initialize(shardRouting, "node-0");
        ShardRoutingTestUtils.moveToStarted(shardRouting);
        CommonStats stats = new CommonStats();
        stats.fieldData = new FieldDataStats();
        stats.queryCache = new QueryCacheStats();
        stats.docs = new DocsStats();
        stats.store = new StoreStats();
        stats.indexing = new IndexingStats();
        stats.search = new SearchStats();
        stats.segments = new SegmentsStats();
        ShardStats shardStats = new ShardStats(shardRouting, new ShardPath(false, path, path, null, shardId), stats, null);
        FsInfo.Path[] pathInfo = new FsInfo.Path[]{
                new FsInfo.Path("/test", "/dev/sda", 10, -8, 0),
        };
        Map<Index, List<IndexShardStats>> statsByShard = new HashMap<>();
        statsByShard.put(index, Collections.singletonList(new IndexShardStats(shardId, new ShardStats[]{shardStats})));
        List<ThreadPoolStats.Stats> threadPoolStats = Arrays.asList(new ThreadPoolStats.Stats(ThreadPool.Names.INDEX, 0, 0, 0, 0, 0, 0),
                new ThreadPoolStats.Stats(ThreadPool.Names.BULK, 0, 0, 0, 0, 0, 0),
                new ThreadPoolStats.Stats(ThreadPool.Names.SEARCH, 0, 0, 0, 0, 0, 0)
        );
        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
                new NodeIndicesStats(new CommonStats(), statsByShard), OsProbe.getInstance().osStats(),
                ProcessProbe.getInstance().processStats(), JvmStats.jvmStats(),
                new ThreadPoolStats(threadPoolStats),
                new FsInfo(0, pathInfo), null, null, null, null, null);
    }
}
@@ -3,14 +3,15 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.node;
package org.elasticsearch.marvel.agent.resolver.node;

import org.apache.lucene.util.Constants;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.node.NodeStatsCollector;
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.resolver.node.NodeStatsResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -25,6 +26,7 @@ import static org.hamcrest.Matchers.greaterThan;
// numClientNodes is set to 0 because Client nodes don't have Filesystem stats
@ClusterScope(scope = Scope.TEST, numClientNodes = 0, transportClientRatio = 0.0)
public class NodeStatsTests extends MarvelIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()

@@ -54,15 +56,14 @@ public class NodeStatsTests extends MarvelIntegTestCase {
        updateMarvelInterval(3L, TimeUnit.SECONDS);
        waitForMarvelIndices();

        awaitMarvelDocsCount(greaterThan(0L), NodeStatsCollector.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", NodeStatsCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(NodeStatsCollector.TYPE).get();
        awaitMarvelDocsCount(greaterThan(0L), NodeStatsResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", NodeStatsResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(NodeStatsResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = NodeStatsRenderer.FILTERS;
        String[] filters = new NodeStatsResolver(Settings.EMPTY).filters();
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();
@@ -0,0 +1,113 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.resolver.shards;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingTestUtils;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.marvel.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;

import java.util.UUID;

import static org.hamcrest.Matchers.equalTo;

public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<ShardMonitoringDoc, ShardsResolver> {

    @Override
    protected ShardsResolver newResolver() {
        return new ShardsResolver(Settings.EMPTY);
    }

    @Override
    protected ShardMonitoringDoc newMarvelDoc() {
        ShardMonitoringDoc doc = new ShardMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setClusterStateUUID(UUID.randomUUID().toString());
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));

        ShardRouting shardRouting = ShardRouting.newUnassigned(new Index(randomAsciiOfLength(5), UUID.randomUUID().toString()),
                randomIntBetween(0, 5), null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        ShardRoutingTestUtils.initialize(shardRouting, "node-0");
        ShardRoutingTestUtils.moveToStarted(shardRouting);
        ShardRoutingTestUtils.relocate(shardRouting, "node-1");
        doc.setShardRouting(shardRouting);
        return doc;
    }

    public void testShardsResolver() throws Exception {
        ShardMonitoringDoc doc = newMarvelDoc();
        doc.setTimestamp(1437580442979L);

        final String clusterStateUUID = UUID.randomUUID().toString();
        doc.setClusterStateUUID(clusterStateUUID);
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));

        ShardsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(ShardsResolver.TYPE));
        assertThat(resolver.id(doc), equalTo(ShardsResolver.id(clusterStateUUID, doc.getShardRouting())));
        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "state_uuid",
                "shard.state",
                "shard.primary",
                "shard.node",
                "shard.relocating_node",
                "shard.shard",
                "shard.index");

        final String index = "test-" + randomIntBetween(0, 100);
        final int shardId = randomIntBetween(0, 500);
        final boolean primary = randomBoolean();
        final String sourceNode = "node-" + randomIntBetween(0, 5);
        final String relocationNode = "node-" + randomIntBetween(6, 10);

        final ShardRouting shardRouting = ShardRouting.newUnassigned(new Index(index, ""), shardId, null, primary,
                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        ShardRoutingTestUtils.initialize(shardRouting, sourceNode);
        ShardRoutingTestUtils.moveToStarted(shardRouting);
        ShardRoutingTestUtils.relocate(shardRouting, relocationNode);
        doc.setShardRouting(shardRouting);

        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MarvelTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
        assertThat(resolver.type(doc), equalTo(ShardsResolver.TYPE));
        assertThat(resolver.id(doc), equalTo(ShardsResolver.id(clusterStateUUID, shardRouting)));
        assertThat(resolver.id(doc),
                equalTo(clusterStateUUID + ":" + sourceNode + ":" + index + ":" + shardId + ":" + (primary ? "p" : "r")));

        assertSource(resolver.source(doc, XContentType.JSON),
                "cluster_uuid",
                "timestamp",
                "source_node",
                "state_uuid",
                "shard.state",
                "shard.primary",
                "shard.node",
                "shard.relocating_node",
                "shard.shard",
                "shard.index");
    }

    public void testShardId() {
        ShardRouting shardRouting = ShardRouting.newUnassigned(new Index("bar", ""), 42, null, false,
                new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        assertThat(ShardsResolver.id("foo", shardRouting), equalTo("foo:_na:bar:42:r"));
        ShardRoutingTestUtils.initialize(shardRouting, "node1");
        ShardRoutingTestUtils.moveToStarted(shardRouting);
        assertThat(ShardsResolver.id("foo", shardRouting), equalTo("foo:node1:bar:42:r"));
    }
}
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.marvel.agent.renderer.shards;
package org.elasticsearch.marvel.agent.resolver.shards;

import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.action.search.SearchRequestBuilder;

@@ -11,8 +11,9 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.shards.ShardsCollector;
import org.elasticsearch.marvel.agent.resolver.shards.ShardsResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.Aggregation;

@@ -35,6 +36,7 @@ import static org.hamcrest.Matchers.instanceOf;
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
@ClusterScope(scope = Scope.TEST)
public class ShardsTests extends MarvelIntegTestCase {

    private static final String INDEX_PREFIX = "test-shards-";

    @Override

@@ -66,14 +68,14 @@ public class ShardsTests extends MarvelIntegTestCase {
        updateMarvelInterval(3L, TimeUnit.SECONDS);
        waitForMarvelIndices();

        awaitMarvelDocsCount(greaterThan(0L), ShardsCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), ShardsResolver.TYPE);

        logger.debug("--> searching for monitoring documents of type [{}]", ShardsCollector.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ShardsCollector.TYPE).get();
        logger.debug("--> searching for monitoring documents of type [{}]", ShardsResolver.TYPE);
        SearchResponse response = client().prepareSearch().setTypes(ShardsResolver.TYPE).get();
        assertThat(response.getHits().getTotalHits(), greaterThan(0L));

        logger.debug("--> checking that every document contains the expected fields");
        String[] filters = ShardsRenderer.FILTERS;
        String[] filters = new ShardsResolver(Settings.EMPTY).filters();
        for (SearchHit searchHit : response.getHits().getHits()) {
            Map<String, Object> fields = searchHit.sourceAsMap();

@@ -97,11 +99,11 @@ public class ShardsTests extends MarvelIntegTestCase {
        updateMarvelInterval(3L, TimeUnit.SECONDS);
        waitForMarvelIndices();

        awaitMarvelDocsCount(greaterThan(0L), ShardsCollector.TYPE);
        awaitMarvelDocsCount(greaterThan(0L), ShardsResolver.TYPE);

        SearchRequestBuilder searchRequestBuilder = client()
                .prepareSearch()
                .setTypes(ShardsCollector.TYPE)
                .setTypes(ShardsResolver.TYPE)
                .setQuery(QueryBuilders.termQuery("shard.index", indexName));

        String[] notAnalyzedFields = {"state_uuid", "shard.state", "shard.index", "shard.node"};
@@ -11,15 +11,18 @@ import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.Licensee;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoringIds;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.license.MarvelLicensee;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.VersionUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;

import java.util.Locale;

@@ -212,21 +215,26 @@ public abstract class AbstractIndicesCleanerTestCase extends MarvelIntegTestCase
        throw new IllegalStateException("unable to find listener");
    }

    private MonitoringDoc randomMonitoringDoc() {
        return new MonitoringDoc(randomFrom(MonitoringIds.values()).getId(), VersionUtils.randomVersion(random()).toString());
    }

    /**
     * Creates a monitoring data index in a given version.
     */
    protected void createDataIndex(int version, DateTime creationDate) {
        String indexName = MarvelSettings.MONITORING_DATA_INDEX_PREFIX + String.valueOf(version);
        createIndex(indexName, creationDate);
        createIndex(new MockDataIndexNameResolver(version).index(randomMonitoringDoc()), creationDate);
    }

    /**
     * Creates a monitoring timestamped index in a given version.
     */
    protected void createTimestampedIndex(int version, DateTime creationDate) {
        String indexName = MarvelSettings.MONITORING_INDICES_PREFIX + String.valueOf(version) + "-"
                + DateTimeFormat.forPattern(Exporter.DEFAULT_INDEX_NAME_TIME_FORMAT).withZoneUTC().print(creationDate.getMillis());
        createIndex(indexName, creationDate);
        MonitoringDoc monitoringDoc = randomMonitoringDoc();
        monitoringDoc.setTimestamp(creationDate.getMillis());

        MonitoringIndexNameResolver.Timestamped resolver = new MockTimestampedIndexNameResolver(monitoringDoc.getMonitoringId(), version);
        createIndex(resolver.index(monitoringDoc), creationDate);
    }

    protected abstract void createIndex(String name, DateTime creationDate);
@@ -47,17 +47,14 @@ public class LocalIndicesCleanerTests extends AbstractIndicesCleanerTestCase {

    @Override
    protected void assertIndicesCount(int count) throws Exception {
        assertBusy(new Runnable() {
            @Override
            public void run() {
                try {
                    assertThat(client().admin().indices().prepareGetSettings().get().getIndexToSettings().size(), equalTo(count));
                } catch (IndexNotFoundException e) {
                    if (shieldEnabled) {
                        assertThat(0, equalTo(count));
                    } else {
                        throw e;
                    }
        assertBusy(() -> {
            try {
                assertThat(client().admin().indices().prepareGetSettings().get().getIndexToSettings().size(), equalTo(count));
            } catch (IndexNotFoundException e) {
                if (shieldEnabled) {
                    assertThat(0, equalTo(count));
                } else {
                    throw e;
                }
            }
        });
@@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.shield.InternalClient;

@@ -45,8 +45,8 @@ public class MarvelInternalClientTests extends MarvelIntegTestCase {
        assertAccessIsAllowed(internalClient.admin().indices().prepareRecoveries());
        assertAccessIsAllowed(internalClient.admin().indices().prepareStats());

        assertAccessIsAllowed(internalClient.admin().indices().prepareDelete(MarvelSettings.MONITORING_INDICES_PREFIX));
        assertAccessIsAllowed(internalClient.admin().indices().prepareCreate(MarvelSettings.MONITORING_INDICES_PREFIX + "test"));
        assertAccessIsAllowed(internalClient.admin().indices().prepareDelete(MONITORING_INDICES_PREFIX + "*"));
        assertAccessIsAllowed(internalClient.admin().indices().prepareCreate(MONITORING_INDICES_PREFIX + "test"));

        assertAccessIsAllowed(internalClient.admin().indices().preparePutTemplate("foo")
                .setSource(MarvelTemplateUtils.loadTimestampedIndexTemplate()));