SOLR-13234: Prometheus Metric Exporter not threadsafe.

This changes the Prometheus exporter to collect metrics from Solr on a fixed interval controlled by this tool, and prevents concurrent collections. This change also improves performance slightly by using the cluster state instead of sending multiple HTTP requests to each node to look up all the cores.
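The heart of the fix is that all collection work now runs on a single-threaded scheduler with a fixed delay, so a slow scrape can no longer overlap the next one. A minimal, self-contained sketch of that pattern (the lambda is only a placeholder for the real work done by SchedulerMetricsCollector below):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class FixedIntervalSketch {
  public static void main(String[] args) {
    ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    // scheduleWithFixedDelay only starts timing the next run after the
    // previous one completes, and the pool has a single thread, so two
    // collections can never execute concurrently.
    scheduler.scheduleWithFixedDelay(
        () -> System.out.println("collecting metrics..."), // placeholder work
        0, 60, TimeUnit.SECONDS);
    // (a real program would eventually call scheduler.shutdownNow())
  }
}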

This closes #571.
Shalin Shekhar Mangar 2019-03-04 11:02:47 +05:30
parent fed80599a6
commit 1f9c767aac
42 changed files with 2679 additions and 1126 deletions

View File

@@ -81,6 +81,12 @@ Bug Fixes
* SOLR-11883: 500 code on functional query syntax errors and parameter dereferencing errors
(Munendra S N via Mikhail Khludnev)
* SOLR-13234: Prometheus Metric Exporter not threadsafe. This changes the Prometheus exporter to collect metrics
from Solr on a fixed interval controlled by this tool and prevents concurrent collections. This change also improves
performance slightly by using the cluster state instead of sending multiple HTTP requests to each node to look up
all the cores.
(Danyal Prout via shalin)
Improvements
----------------------
* SOLR-12999: Index replication could delete segments before downloading segments from master if there is not enough

View File

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.scraper.SolrScraper;
public class CollectionsCollector implements MetricCollector {
private final SolrScraper solrClient;
private final MetricsQuery metricsQuery;
public CollectionsCollector(SolrScraper solrClient, MetricsQuery metricsQuery) {
this.solrClient = solrClient;
this.metricsQuery = metricsQuery;
}
@Override
public MetricSamples collect() throws Exception {
return solrClient.collections(metricsQuery);
}
}

View File

@@ -0,0 +1,22 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
public interface MetricCollector {
MetricSamples collect() throws Exception;
}
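MetricCollector declares a single abstract method, so it is effectively a functional interface; for illustration, a hypothetical no-op collector can be written as a method reference:

MetricCollector noop = MetricSamples::new; // collect() returning empty samples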

View File

@@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import io.prometheus.client.Collector;
public class MetricSamples {
private final Map<String, Collector.MetricFamilySamples> samplesByMetricName;
public MetricSamples(Map<String, Collector.MetricFamilySamples> input) {
samplesByMetricName = input;
}
public MetricSamples() {
this(new HashMap<>());
}
public void addSamplesIfNotPresent(String metricName, Collector.MetricFamilySamples samples) {
samplesByMetricName.putIfAbsent(metricName, samples);
}
public void addSampleIfMetricExists(String metricName, Collector.MetricFamilySamples.Sample sample) {
Collector.MetricFamilySamples sampleFamily = samplesByMetricName.get(metricName);
if (sampleFamily == null) {
return;
}
if (!sampleFamily.samples.contains(sample)) {
sampleFamily.samples.add(sample);
}
}
public void addAll(MetricSamples other) {
for (String key : other.samplesByMetricName.keySet()) {
if (this.samplesByMetricName.containsKey(key)) {
for (Collector.MetricFamilySamples.Sample sample : other.samplesByMetricName.get(key).samples) {
addSampleIfMetricExists(key, sample);
}
} else {
this.samplesByMetricName.put(key, other.samplesByMetricName.get(key));
}
}
}
public List<Collector.MetricFamilySamples> asList() {
return samplesByMetricName.values().stream()
.filter(value -> !value.samples.isEmpty())
.collect(Collectors.toList());
}
}
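For illustration, a hedged usage sketch of the merge semantics (metric and label names are invented; imports of io.prometheus.client.Collector and java.util.* elided):

MetricSamples a = new MetricSamples();
a.addSamplesIfNotPresent("solr_ping", new Collector.MetricFamilySamples(
    "solr_ping", Collector.Type.GAUGE, "Ping status.", new ArrayList<>()));
a.addSampleIfMetricExists("solr_ping", new Collector.MetricFamilySamples.Sample(
    "solr_ping", Arrays.asList("core"), Arrays.asList("core1"), 1.0));
MetricSamples b = new MetricSamples();
b.addSamplesIfNotPresent("solr_ping", new Collector.MetricFamilySamples(
    "solr_ping", Collector.Type.GAUGE, "Ping status.", new ArrayList<>()));
b.addSampleIfMetricExists("solr_ping", new Collector.MetricFamilySamples.Sample(
    "solr_ping", Arrays.asList("core"), Arrays.asList("core2"), 1.0));
// addAll merges families by metric name and skips samples already present,
// so the result is a single solr_ping family holding both core samples.
a.addAll(b);
assert a.asList().size() == 1;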

View File

@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.scraper.SolrScraper;
public class MetricsCollector implements MetricCollector {
private final MetricsQuery metricsQuery;
private final SolrScraper solrClient;
public MetricsCollector(SolrScraper solrClient, MetricsQuery metricsQuery) {
this.solrClient = solrClient;
this.metricsQuery = metricsQuery;
}
@Override
public MetricSamples collect() throws Exception {
MetricSamples results = new MetricSamples();
solrClient.metricsForAllHosts(metricsQuery)
.forEach((host, samples) -> results.addAll(samples));
return results;
}
}

View File

@@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.solr.prometheus.exporter.MetricsConfiguration;
import org.apache.solr.prometheus.scraper.SolrScraper;
public class MetricsCollectorFactory {
private final MetricsConfiguration metricsConfiguration;
private final Executor executor;
private final int refreshInSeconds;
private final SolrScraper solrScraper;
public MetricsCollectorFactory(
Executor executor,
int refreshInSeconds,
SolrScraper solrScraper,
MetricsConfiguration metricsConfiguration) {
this.executor = executor;
this.refreshInSeconds = refreshInSeconds;
this.solrScraper = solrScraper;
this.metricsConfiguration = metricsConfiguration;
}
public SchedulerMetricsCollector create() {
Stream<MetricCollector> pings = metricsConfiguration.getPingConfiguration()
.stream()
.map(query -> new PingCollector(solrScraper, query));
Stream<MetricCollector> metrics = metricsConfiguration.getMetricsConfiguration()
.stream()
.map(query -> new MetricsCollector(solrScraper, query));
Stream<MetricCollector> searches = metricsConfiguration.getSearchConfiguration()
.stream()
.map(query -> new SearchCollector(solrScraper, query));
Stream<MetricCollector> collections = metricsConfiguration.getCollectionsConfiguration()
.stream()
.map(query -> new CollectionsCollector(solrScraper, query));
List<MetricCollector> collectors = Stream.of(pings, metrics, searches, collections)
.reduce(Stream::concat)
.orElseGet(Stream::empty)
.collect(Collectors.toList());
return new SchedulerMetricsCollector(executor, refreshInSeconds, TimeUnit.SECONDS, collectors);
}
}

View File

@@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.scraper.SolrScraper;
public class PingCollector implements MetricCollector {
private final SolrScraper solrScraper;
private final MetricsQuery metricsQuery;
public PingCollector(SolrScraper solrScraper, MetricsQuery metricsQuery) {
this.solrScraper = solrScraper;
this.metricsQuery = metricsQuery;
}
@Override
public MetricSamples collect() throws Exception {
MetricSamples results = new MetricSamples();
solrScraper.pingAllCollections(metricsQuery)
.forEach((collection, metrics) -> results.addAll(metrics));
solrScraper.pingAllCores(metricsQuery)
.forEach((collection, metrics) -> results.addAll(metrics));
return results;
}
}

View File

@@ -0,0 +1,131 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import java.io.Closeable;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import io.prometheus.client.Collector;
import io.prometheus.client.Histogram;
import org.apache.solr.prometheus.exporter.SolrExporter;
import org.apache.solr.prometheus.scraper.Async;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SchedulerMetricsCollector implements Closeable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public interface Observer {
void metricsUpdated(List<Collector.MetricFamilySamples> samples);
}
private final List<MetricCollector> metricCollectors;
private final int duration;
private final TimeUnit timeUnit;
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(
1,
new DefaultSolrThreadFactory("scheduled-metrics-collector"));
private final Executor executor;
private final List<Observer> observers = new CopyOnWriteArrayList<>();
private static final Histogram metricsCollectionTime = Histogram.build()
.name("solr_exporter_duration_seconds")
.help("Duration taken to record all metrics")
.register(SolrExporter.defaultRegistry);
public SchedulerMetricsCollector(
Executor executor,
int duration,
TimeUnit timeUnit,
List<MetricCollector> metricCollectors) {
this.executor = executor;
this.metricCollectors = metricCollectors;
this.duration = duration;
this.timeUnit = timeUnit;
}
public void start() {
scheduler.scheduleWithFixedDelay(this::collectMetrics, 0, duration, timeUnit);
}
private void collectMetrics() {
try (Histogram.Timer timer = metricsCollectionTime.startTimer()) {
log.info("Beginning metrics collection");
List<CompletableFuture<MetricSamples>> futures = new ArrayList<>();
for (MetricCollector metricsCollector : metricCollectors) {
futures.add(CompletableFuture.supplyAsync(() -> {
try {
return metricsCollector.collect();
} catch (Exception e) {
throw new RuntimeException(e);
}
}, executor));
}
try {
CompletableFuture<List<MetricSamples>> sampleFuture = Async.waitForAllSuccessfulResponses(futures);
List<MetricSamples> samples = sampleFuture.get();
MetricSamples metricSamples = new MetricSamples();
samples.forEach(metricSamples::addAll);
notifyObservers(metricSamples.asList());
log.info("Completed metrics collection");
} catch (InterruptedException | ExecutionException e) {
log.error("Error while waiting for metric collection to complete", e);
}
}
}
public void addObserver(Observer observer) {
this.observers.add(observer);
}
public void removeObserver(Observer observer) {
this.observers.remove(observer);
}
private void notifyObservers(List<Collector.MetricFamilySamples> samples) {
observers.forEach(observer -> observer.metricsUpdated(samples));
}
@Override
public void close() {
scheduler.shutdownNow();
}
}
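A hedged wiring sketch (executor, solrScraper and metricsConfiguration are assumed to already exist; Observer has a single method, so a lambda suffices — the real wiring is done by SolrExporter.start() further down):

SchedulerMetricsCollector collector = new MetricsCollectorFactory(
    executor, 60, solrScraper, metricsConfiguration).create();
collector.addObserver(samples ->
    System.out.println("collected " + samples.size() + " metric families"));
collector.start(); // first collection runs immediately, then every 60 seconds
// ...later:
collector.close(); // stops the scheduler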

View File

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.scraper.SolrScraper;
public class SearchCollector implements MetricCollector {
private final MetricsQuery metricsQuery;
private final SolrScraper solrClient;
public SearchCollector(SolrScraper solrClient, MetricsQuery metricsQuery) {
this.solrClient = solrClient;
this.metricsQuery = metricsQuery;
}
@Override
public MetricSamples collect() throws Exception {
return solrClient.search(metricsQuery);
}
}

View File

@@ -1,463 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.solr.core.XmlConfigFile;
import org.apache.solr.prometheus.scraper.SolrScraper;
import io.prometheus.client.Collector;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import java.lang.invoke.MethodHandles;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.TimeUnit;
/**
* SolrCollector
*/
public class SolrCollector extends Collector implements Collector.Describable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private SolrClient solrClient;
private XmlConfigFile config;
private int numThreads;
private ExecutorService executorService;
private static ObjectMapper om = new ObjectMapper();
/**
* Constructor.
*/
public SolrCollector(SolrClient solrClient, XmlConfigFile config, int numThreads) {
this.solrClient = solrClient;
this.config = config;
this.numThreads = numThreads;
}
/**
* Describe scrape status.
*/
public List<Collector.MetricFamilySamples> describe() {
List<MetricFamilySamples> metricFamilies = new ArrayList<>();
metricFamilies.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr scrape took, in seconds.", new ArrayList<>()));
return metricFamilies;
}
/**
* Collect samples.
*/
public List<MetricFamilySamples> collect() {
// start time of scraping.
long startTime = System.nanoTime();
this.executorService = ExecutorUtil.newMDCAwareFixedThreadPool(numThreads, new DefaultSolrThreadFactory("solr-exporter"));
Map<String, MetricFamilySamples> metricFamilySamplesMap = new LinkedHashMap<>();
List<Future<Map<String, MetricFamilySamples>>> futureList = new ArrayList<>();
try {
// Ping
Node pingNode = this.config.getNode("/config/rules/ping", true);
if (pingNode != null) {
NamedList pingNL = DOMUtil.childNodesToNamedList(pingNode);
List<NamedList> requestsNL = pingNL.getAll("request");
if (this.solrClient instanceof CloudSolrClient) {
// in SolrCloud mode
List<HttpSolrClient> httpSolrClients = new ArrayList<>();
try {
httpSolrClients = getHttpSolrClients((CloudSolrClient) this.solrClient);
for (HttpSolrClient httpSolrClient : httpSolrClients) {
for (NamedList requestNL : requestsNL) {
String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
String collectionName = (String) ((NamedList) requestNL.get("query")).get("collection");
if (coreName == null && collectionName == null) {
try {
List<String> cores = getCores(httpSolrClient);
for (String core : cores) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
LinkedHashMap query = (LinkedHashMap) conf.get("query");
if (query != null) {
query.put("core", core);
}
SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
} catch (SolrServerException | IOException e) {
this.log.error("failed to get cores: " + e.getMessage());
}
} else if (coreName != null && collectionName == null) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
}
// wait for HttpSolrClients
for (Future<Map<String, MetricFamilySamples>> future : futureList) {
try {
Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
mergeMetrics(metricFamilySamplesMap, m);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
this.log.error(e.getMessage());
}
}
} catch (SolrServerException | IOException e) {
this.log.error("failed to get HttpSolrClients: " + e.getMessage());
} finally {
for (HttpSolrClient httpSolrClient : httpSolrClients) {
try {
httpSolrClient.close();
} catch (IOException e) {
this.log.error("failed to close HttpSolrClient: " + e.getMessage());
}
}
}
// collection
for (NamedList requestNL : requestsNL) {
String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
String collectionName = (String) ((NamedList) requestNL.get("query")).get("collection");
if (coreName == null && collectionName == null) {
try {
List<String> collections = getCollections((CloudSolrClient) this.solrClient);
for (String collection : collections) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
LinkedHashMap query = (LinkedHashMap) conf.get("query");
if (query != null) {
query.put("collection", collection);
}
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
} catch (SolrServerException | IOException e) {
this.log.error("failed to get cores: " + e.getMessage());
}
} else if (coreName == null && collectionName != null) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
} else {
// in Standalone mode
for (NamedList requestNL : requestsNL) {
String coreName = (String) ((NamedList) requestNL.get("query")).get("core");
if (coreName == null) {
try {
List<String> cores = getCores((HttpSolrClient) this.solrClient);
for (String core : cores) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
LinkedHashMap query = (LinkedHashMap) conf.get("query");
if (query != null) {
query.put("core", core);
}
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
} catch (SolrServerException | IOException e) {
this.log.error("failed to get cores: " + e.getMessage());
}
} else {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
}
}
// Metrics
Node metricsNode = this.config.getNode("/config/rules/metrics", false);
if (metricsNode != null) {
NamedList metricsNL = DOMUtil.childNodesToNamedList(metricsNode);
List<NamedList> requestsNL = metricsNL.getAll("request");
if (this.solrClient instanceof CloudSolrClient) {
// in SolrCloud mode
List<HttpSolrClient> httpSolrClients = new ArrayList<>();
try {
httpSolrClients = getHttpSolrClients((CloudSolrClient) this.solrClient);
for (HttpSolrClient httpSolrClient : httpSolrClients) {
for (NamedList requestNL : requestsNL) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(httpSolrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
// wait for HttpSolrClients
for (Future<Map<String, MetricFamilySamples>> future : futureList) {
try {
Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
mergeMetrics(metricFamilySamplesMap, m);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
this.log.error(e.getMessage());
}
}
} catch (SolrServerException | IOException e) {
this.log.error(e.getMessage());
} finally {
for (HttpSolrClient httpSolrClient : httpSolrClients) {
try {
httpSolrClient.close();
} catch (IOException e) {
this.log.error(e.getMessage());
}
}
}
} else {
// in Standalone mode
for (NamedList requestNL : requestsNL) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
}
// Collections
Node collectionsNode = this.config.getNode("/config/rules/collections", false);
if (collectionsNode != null && this.solrClient instanceof CloudSolrClient) {
NamedList collectionsNL = DOMUtil.childNodesToNamedList(collectionsNode);
List<NamedList> requestsNL = collectionsNL.getAll("request");
for (NamedList requestNL : requestsNL) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
// Search
Node searchNode = this.config.getNode("/config/rules/search", false);
if (searchNode != null) {
NamedList searchNL = DOMUtil.childNodesToNamedList(searchNode);
List<NamedList> requestsNL = searchNL.getAll("request");
for (NamedList requestNL : requestsNL) {
LinkedHashMap conf = (LinkedHashMap) requestNL.asMap(10);
SolrScraper scraper = new SolrScraper(this.solrClient, conf);
Future<Map<String, MetricFamilySamples>> future = this.executorService.submit(scraper);
futureList.add(future);
}
}
// get future
for (Future<Map<String, MetricFamilySamples>> future : futureList) {
try {
Map<String, MetricFamilySamples> m = future.get(60, TimeUnit.SECONDS);
mergeMetrics(metricFamilySamplesMap, m);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
this.log.error(e.getMessage());
}
}
} catch (Exception e) {
this.log.error(e.getMessage());
e.printStackTrace();
}
// return value
List<MetricFamilySamples> metricFamiliesSamplesList = new ArrayList<>();
// add solr metrics
for (String gaugeMetricName : metricFamilySamplesMap.keySet()) {
MetricFamilySamples metricFamilySamples = metricFamilySamplesMap.get(gaugeMetricName);
if (metricFamilySamples.samples.size() > 0) {
metricFamiliesSamplesList.add(metricFamilySamples);
}
}
// add scrape duration metric
List<MetricFamilySamples.Sample> durationSample = new ArrayList<>();
durationSample.add(new MetricFamilySamples.Sample("solr_exporter_duration_seconds", new ArrayList<>(), new ArrayList<>(), (System.nanoTime() - startTime) / 1.0E9));
metricFamiliesSamplesList.add(new MetricFamilySamples("solr_exporter_duration_seconds", Type.GAUGE, "Time this Solr exporter took, in seconds.", durationSample));
this.executorService.shutdown();
return metricFamiliesSamplesList;
}
/**
* Merge metrics.
*/
private Map<String, MetricFamilySamples> mergeMetrics(Map<String, MetricFamilySamples> metrics1, Map<String, MetricFamilySamples> metrics2) {
// merge MetricFamilySamples
for (String k : metrics2.keySet()) {
if (metrics1.containsKey(k)) {
for (MetricFamilySamples.Sample sample : metrics2.get(k).samples) {
if (!metrics1.get(k).samples.contains(sample)) {
metrics1.get(k).samples.add(sample);
}
}
} else {
metrics1.put(k, metrics2.get(k));
}
}
return metrics1;
}
/**
* Get target cores via CoreAdminAPI.
*/
public static List<String> getCores(HttpSolrClient httpSolrClient) throws SolrServerException, IOException {
List<String> cores = new ArrayList<>();
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
httpSolrClient.setParser(responseParser);
CoreAdminRequest coreAdminRequest = new CoreAdminRequest();
coreAdminRequest.setAction(CoreAdminParams.CoreAdminAction.STATUS);
coreAdminRequest.setIndexInfoNeeded(false);
NamedList<Object> coreAdminResponse = httpSolrClient.request(coreAdminRequest);
JsonNode statusJsonNode = om.readTree((String) coreAdminResponse.get("response")).get("status");
for (Iterator<JsonNode> i = statusJsonNode.iterator(); i.hasNext(); ) {
String core = i.next().get("name").textValue();
if (!cores.contains(core)) {
cores.add(core);
}
}
return cores;
}
/**
* Get target cores via CollectionsAPI.
*/
public static List<String> getCollections(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
List<String> collections = new ArrayList<>();
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
cloudSolrClient.setParser(responseParser);
CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.List();
NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
JsonNode collectionsJsonNode = om.readTree((String) collectionAdminResponse.get("response")).get("collections");
for (Iterator<JsonNode> i = collectionsJsonNode.iterator(); i.hasNext(); ) {
String collection = i.next().textValue();
if (!collections.contains(collection)) {
collections.add(collection);
}
}
return collections;
}
/**
* Get base urls via CollectionsAPI.
*/
private List<String> getBaseUrls(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
List<String> baseUrls = new ArrayList<>();
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
cloudSolrClient.setParser(responseParser);
CollectionAdminRequest collectionAdminRequest = new CollectionAdminRequest.ClusterStatus();
NamedList<Object> collectionAdminResponse = cloudSolrClient.request(collectionAdminRequest);
List<JsonNode> baseUrlJsonNode = om.readTree((String) collectionAdminResponse.get("response")).findValues("base_url");
for (Iterator<JsonNode> i = baseUrlJsonNode.iterator(); i.hasNext(); ) {
String baseUrl = i.next().textValue();
if (!baseUrls.contains(baseUrl)) {
baseUrls.add(baseUrl);
}
}
return baseUrls;
}
/**
* Get HTTP Solr Clients
*/
private List<HttpSolrClient> getHttpSolrClients(CloudSolrClient cloudSolrClient) throws SolrServerException, IOException {
List<HttpSolrClient> solrClients = new ArrayList<>();
for (String baseUrl : getBaseUrls(cloudSolrClient)) {
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
builder.withBaseSolrUrl(baseUrl);
HttpSolrClient httpSolrClient = builder.build();
httpSolrClient.setParser(responseParser);
solrClients.add(httpSolrClient);
}
return solrClients;
}
}

View File

@@ -15,7 +15,7 @@
* limitations under the License.
*/
/**
* Solr metrics collector.
/**
* Collects metrics from Solr via various endpoints.
*/
package org.apache.solr.prometheus.collector;

View File

@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.ArrayList;
import java.util.List;
import io.prometheus.client.Collector;
import org.apache.solr.prometheus.collector.SchedulerMetricsCollector;
public class CachedPrometheusCollector extends Collector implements SchedulerMetricsCollector.Observer {
private volatile List<MetricFamilySamples> samples = new ArrayList<>();
@Override
public List<MetricFamilySamples> collect() {
return samples;
}
@Override
public void metricsUpdated(List<MetricFamilySamples> samples) {
this.samples = samples;
}
}

View File

@@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.Collections;
import java.util.List;
import net.thisptr.jackson.jq.exception.JsonQueryException;
import org.apache.solr.core.XmlConfigFile;
import org.w3c.dom.Node;
public class MetricsConfiguration {
private final PrometheusExporterSettings settings;
private final List<MetricsQuery> pingConfiguration;
private final List<MetricsQuery> metricsConfiguration;
private final List<MetricsQuery> collectionsConfiguration;
private final List<MetricsQuery> searchConfiguration;
private MetricsConfiguration(
PrometheusExporterSettings settings,
List<MetricsQuery> pingConfiguration,
List<MetricsQuery> metricsConfiguration,
List<MetricsQuery> collectionsConfiguration,
List<MetricsQuery> searchConfiguration) {
this.settings = settings;
this.pingConfiguration = pingConfiguration;
this.metricsConfiguration = metricsConfiguration;
this.collectionsConfiguration = collectionsConfiguration;
this.searchConfiguration = searchConfiguration;
}
public PrometheusExporterSettings getSettings() {
return settings;
}
public List<MetricsQuery> getPingConfiguration() {
return pingConfiguration;
}
public List<MetricsQuery> getMetricsConfiguration() {
return metricsConfiguration;
}
public List<MetricsQuery> getCollectionsConfiguration() {
return collectionsConfiguration;
}
public List<MetricsQuery> getSearchConfiguration() {
return searchConfiguration;
}
public static MetricsConfiguration from(XmlConfigFile config) throws Exception {
Node settings = config.getNode("/config/settings", false);
Node pingConfig = config.getNode("/config/rules/ping", false);
Node metricsConfig = config.getNode("/config/rules/metrics", false);
Node collectionsConfig = config.getNode("/config/rules/collections", false);
Node searchConfiguration = config.getNode("/config/rules/search", false);
return new MetricsConfiguration(
settings == null ? PrometheusExporterSettings.builder().build() : PrometheusExporterSettings.from(settings),
toMetricQueries(pingConfig),
toMetricQueries(metricsConfig),
toMetricQueries(collectionsConfig),
toMetricQueries(searchConfiguration)
);
}
private static List<MetricsQuery> toMetricQueries(Node node) throws JsonQueryException {
if (node == null) {
return Collections.emptyList();
}
return MetricsQuery.from(node);
}
}
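A loading sketch, assuming the configuration file sits at ./conf/solr-exporter-config.xml (this mirrors loadMetricsConfiguration in SolrExporter below):

// MetricsConfiguration.from declares "throws Exception"; handle or propagate it.
try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("./conf"))) {
  XmlConfigFile config = new XmlConfigFile(loader, "solr-exporter-config.xml");
  MetricsConfiguration metricsConfiguration = MetricsConfiguration.from(config);
  // ...pass metricsConfiguration to MetricsCollectorFactory or SolrExporter
}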

View File

@@ -0,0 +1,137 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import net.thisptr.jackson.jq.JsonQuery;
import net.thisptr.jackson.jq.exception.JsonQueryException;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.DOMUtil;
import org.w3c.dom.Node;
public class MetricsQuery {
private final String path;
private final ModifiableSolrParams parameters;
private final String core;
private final String collection;
private final List<JsonQuery> jsonQueries;
private MetricsQuery(
String path,
ModifiableSolrParams parameters,
String core,
String collection,
List<JsonQuery> jsonQueries) {
this.path = path;
this.parameters = parameters;
this.core = core;
this.collection = collection;
this.jsonQueries = jsonQueries;
}
public MetricsQuery withCore(String core) {
return new MetricsQuery(
getPath(),
getParameters(),
core,
getCollection().orElse(null),
getJsonQueries()
);
}
public MetricsQuery withCollection(String collection) {
return new MetricsQuery(
getPath(),
getParameters(),
getCore().orElse(null),
collection,
getJsonQueries()
);
}
public String getPath() {
return path;
}
public Optional<String> getCore() {
return Optional.ofNullable(core);
}
public Optional<String> getCollection() {
return Optional.ofNullable(collection);
}
public List<JsonQuery> getJsonQueries() {
return jsonQueries;
}
public static List<MetricsQuery> from(Node node) throws JsonQueryException {
List<MetricsQuery> metricsQueries = new ArrayList<>();
NamedList config = DOMUtil.childNodesToNamedList(node);
List<NamedList> requests = config.getAll("request");
for (NamedList request : requests) {
NamedList query = (NamedList) request.get("query");
NamedList queryParameters = (NamedList) query.get("params");
String path = (String) query.get("path");
String core = (String) query.get("core");
String collection = (String) query.get("collection");
List<String> jsonQueries = (ArrayList<String>) request.get("jsonQueries");
ModifiableSolrParams params = new ModifiableSolrParams();
if (queryParameters != null) {
for (Map.Entry<String, String> entrySet : (Set<Map.Entry<String, String>>) queryParameters.asShallowMap().entrySet()) {
params.add(entrySet.getKey(), entrySet.getValue());
}
}
QueryRequest queryRequest = new QueryRequest(params);
queryRequest.setPath(path);
List<JsonQuery> compiledQueries = new ArrayList<>();
if (jsonQueries != null) {
for (String jsonQuery : jsonQueries) {
JsonQuery compiledJsonQuery = JsonQuery.compile(jsonQuery);
compiledQueries.add(compiledJsonQuery);
}
}
metricsQueries.add(new MetricsQuery(
path,
params,
core,
collection,
compiledQueries));
}
return metricsQueries;
}
public ModifiableSolrParams getParameters() {
return parameters;
}
}

View File

@@ -0,0 +1,96 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.List;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.DOMUtil;
import org.w3c.dom.Node;
public class PrometheusExporterSettings {
private final int httpConnectionTimeout;
private final int httpReadTimeout;
public static Builder builder() {
return new Builder();
}
public static class Builder {
private int httpConnectionTimeout = 10000;
private int httpReadTimeout = 60000;
private Builder() {
}
public Builder withConnectionHttpTimeout(int httpConnectionTimeout) {
this.httpConnectionTimeout = httpConnectionTimeout;
return this;
}
public Builder withReadHttpTimeout(int httpReadTimeout) {
this.httpReadTimeout = httpReadTimeout;
return this;
}
public PrometheusExporterSettings build() {
return new PrometheusExporterSettings(httpConnectionTimeout, httpReadTimeout);
}
}
public static PrometheusExporterSettings from(Node settings) {
NamedList config = DOMUtil.childNodesToNamedList(settings);
Builder builder = builder();
List<NamedList> httpClientSettings = config.getAll("httpClients");
for (NamedList entry : httpClientSettings) {
Integer connectionTimeout = (Integer) entry.get("connectionTimeout");
if (connectionTimeout != null) {
builder.withConnectionHttpTimeout(connectionTimeout);
}
Integer readTimeout = (Integer) entry.get("readTimeout");
if (readTimeout != null) {
builder.withReadHttpTimeout(readTimeout);
}
}
return builder.build();
}
private PrometheusExporterSettings(
int httpConnectionTimeout,
int httpReadTimeout) {
this.httpConnectionTimeout = httpConnectionTimeout;
this.httpReadTimeout = httpReadTimeout;
}
public int getHttpConnectionTimeout() {
return httpConnectionTimeout;
}
public int getHttpReadTimeout() {
return httpReadTimeout;
}
}
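For illustration, building the settings programmatically instead of from XML (the timeout values are examples; the defaults are 10000 ms for connections and 60000 ms for reads):

PrometheusExporterSettings settings = PrometheusExporterSettings.builder()
    .withConnectionHttpTimeout(5000)
    .withReadHttpTimeout(30000)
    .build();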

View File

@@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.Locale;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.zookeeper.client.ConnectStringParser;
public class SolrClientFactory {
private PrometheusExporterSettings settings;
public SolrClientFactory(PrometheusExporterSettings settings) {
this.settings = settings;
}
public HttpSolrClient createStandaloneSolrClient(String solrHost) {
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
HttpSolrClient.Builder standaloneBuilder = new HttpSolrClient.Builder();
standaloneBuilder.withBaseSolrUrl(solrHost);
standaloneBuilder.withConnectionTimeout(settings.getHttpConnectionTimeout())
.withSocketTimeout(settings.getHttpReadTimeout());
HttpSolrClient httpSolrClient = standaloneBuilder.build();
httpSolrClient.setParser(responseParser);
return httpSolrClient;
}
public CloudSolrClient createCloudSolrClient(String zookeeperConnectionString) {
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
ConnectStringParser parser = new ConnectStringParser(zookeeperConnectionString);
CloudSolrClient.Builder cloudBuilder = new CloudSolrClient.Builder(
parser.getServerAddresses().stream()
.map(address -> String.format(Locale.ROOT, "%s:%s", address.getHostString(), address.getPort()))
.collect(Collectors.toList()),
Optional.ofNullable(parser.getChrootPath()));
cloudBuilder.withConnectionTimeout(settings.getHttpConnectionTimeout())
.withSocketTimeout(settings.getHttpReadTimeout());
CloudSolrClient client = cloudBuilder.build();
client.setParser(responseParser);
client.connect();
return client;
}
}
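A hedged usage sketch (the base URL is the documented standalone default):

SolrClientFactory factory =
    new SolrClientFactory(PrometheusExporterSettings.builder().build());
HttpSolrClient client =
    factory.createStandaloneSolrClient("http://localhost:8983/solr");
// ...use the client, then close it when finished
IOUtils.closeQuietly(client);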

View File

@@ -16,194 +16,146 @@
*/
package org.apache.solr.prometheus.exporter;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Counter;
import io.prometheus.client.exporter.HTTPServer;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.Namespace;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.solr.core.XmlConfigFile;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.prometheus.collector.SolrCollector;
import org.apache.solr.core.XmlConfigFile;
import org.apache.solr.prometheus.collector.MetricsCollectorFactory;
import org.apache.solr.prometheus.collector.SchedulerMetricsCollector;
import org.apache.solr.prometheus.scraper.SolrCloudScraper;
import org.apache.solr.prometheus.scraper.SolrScraper;
import org.apache.solr.prometheus.scraper.SolrStandaloneScraper;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import java.lang.invoke.MethodHandles;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SolrExporter
*/
public class SolrExporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static final String[] ARG_PORT_FLAGS = { "-p", "--port" };
private static final String[] ARG_PORT_FLAGS = {"-p", "--port"};
private static final String ARG_PORT_METAVAR = "PORT";
private static final String ARG_PORT_DEST = "port";
private static final Integer ARG_PORT_DEFAULT = 9983;
private static final String ARG_PORT_HELP = "Specify the solr-exporter HTTP listen port; default is " + String.valueOf(ARG_PORT_DEFAULT) + ".";
private static final int ARG_PORT_DEFAULT = 9983;
private static final String ARG_PORT_HELP = "Specify the solr-exporter HTTP listen port; default is " + ARG_PORT_DEFAULT + ".";
private static final String[] ARG_BASE_URL_FLAGS = { "-b", "--baseurl" };
private static final String[] ARG_BASE_URL_FLAGS = {"-b", "--baseurl"};
private static final String ARG_BASE_URL_METAVAR = "BASE_URL";
private static final String ARG_BASE_URL_DEST = "baseUrl";
private static final String ARG_BASE_URL_DEFAULT = "";
private static final String ARG_BASE_URL_DEFAULT = "http://localhost:8983/solr";
private static final String ARG_BASE_URL_HELP = "Specify the Solr base URL when connecting to Solr in standalone mode. If both the -b and -z parameters are omitted, the exporter connects to http://localhost:8983/solr. For example 'http://localhost:8983/solr'.";
private static final String[] ARG_ZK_HOST_FLAGS = { "-z", "--zkhost" };
private static final String[] ARG_ZK_HOST_FLAGS = {"-z", "--zkhost"};
private static final String ARG_ZK_HOST_METAVAR = "ZK_HOST";
private static final String ARG_ZK_HOST_DEST = "zkHost";
private static final String ARG_ZK_HOST_DEFAULT = "";
private static final String ARG_ZK_HOST_HELP = "Specify the ZooKeeper connection string when connecting to Solr in SolrCloud mode. If both the -b and -z parameters are omitted, the exporter connects to http://localhost:8983/solr. For example 'localhost:2181/solr'.";
private static final String[] ARG_CONFIG_FLAGS = { "-f", "--config-file" };
private static final String[] ARG_CONFIG_FLAGS = {"-f", "--config-file"};
private static final String ARG_CONFIG_METAVAR = "CONFIG";
private static final String ARG_CONFIG_DEST = "configFile";
private static final String ARG_CONFIG_DEFAULT = "./conf/solr-exporter-config.xml";
private static final String ARG_CONFIG_HELP = "Specify the configuration file; default is " + ARG_CONFIG_DEFAULT + ".";
private static final String ARG_CONFIG_HELP = "Specify the configuration file; the default is " + ARG_CONFIG_DEFAULT + ".";
private static final String[] ARG_NUM_THREADS_FLAGS = { "-n", "--num-threads" };
private static final String[] ARG_SCRAPE_INTERVAL_FLAGS = {"-s", "--scrape-interval"};
private static final String ARG_SCRAPE_INTERVAL_METAVAR = "SCRAPE_INTERVAL";
private static final String ARG_SCRAPE_INTERVAL_DEST = "scrapeInterval";
private static final int ARG_SCRAPE_INTERVAL_DEFAULT = 60;
private static final String ARG_SCRAPE_INTERVAL_HELP = "Specify the delay between scraping Solr metrics; the default is " + ARG_SCRAPE_INTERVAL_DEFAULT + " seconds.";
private static final String[] ARG_NUM_THREADS_FLAGS = {"-n", "--num-threads"};
private static final String ARG_NUM_THREADS_METAVAR = "NUM_THREADS";
private static final String ARG_NUM_THREADS_DEST = "numThreads";
private static final Integer ARG_NUM_THREADS_DEFAULT = 1;
private static final String ARG_NUM_THREADS_HELP = "Specify the number of threads. solr-exporter creates thread pools for requests to Solr. If you need to improve request latency via solr-exporter, you can increase the number of threads; default is " + String.valueOf(ARG_NUM_THREADS_DEFAULT) + ".";
private static final String ARG_NUM_THREADS_HELP = "Specify the number of threads. solr-exporter creates thread pools for requests to Solr. If you need to improve request latency via solr-exporter, you can increase the number of threads; the default is " + ARG_NUM_THREADS_DEFAULT + ".";
private int port;
private SolrClient solrClient;
private XmlConfigFile config;
private int numThreads;
public static final CollectorRegistry defaultRegistry = new CollectorRegistry();
CollectorRegistry registry = new CollectorRegistry();
private final int port;
private final CachedPrometheusCollector prometheusCollector;
private final SchedulerMetricsCollector metricsCollector;
private final SolrScraper solrScraper;
private final ExecutorService metricCollectorExecutor;
private final ExecutorService requestExecutor;
private HTTPServer httpServer;
private SolrCollector collector;
private SolrResourceLoader loader;
public static final Counter scrapeErrorTotal = Counter.build()
.name("solr_exporter_scrape_error_total")
.help("Number of scrape errors.").register();
/**
* Constructor.
*/
public SolrExporter(int port, String connStr, Path configPath, int numThreads) throws ParserConfigurationException, SAXException, IOException {
this(port, createClient(connStr), configPath, numThreads);
}
/**
* Constructor.
*/
public SolrExporter(int port, SolrClient solrClient, Path configPath, int numThreads) throws ParserConfigurationException, SAXException, IOException {
super();
this.loader = new SolrResourceLoader(configPath.getParent());
public SolrExporter(
int port,
int numberThreads,
int scrapeInterval,
SolrScrapeConfiguration scrapeConfiguration,
MetricsConfiguration metricsConfiguration) {
this.port = port;
this.solrClient = solrClient;
this.config = new XmlConfigFile(this.loader, configPath.getFileName().toString());
this.numThreads = numThreads;
this.metricCollectorExecutor = ExecutorUtil.newMDCAwareFixedThreadPool(
numberThreads,
new DefaultSolrThreadFactory("solr-exporter-collectors"));
this.requestExecutor = ExecutorUtil.newMDCAwareFixedThreadPool(
numberThreads,
new DefaultSolrThreadFactory("solr-exporter-requests"));
this.solrScraper = createScraper(scrapeConfiguration, metricsConfiguration.getSettings());
this.metricsCollector = new MetricsCollectorFactory(metricCollectorExecutor, scrapeInterval, solrScraper, metricsConfiguration).create();
this.prometheusCollector = new CachedPrometheusCollector();
}
/**
* Start HTTP server for exporting Solr metrics.
*/
public void start() throws IOException {
this.collector = new SolrCollector(solrClient, config, numThreads);
this.registry.register(this.collector);
this.registry.register(scrapeErrorTotal);
this.httpServer = new HTTPServer(new InetSocketAddress(port), this.registry);
void start() throws IOException {
defaultRegistry.register(prometheusCollector);
metricsCollector.addObserver(prometheusCollector);
metricsCollector.start();
httpServer = new HTTPServer(new InetSocketAddress(port), defaultRegistry);
}
/**
* Stop HTTP server for exporting Solr metrics.
*/
public void stop() {
this.httpServer.stop();
this.registry.unregister(this.collector);
void stop() {
httpServer.stop();
metricsCollector.removeObserver(prometheusCollector);
requestExecutor.shutdownNow();
metricCollectorExecutor.shutdownNow();
IOUtils.closeQuietly(metricsCollector);
IOUtils.closeQuietly(solrScraper);
defaultRegistry.unregister(this.prometheusCollector);
}
/**
* Create Solr client
*/
private static SolrClient createClient(String connStr) {
SolrClient solrClient;
private SolrScraper createScraper(SolrScrapeConfiguration configuration, PrometheusExporterSettings settings) {
SolrClientFactory factory = new SolrClientFactory(settings);
Pattern baseUrlPattern = Pattern.compile("^https?:\\/\\/[\\w\\/:%#\\$&\\?\\(\\)~\\.=\\+\\-]+$");
Pattern zkHostPattern = Pattern.compile("^(?<host>[^\\/]+)(?<chroot>|(?:\\/.*))$");
Matcher matcher;
matcher = baseUrlPattern.matcher(connStr);
if (matcher.matches()) {
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
builder.withBaseSolrUrl(connStr);
HttpSolrClient httpSolrClient = builder.build();
httpSolrClient.setParser(responseParser);
solrClient = httpSolrClient;
} else {
String host = "";
String chroot = "";
matcher = zkHostPattern.matcher(connStr);
if (matcher.matches()) {
host = matcher.group("host") != null ? matcher.group("host") : "";
chroot = matcher.group("chroot") != null ? matcher.group("chroot") : "";
}
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
CloudSolrClient.Builder builder = new CloudSolrClient.Builder();
if (host.contains(",")) {
List<String> hosts = new ArrayList<>();
for (String h : host.split(",")) {
if (h != null && !h.equals("")) {
hosts.add(h.trim());
}
}
builder.withZkHost(hosts);
} else {
builder.withZkHost(host);
}
if (chroot.equals("")) {
builder.withZkChroot("/");
} else {
builder.withZkChroot(chroot);
}
CloudSolrClient cloudSolrClient = builder.build();
cloudSolrClient.setParser(responseParser);
solrClient = cloudSolrClient;
switch (configuration.getType()) {
case STANDALONE:
return new SolrStandaloneScraper(
factory.createStandaloneSolrClient(configuration.getSolrHost().get()), requestExecutor);
case CLOUD:
return new SolrCloudScraper(
factory.createCloudSolrClient(configuration.getZookeeperConnectionString().get()), requestExecutor, factory);
default:
throw new RuntimeException(String.format(Locale.ROOT, "Invalid type: %s", configuration.getType()));
}
}
/**
* Entry point of SolrExporter.
*/
public static void main(String[] args) {
ArgumentParser parser = ArgumentParsers.newFor(SolrExporter.class.getSimpleName()).build()
.description("Prometheus exporter for Apache Solr.");
parser.addArgument(ARG_PORT_FLAGS)
@ -222,6 +174,10 @@ public class SolrExporter {
.metavar(ARG_CONFIG_METAVAR).dest(ARG_CONFIG_DEST).type(String.class)
.setDefault(ARG_CONFIG_DEFAULT).help(ARG_CONFIG_HELP);
parser.addArgument(ARG_SCRAPE_INTERVAL_FLAGS)
.metavar(ARG_SCRAPE_INTERVAL_METAVAR).dest(ARG_SCRAPE_INTERVAL_DEST).type(Integer.class)
.setDefault(ARG_SCRAPE_INTERVAL_DEFAULT).help(ARG_SCRAPE_INTERVAL_HELP);
parser.addArgument(ARG_NUM_THREADS_FLAGS)
.metavar(ARG_NUM_THREADS_METAVAR).dest(ARG_NUM_THREADS_DEST).type(Integer.class)
.setDefault(ARG_NUM_THREADS_DEFAULT).help(ARG_NUM_THREADS_HELP);
@ -229,25 +185,43 @@ public class SolrExporter {
try {
Namespace res = parser.parseArgs(args);
SolrScrapeConfiguration scrapeConfiguration = null;
if (!res.getString(ARG_ZK_HOST_DEST).equals("")) {
scrapeConfiguration = SolrScrapeConfiguration.solrCloud(res.getString(ARG_ZK_HOST_DEST));
} else if (!res.getString(ARG_BASE_URL_DEST).equals("")) {
scrapeConfiguration = SolrScrapeConfiguration.standalone(res.getString(ARG_BASE_URL_DEST));
}
if (scrapeConfiguration == null) {
log.error("Must provide either %s or %s", ARG_BASE_URL_FLAGS, ARG_ZK_HOST_FLAGS);
}
SolrExporter solrExporter = new SolrExporter(
res.getInt(ARG_PORT_DEST),
res.getInt(ARG_NUM_THREADS_DEST),
res.getInt(ARG_SCRAPE_INTERVAL_DEST),
scrapeConfiguration,
loadMetricsConfiguration(Paths.get(res.getString(ARG_CONFIG_DEST))));
log.info("Starting Solr Prometheus Exporting");
solrExporter.start();
log.info("Start server");
} catch (ParserConfigurationException | SAXException | IOException e) {
log.error("Start server failed: " + e.toString());
log.info("Solr Prometheus Exporter is running");
} catch (IOException e) {
log.error("Failed to start Solr Prometheus Exporter: " + e.toString());
} catch (ArgumentParserException e) {
parser.handleError(e);
}
}
private static MetricsConfiguration loadMetricsConfiguration(Path configPath) {
try (SolrResourceLoader loader = new SolrResourceLoader(configPath.getParent())) {
XmlConfigFile config = new XmlConfigFile(loader, configPath.getFileName().toString());
return MetricsConfiguration.from(config);
} catch (Exception e) {
log.error("Could not load scrape configuration from %s", configPath.toAbsolutePath());
throw new RuntimeException(e);
}
}
}
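For orientation, a minimal wiring sketch mirroring main() above; the port, thread count, scrape interval, and paths are illustrative values, not defaults:
// Sketch only: mirrors the constructor call in main(); all values are examples.
SolrScrapeConfiguration scrapeConfiguration =
    SolrScrapeConfiguration.solrCloud("localhost:2181/solr");
SolrExporter exporter = new SolrExporter(
    9983,                // port for the embedded Prometheus HTTPServer
    8,                   // numberThreads shared by the collector and request pools
    10,                  // scrape interval in seconds, enforced by the exporter
    scrapeConfiguration,
    loadMetricsConfiguration(Paths.get("conf/solr-exporter-config.xml")));
exporter.start();       // registers the cached collector and begins collecting
exporter.stop();        // stops the HTTP server and shuts down both executors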

View File

@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.Optional;
public class SolrScrapeConfiguration {
public enum ConnectionType {
CLOUD,
STANDALONE
}
private final ConnectionType type;
private final String zookeeperConnectionString;
private final String solrHost;
private SolrScrapeConfiguration(ConnectionType type, String zookeeperConnectionString, String solrHost) {
this.type = type;
this.zookeeperConnectionString = zookeeperConnectionString;
this.solrHost = solrHost;
}
public ConnectionType getType() {
return type;
}
public Optional<String> getZookeeperConnectionString() {
return Optional.ofNullable(zookeeperConnectionString);
}
public Optional<String> getSolrHost() {
return Optional.ofNullable(solrHost);
}
public static SolrScrapeConfiguration solrCloud(String zookeeperConnectionString) {
return new SolrScrapeConfiguration(ConnectionType.CLOUD, zookeeperConnectionString, null);
}
public static SolrScrapeConfiguration standalone(String solrHost) {
return new SolrScrapeConfiguration(ConnectionType.STANDALONE, null, solrHost);
}
}
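A brief usage sketch of the two factory methods (host strings are placeholders):
SolrScrapeConfiguration cloud =
    SolrScrapeConfiguration.solrCloud("zk1:2181,zk2:2181/solr");
cloud.getZookeeperConnectionString();   // Optional.of("zk1:2181,zk2:2181/solr")
cloud.getSolrHost();                    // Optional.empty()
SolrScrapeConfiguration standalone =
    SolrScrapeConfiguration.standalone("http://localhost:8983/solr");
standalone.getType();                   // ConnectionType.STANDALONE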

View File

@ -15,7 +15,7 @@
* limitations under the License.
*/
/**
 * Prometheus Metrics Exporter.
*/
package org.apache.solr.prometheus.exporter;

View File

@ -0,0 +1,60 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.lang.invoke.MethodHandles;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Async {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static <T> CompletableFuture<List<T>> waitForAllSuccessfulResponses(List<CompletableFuture<T>> futures) {
CompletableFuture<Void> completed = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
return completed.thenApply(values -> {
return futures.stream()
.map(CompletableFuture::join)
.collect(Collectors.toList());
}
).exceptionally(error -> {
futures.stream()
.filter(CompletableFuture::isCompletedExceptionally)
.forEach(future -> {
try {
future.get();
} catch (Exception exception) {
log.warn("Error occurred during metrics collection", exception);
}
});
return futures.stream()
.filter(future -> !(future.isCompletedExceptionally() || future.isCancelled()))
.map(CompletableFuture::join)
.collect(Collectors.toList());
}
);
}
}
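A short sketch of the contract: futures that complete exceptionally are logged and dropped, so callers receive only the successful results (compare AsyncTest further below).
CompletableFuture<Integer> failed = new CompletableFuture<>();
failed.completeExceptionally(new RuntimeException("scrape failed"));
// Completes with [1]; the failure is logged inside Async, not propagated.
List<Integer> successful = Async.waitForAllSuccessfulResponses(
    Arrays.asList(CompletableFuture.completedFuture(1), failed)).get();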

View File

@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.Pair;
import org.apache.solr.prometheus.collector.MetricSamples;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.exporter.SolrClientFactory;
public class SolrCloudScraper extends SolrScraper {
private final CloudSolrClient solrClient;
private final SolrClientFactory solrClientFactory;
private Cache<String, HttpSolrClient> hostClientCache = CacheBuilder.newBuilder()
.maximumSize(100)
.removalListener((RemovalListener<String, HttpSolrClient>)
removalNotification -> IOUtils.closeQuietly(removalNotification.getValue()))
.build();
public SolrCloudScraper(CloudSolrClient solrClient, Executor executor, SolrClientFactory solrClientFactory) {
super(executor);
this.solrClient = solrClient;
this.solrClientFactory = solrClientFactory;
}
@Override
public Map<String, MetricSamples> pingAllCores(MetricsQuery query) throws IOException {
Map<String, HttpSolrClient> httpSolrClients = createHttpSolrClients();
Map<String, DocCollection> collectionState = solrClient.getClusterStateProvider().getClusterState().getCollectionsMap();
List<Replica> replicas = collectionState.values()
.stream()
.map(DocCollection::getReplicas)
.flatMap(List::stream)
.collect(Collectors.toList());
List<String> coreNames = replicas
.stream()
.map(Replica::getCoreName)
.collect(Collectors.toList());
Map<String, HttpSolrClient> coreToClient = replicas
.stream()
.map(replica -> new Pair<>(replica.getCoreName(), httpSolrClients.get(replica.getBaseUrl())))
.collect(Collectors.toMap(Pair::first, Pair::second));
return sendRequestsInParallel(coreNames, core -> {
try {
return request(coreToClient.get(core), query.withCore(core));
} catch (IOException exception) {
throw new RuntimeException(exception);
}
});
}
private Map<String, HttpSolrClient> createHttpSolrClients() throws IOException {
return getBaseUrls().stream()
.map(url -> {
try {
return hostClientCache.get(url, () -> solrClientFactory.createStandaloneSolrClient(url));
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
})
.collect(Collectors.toMap(HttpSolrClient::getBaseURL, Function.identity()));
}
@Override
public Map<String, MetricSamples> pingAllCollections(MetricsQuery query) throws IOException {
return sendRequestsInParallel(getCollections(), (collection) -> {
try {
return request(solrClient, query.withCollection(collection));
} catch (IOException exception) {
throw new RuntimeException(exception);
}
});
}
@Override
public Map<String, MetricSamples> metricsForAllHosts(MetricsQuery query) throws IOException {
Map<String, HttpSolrClient> httpSolrClients = createHttpSolrClients();
return sendRequestsInParallel(httpSolrClients.keySet(), (baseUrl) -> {
try {
return request(httpSolrClients.get(baseUrl), query);
} catch (IOException exception) {
throw new RuntimeException(exception);
}
});
}
@Override
public MetricSamples search(MetricsQuery query) throws IOException {
return request(solrClient, query);
}
@Override
public MetricSamples collections(MetricsQuery metricsQuery) throws IOException {
return request(solrClient, metricsQuery);
}
private Set<String> getBaseUrls() throws IOException {
return solrClient.getClusterStateProvider().getClusterState().getCollectionsMap().values()
.stream()
.map(DocCollection::getReplicas)
.flatMap(List::stream)
.map(Replica::getBaseUrl)
.collect(Collectors.toSet());
}
private Set<String> getCollections() throws IOException {
return solrClient.getClusterStateProvider().getClusterState().getCollectionsMap().keySet();
}
@Override
public void close() {
IOUtils.closeQuietly(solrClient);
hostClientCache.asMap().values().forEach(IOUtils::closeQuietly);
}
}
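The cluster-state walk above is what replaces the per-node HTTP lookups called out in SOLR-13234; condensed, the discovery step amounts to this sketch (not a verbatim excerpt):
ClusterState state = solrClient.getClusterStateProvider().getClusterState();
for (DocCollection collection : state.getCollectionsMap().values()) {
  for (Replica replica : collection.getReplicas()) {
    String core = replica.getCoreName();    // one scrape target per core ...
    String baseUrl = replica.getBaseUrl();  // ... reached via its hosting node
  }
}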

View File

@ -16,10 +16,24 @@
*/
package org.apache.solr.prometheus.scraper;
import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.prometheus.client.Collector;
import io.prometheus.client.Counter;
import net.thisptr.jackson.jq.JsonQuery;
import net.thisptr.jackson.jq.exception.JsonQueryException;
import org.apache.solr.client.solrj.SolrClient;
@ -27,188 +41,135 @@ import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Pair;
import org.apache.solr.prometheus.collector.MetricSamples;
import org.apache.solr.prometheus.exporter.MetricsQuery;
import org.apache.solr.prometheus.exporter.SolrExporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class SolrScraper implements Closeable {
private static final Counter scrapeErrorTotal = Counter.build()
.name("solr_exporter_scrape_error_total")
.help("Number of scrape error.")
.register(SolrExporter.defaultRegistry);
protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
protected final Executor executor;
private List<String> labelNames;
private List<String> labelValues;
public abstract Map<String, MetricSamples> metricsForAllHosts(MetricsQuery query) throws IOException;
public abstract Map<String, MetricSamples> pingAllCores(MetricsQuery query) throws IOException;
public abstract Map<String, MetricSamples> pingAllCollections(MetricsQuery query) throws IOException;
public abstract MetricSamples search(MetricsQuery query) throws IOException;
public abstract MetricSamples collections(MetricsQuery metricsQuery) throws IOException;
this.labelNames = new ArrayList<>();
this.labelValues = new ArrayList<>();
public SolrScraper(Executor executor) {
this.executor = executor;
}
protected Map<String, MetricSamples> sendRequestsInParallel(
Collection<String> items,
Function<String, MetricSamples> samplesCallable) throws IOException {
List<CompletableFuture<Pair<String, MetricSamples>>> futures = items.stream()
.map(item -> CompletableFuture.supplyAsync(() -> new Pair<>(item, samplesCallable.apply(item)), executor))
.collect(Collectors.toList());
Future<List<Pair<String, MetricSamples>>> allComplete = Async.waitForAllSuccessfulResponses(futures);
try {
return allComplete.get().stream().collect(Collectors.toMap(Pair::first, Pair::second));
} catch (InterruptedException | ExecutionException e) {
throw new IOException(e);
}
}
protected MetricSamples request(SolrClient client, MetricsQuery query) throws IOException {
MetricSamples samples = new MetricSamples();
QueryRequest queryRequest = new QueryRequest(query.getParameters());
queryRequest.setPath(query.getPath());
NamedList<Object> queryResponse = null;
try {
if (!query.getCollection().isPresent() && !query.getCore().isPresent()) {
queryResponse = client.request(queryRequest);
} else if (query.getCore().isPresent()) {
queryResponse = client.request(queryRequest, query.getCore().get());
} else if (query.getCollection().isPresent()) {
queryResponse = client.request(queryRequest, query.getCollection().get());
}
} catch (SolrServerException | IOException e) {
log.error("failed to request: " + queryRequest.getPath() + " " + e.getMessage());
}
JsonNode jsonNode = OBJECT_MAPPER.readTree((String) queryResponse.get("response"));
for (JsonQuery jsonQuery : query.getJsonQueries()) {
try {
List<JsonNode> results = jsonQuery.apply(jsonNode);
for (JsonNode result : results) {
String type = result.get("type").textValue();
String name = result.get("name").textValue();
String help = result.get("help").textValue();
double value = result.get("value").doubleValue();
List<String> labelNames = new ArrayList<>();
List<String> labelValues = new ArrayList<>();
/* Labels in response */
for (JsonNode item : result.get("label_names")) {
labelNames.add(item.textValue());
}
for (JsonNode item : result.get("label_values")) {
labelValues.add(item.textValue());
}
/* Labels due to client */
if (client instanceof HttpSolrClient) {
labelNames.add("base_url");
labelValues.add(((HttpSolrClient) client).getBaseURL());
}
if (client instanceof CloudSolrClient) {
labelNames.add("zk_host");
labelValues.add(((CloudSolrClient) client).getZkHost());
}
// Deduce core if not there
if (labelNames.indexOf("core") < 0 && labelNames.indexOf("collection") >= 0 && labelNames.indexOf("shard") >= 0 && labelNames.indexOf("replica") >= 0) {
labelNames.add("core");
String collection = labelValues.get(labelNames.indexOf("collection"));
String shard = labelValues.get(labelNames.indexOf("shard"));
String replica = labelValues.get(labelNames.indexOf("replica"));
labelValues.add(collection + "_" + shard + "_" + replica);
}
samples.addSamplesIfNotPresent(name, new Collector.MetricFamilySamples(
name,
Collector.Type.valueOf(type),
help,
new ArrayList<>()));
samples.addSampleIfMetricExists(name, new Collector.MetricFamilySamples.Sample(
name, labelNames, labelValues, value));
}
} catch (JsonQueryException e) {
log.error("Error apply JSON query={} to result", jsonQuery.toString(), e);
scrapeErrorTotal.inc();
}
}
return samples;
}
}
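Each jq rule configured under jsonQueries must emit objects of the shape request() consumes above; an illustrative instance:
// Illustrative rule output (field names as read by request(); values are examples):
// {
//   "name"         : "solr_ping",
//   "type"         : "GAUGE",            // any io.prometheus.client.Collector.Type
//   "help"         : "...",
//   "label_names"  : ["collection"],
//   "label_values" : ["collection1"],
//   "value"        : 1.0
// }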

View File

@ -0,0 +1,107 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.prometheus.collector.MetricSamples;
import org.apache.solr.prometheus.exporter.MetricsQuery;
public class SolrStandaloneScraper extends SolrScraper {
private final HttpSolrClient solrClient;
public SolrStandaloneScraper(HttpSolrClient solrClient, Executor executor) {
super(executor);
this.solrClient = solrClient;
}
@Override
public Map<String, MetricSamples> pingAllCores(MetricsQuery query) throws IOException {
return sendRequestsInParallel(getCores(), core -> {
try {
return request(solrClient, query.withCore(core));
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
@Override
public Map<String, MetricSamples> pingAllCollections(MetricsQuery query) throws IOException {
return Collections.emptyMap();
}
@Override
public Map<String, MetricSamples> metricsForAllHosts(MetricsQuery query) throws IOException {
Map<String, MetricSamples> samples = new HashMap<>();
samples.put(solrClient.getBaseURL(), request(solrClient, query));
return samples;
}
@Override
public MetricSamples search(MetricsQuery query) throws IOException {
return request(solrClient, query);
}
@Override
public MetricSamples collections(MetricsQuery metricsQuery) {
return new MetricSamples();
}
private Set<String> getCores() throws IOException {
Set<String> cores = new HashSet<>();
CoreAdminRequest coreAdminRequest = new CoreAdminRequest();
coreAdminRequest.setAction(CoreAdminParams.CoreAdminAction.STATUS);
coreAdminRequest.setIndexInfoNeeded(false);
NamedList<Object> coreAdminResponse;
try {
coreAdminResponse = solrClient.request(coreAdminRequest);
} catch (SolrServerException e) {
throw new IOException("Failed to get cores", e);
}
JsonNode statusJsonNode = OBJECT_MAPPER.readTree((String) coreAdminResponse.get("response")).get("status");
for (JsonNode jsonNode : statusJsonNode) {
cores.add(jsonNode.get("name").textValue());
}
return cores;
}
@Override
public void close() {
IOUtils.closeQuietly(solrClient);
}
}

View File

@ -15,7 +15,7 @@
* limitations under the License.
*/
/**
 * Send the raw requests to Solr endpoints.
*/
package org.apache.solr.prometheus.scraper;

View File

@ -18,12 +18,57 @@
<config>
<settings>
<httpClients>
<connectionTimeout>10000</connectionTimeout>
<readTimeout>10000</readTimeout>
</httpClients>
</settings>
<rules>
<search>
<lst name="request">
<lst name="query">
<str name="collection">collection1</str>
<str name="path">/select</str>
<lst name="params">
<str name="q">*:*</str>
<str name="start">0</str>
<str name="rows">0</str>
<str name="json.facet">
{
category: {
type: terms,
field: cat
}
}
</str>
</lst>
</lst>
<arr name="jsonQueries">
<str>
.facets.category.buckets[] as $object |
$object.val as $term |
$object.count as $value |
{
name : "solr_facets_category",
type : "GAUGE",
help : "Category facets",
label_names : ["term"],
label_values : [$term],
value : $value
}
</str>
</arr>
</lst>
</search>
<ping>
<lst name="request">
<lst name="query">
<str name="path">/admin/ping</str>
<str name="wt">json</str>
</lst>
<arr name="jsonQueries">
<str>
@ -54,49 +99,6 @@
</lst>
</lst>
<arr name="jsonQueries">
<!--
jetty metrics
-->
<!--
<str>
.metrics["solr.jetty"] | to_entries | .[] | select(.key | startswith("org.eclipse.jetty.server.handler.DefaultHandler")) | select(.key | endswith("xx-responses")) as $object |
$object.key | split(".") | last | split("-") | first as $status |
$object.value.count as $value |
{
name : "solr_metrics_jetty_response_total",
type : "COUNTER",
help : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
label_names : ["status"],
label_values : [$status],
value : $value
}
</str>
<str>
.metrics["solr.jetty"] | to_entries | .[] | select(.key | startswith("org.eclipse.jetty.server.handler.DefaultHandler.")) | select(.key | endswith("-requests")) | select (.value | type == "object") as $object |
$object.key | split(".") | last | split("-") | first as $method |
$object.value.count as $value |
{
name : "solr_metrics_jetty_requests_total",
type : "COUNTER",
help : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
label_names : ["method"],
label_values : [$method],
value : $value
}
</str>
<str>
.metrics["solr.jetty"] | to_entries | .[] | select(.key == "org.eclipse.jetty.server.handler.DefaultHandler.dispatches") as $object |
$object.value.count as $value |
{
name : "solr_metrics_jetty_dispatches_total",
type : "COUNTER",
help : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
label_names : [],
label_values : [],
value : $value
}
</str>
-->
<!--
jvm metrics
-->
@ -1662,45 +1664,6 @@
value : $value
}
</str>
<str>
.cluster.collections | to_entries | .[] | . as $object |
$object.key as $collection |
$object.value.pullReplicas | tonumber as $value |
{
name : "solr_collections_pull_replicas",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus",
label_names : ["collection"],
label_values : [$collection],
value : $value
}
</str>
<str>
.cluster.collections | to_entries | .[] | . as $object |
$object.key as $collection |
$object.value.nrtReplicas | tonumber as $value |
{
name : "solr_collections_nrt_replicas",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus",
label_names : ["collection"],
label_values : [$collection],
value : $value
}
</str>
<str>
.cluster.collections | to_entries | .[] | . as $object |
$object.key as $collection |
$object.value.tlogReplicas | tonumber as $value |
{
name : "solr_collections_tlog_replicas",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus",
label_names : ["collection"],
label_values : [$collection],
value : $value
}
</str>
<str>
.cluster.collections | to_entries | .[] | . as $object |
$object.key as $collection |
@ -1764,43 +1727,6 @@
</lst>
</collections>
<search>
<lst name="request">
<lst name="query">
<str name="collection">collection1</str>
<str name="path">/select</str>
<lst name="params">
<str name="q">*:*</str>
<str name="start">0</str>
<str name="rows">0</str>
<str name="json.facet">
{
category: {
type: terms,
field: cat
}
}
</str>
</lst>
</lst>
<arr name="jsonQueries">
<str>
.facets.category.buckets[] as $object |
$object.val as $term |
$object.count as $value |
{
name : "solr_facets_category",
type : "GAUGE",
help : "Category facets",
label_names : ["term"],
label_values : [$term],
value : $value
}
</str>
</arr>
</lst>
</search>
</rules>
</config>

View File

@ -0,0 +1,162 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<config>
<rules>
<ping>
<lst name="request">
<lst name="query">
<str name="path">/admin/ping</str>
<str name="wt">json</str>
</lst>
<arr name="jsonQueries">
<str>
. as $object | $object |
(if $object.status == "OK" then 1.0 else 0.0 end) as $value |
{
name : "solr_ping",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/ping.html",
label_names : [],
label_values : [],
value : $value
}
</str>
</arr>
</lst>
</ping>
<collections>
<lst name="request">
<lst name="query">
<str name="path">/admin/collections</str>
<lst name="params">
<str name="action">CLUSTERSTATUS</str>
</lst>
</lst>
<arr name="jsonQueries">
<str>
.cluster.live_nodes | length as $value|
{
name : "solr_collections_live_nodes",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus",
label_names : [],
label_values : [],
value : $value
}
</str>
<str>
.cluster.collections | to_entries | .[] | . as $object |
$object.key as $collection |
$object.value.shards | to_entries | .[] | . as $shard_obj |
$shard_obj.key as $shard |
$shard_obj.value.replicas | to_entries | .[] | . as $replica_obj |
$replica_obj.key as $replica_name |
$replica_obj.value.core as $core |
$core[$collection + "_" + $shard + "_" | length:] as $replica |
$replica_obj.value.base_url as $base_url |
$replica_obj.value.node_name as $node_name |
$replica_obj.value.type as $type |
(if $replica_obj.value.leader == "true" then 1.0 else 0.0 end) as $value |
{
name : "solr_collections_shard_leader",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus",
label_names : ["collection", "shard", "replica", "core", "type"],
label_values : [$collection, $shard, $replica, $core, $type],
value : $value
}
</str>
</arr>
</lst>
</collections>
<metrics>
<lst name="request">
<lst name="query">
<str name="path">/admin/metrics</str>
<lst name="params">
<str name="group">all</str>
<str name="type">all</str>
</lst>
</lst>
<arr name="jsonQueries">
<!--
jvm metrics
-->
<str>
.metrics["solr.jvm"] | to_entries | .[] | select(.key | startswith("buffers.")) | select(.key |
endswith(".Count")) as $object |
$object.key | split(".")[1] as $pool |
$object.value as $value |
{
name : "solr_metrics_jvm_buffers",
type : "GAUGE",
help : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
label_names : ["pool"],
label_values : [$pool],
value : $value
}
</str>
</arr>
</lst>
</metrics>
<search>
<lst name="request">
<lst name="query">
<str name="collection">collection1</str>
<str name="path">/select</str>
<lst name="params">
<str name="q">*:*</str>
<str name="start">0</str>
<str name="rows">0</str>
<str name="json.facet">
{
category: {
type: terms,
field: cat
}
}
</str>
</lst>
</lst>
<arr name="jsonQueries">
<str>
.facets.category.buckets[] as $object |
$object.val as $term |
$object.count as $value |
{
name : "solr_facets_category",
type : "GAUGE",
help : "Category facets",
label_names : ["term"],
label_values : [$term],
value : $value
}
</str>
</arr>
</lst>
</search>
</rules>
</config>

View File

@ -53,7 +53,7 @@
<query>
<maxBooleanClauses>${solr.max.booleanClauses:1024}</maxBooleanClauses>
<maxBooleanClauses>1024</maxBooleanClauses>
<filterCache class="solr.FastLRUCache"
size="512"
@ -164,10 +164,22 @@
<updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
<updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
<arr name="format">
<str>yyyy-MM-dd'T'HH:mm[:ss[.SSS]][z</str>
<str>yyyy-MM-dd'T'HH:mm[:ss[,SSS]][z</str>
<str>yyyy-MM-dd HH:mm[:ss[.SSS]][z</str>
<str>yyyy-MM-dd HH:mm[:ss[,SSS]][z</str>
<str>yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
<str>yyyy-MM-dd'T'HH:mm:ss,SSSZ</str>
<str>yyyy-MM-dd'T'HH:mm:ss.SSS</str>
<str>yyyy-MM-dd'T'HH:mm:ss,SSS</str>
<str>yyyy-MM-dd'T'HH:mm:ssZ</str>
<str>yyyy-MM-dd'T'HH:mm:ss</str>
<str>yyyy-MM-dd'T'HH:mmZ</str>
<str>yyyy-MM-dd'T'HH:mm</str>
<str>yyyy-MM-dd HH:mm:ss.SSSZ</str>
<str>yyyy-MM-dd HH:mm:ss,SSSZ</str>
<str>yyyy-MM-dd HH:mm:ss.SSS</str>
<str>yyyy-MM-dd HH:mm:ss,SSS</str>
<str>yyyy-MM-dd HH:mm:ssZ</str>
<str>yyyy-MM-dd HH:mm:ss</str>
<str>yyyy-MM-dd HH:mmZ</str>
<str>yyyy-MM-dd HH:mm</str>
<str>yyyy-MM-dd</str>
</arr>
</updateProcessor>

View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<solr>
<str name="shareSchema">${shareSchema:false}</str>
<str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
<str name="coreRootDirectory">${coreRootDirectory:.}</str>
<shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
<str name="urlScheme">${urlScheme:}</str>
<int name="socketTimeout">${socketTimeout:90000}</int>
<int name="connTimeout">${connTimeout:15000}</int>
</shardHandlerFactory>
<solrcloud>
<str name="host">127.0.0.1</str>
<int name="hostPort">${hostPort:8983}</int>
<str name="hostContext">${hostContext:solr}</str>
<int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
<bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
<int name="leaderVoteWait">${leaderVoteWait:10000}</int>
<int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
<int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
</solrcloud>
</solr>

View File

@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus;
import com.google.common.collect.ImmutableMap;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.prometheus.utils.Helpers;
import org.junit.BeforeClass;
public class PrometheusExporterTestBase extends SolrCloudTestCase {
public static final String COLLECTION = "collection1";
public static final String CONF_NAME = COLLECTION + "_config";
public static final String CONF_DIR = getFile("solr/" + COLLECTION + "/conf").getAbsolutePath();
public static final int NUM_SHARDS = 2;
public static final int NUM_REPLICAS = 2;
public static final int MAX_SHARDS_PER_NODE = 1;
public static final int NUM_NODES = (NUM_SHARDS * NUM_REPLICAS + (MAX_SHARDS_PER_NODE - 1)) / MAX_SHARDS_PER_NODE;
public static final int TIMEOUT = 60;
public static final ImmutableMap<String, Double> FACET_VALUES = ImmutableMap.<String, Double>builder()
.put("electronics", 14.0)
.put("currency", 4.0)
.put("memory", 3.0)
.put("and", 2.0)
.put("card", 2.0)
.put("connector", 2.0)
.put("drive", 2.0)
.put("graphics", 2.0)
.put("hard", 2.0)
.put("search", 2.0)
.build();
@Override
public void setUp() throws Exception {
super.setUp();
}
@Override
public void tearDown() throws Exception {
super.tearDown();
}
@BeforeClass
public static void setupCluster() throws Exception {
configureCluster(NUM_NODES)
.addConfig(CONF_NAME, getFile(CONF_DIR).toPath())
.configure();
CollectionAdminRequest
.createCollection(COLLECTION, CONF_NAME, NUM_SHARDS, NUM_REPLICAS)
.setMaxShardsPerNode(MAX_SHARDS_PER_NODE)
.process(cluster.getSolrClient());
AbstractDistribZkTestBase
.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), true, true, TIMEOUT);
Helpers.indexAllDocs(cluster.getSolrClient());
}
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import io.prometheus.client.Collector;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class MetricSamplesTest {
private Collector.MetricFamilySamples.Sample sample(String name, Double value) {
return new Collector.MetricFamilySamples.Sample(name, Collections.emptyList(), Collections.emptyList(), value);
}
private Collector.MetricFamilySamples samples(String metricName, Collector.Type type, Collector.MetricFamilySamples.Sample...samples) {
return new Collector.MetricFamilySamples(
metricName,
type,
"help",
new ArrayList<>(Arrays.asList(samples))
);
}
private void validateMetricSamples(
List<Collector.MetricFamilySamples> allMetrics,
String metricName,
List<Double> expectedValues) {
Collector.MetricFamilySamples test1 = allMetrics.stream()
.filter(s -> s.name.equals(metricName))
.findFirst()
.orElseThrow(() -> new RuntimeException(String.format(Locale.ROOT, "Unable to find item %s", metricName)));
assertTrue(Iterables.elementsEqual(expectedValues, test1.samples.stream().map(s -> s.value).collect(Collectors.toList())));
}
@Test
public void asList() {
MetricSamples samples = new MetricSamples(Maps.newHashMap(ImmutableMap.<String, Collector.MetricFamilySamples>builder()
.put("test1", samples("test1", Collector.Type.GAUGE, sample("test1", 1.0), sample("test1", 2.0)))
.put("test2", samples("test2", Collector.Type.GAUGE, sample("test2", 1.0)))
.build()));
List<Collector.MetricFamilySamples> output = samples.asList();
assertEquals(2, output.size());
validateMetricSamples(output, "test1", Arrays.asList(1.0, 2.0));
validateMetricSamples(output, "test2", Collections.singletonList(1.0));
}
@Test
public void addAll() {
MetricSamples lhs = new MetricSamples(Maps.newHashMap(ImmutableMap.<String, Collector.MetricFamilySamples>builder()
.put("same", samples("same", Collector.Type.GAUGE, sample("same", 1.0), sample("same", 2.0)))
.put("diff1", samples("diff1", Collector.Type.GAUGE, sample("diff1", 1.0)))
.build()));
MetricSamples rhs = new MetricSamples(Maps.newHashMap(ImmutableMap.<String, Collector.MetricFamilySamples>builder()
.put("same", samples("test1", Collector.Type.GAUGE, sample("test1", 3.0), sample("test1", 4.0)))
.put("diff2", samples("diff2", Collector.Type.GAUGE, sample("diff2", 1.0)))
.build()));
lhs.addAll(rhs);
List<Collector.MetricFamilySamples> output = lhs.asList();
validateMetricSamples(output, "same", Arrays.asList(1.0, 2.0, 3.0, 4.0));
validateMetricSamples(output, "diff1", Collections.singletonList(1.0));
validateMetricSamples(output, "diff2", Collections.singletonList(1.0));
}
}

View File

@ -1,94 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.collector;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.core.XmlConfigFile;
import org.apache.solr.prometheus.exporter.SolrExporter;
import org.apache.solr.prometheus.exporter.SolrExporterTestBase;
import io.prometheus.client.CollectorRegistry;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
/**
* Unit test for SolrCollector.
*/
@Slow
public class SolrCollectorTest extends SolrExporterTestBase {
CollectorRegistry registry;
@Override
public void setUp() throws Exception {
super.setUp();
registry = new CollectorRegistry();
}
@Override
public void tearDown() throws Exception {
super.tearDown();
}
@Test
public void testSolrCollector() throws Exception {
String name = "solr-exporter-config.xml";
SolrResourceLoader loader = new SolrResourceLoader(getFile("conf/").toPath());
XmlConfigFile config = new XmlConfigFile(loader, name);
CloudSolrClient cloudSolrClient = cluster.getSolrClient();
SolrCollector collector = new SolrCollector(cloudSolrClient, config, 1);
assertNotNull(collector);
}
@Test
public void testCollect() throws Exception {
String name = "solr-exporter-config.xml";
SolrResourceLoader loader = new SolrResourceLoader(getFile("conf/").toPath());
XmlConfigFile config = new XmlConfigFile(loader, name);
CloudSolrClient cloudSolrClient = cluster.getSolrClient();
SolrCollector collector = new SolrCollector(cloudSolrClient, config, 1);
this.registry.register(collector);
this.registry.register(SolrExporter.scrapeErrorTotal);
// index sample docs
File exampleDocsDir = new File(getFile("exampledocs").getAbsolutePath());
List<File> xmlFiles = Arrays.asList(exampleDocsDir.listFiles((dir, file) -> file.endsWith(".xml")));
for (File xml : xmlFiles) {
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
req.addFile(xml, "application/xml");
cloudSolrClient.request(req, "collection1");
}
cloudSolrClient.commit("collection1");
// collect metrics
collector.collect();
// check scrape error count
assertEquals(0.0, registry.getSampleValue("solr_exporter_scrape_error_total", new String[]{}, new String[]{}), .001);
}
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.junit.Before;
import org.junit.Test;
@Slow
public class SolrExporterIntegrationTest extends SolrExporterTestBase {
@Override
@Before
public void setUp() throws Exception {
super.setUp();
startMetricsExporterWithConfiguration("conf/prometheus-solr-exporter-integration-test-config.xml");
}
private Map<String, Double> metricsWithName(Map<String, Double> allMetrics, String name) {
return allMetrics.entrySet()
.stream()
.filter(entry -> entry.getKey().startsWith(name))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
@Test
public void pingAllCollectionsAndCoresAreAvailable() throws Exception {
Map<String, Double> pingMetrics = metricsWithName(getAllMetrics(), "solr_ping");
assertEquals(5, pingMetrics.size());
for (Map.Entry<String, Double> metric : pingMetrics.entrySet()) {
assertEquals(1.0, metric.getValue(), 0.001);
}
}
@Test
public void solrExporterDurationMetric() throws Exception {
Map<String, Double> durationHistogram = metricsWithName(getAllMetrics(), "solr_exporter_duration");
assertTrue(durationHistogram.get("solr_exporter_duration_seconds_count") > 0);
assertTrue(durationHistogram.get("solr_exporter_duration_seconds_sum") > 0);
// 17 = (15 buckets in the histogram) + (count metric) + (sum metric)
assertEquals(17, durationHistogram.size());
}
@Test
public void jvmMetrics() throws Exception {
Map<String, Double> jvmMetrics = metricsWithName(getAllMetrics(), "solr_metrics_jvm_threads{item=\"terminated\"");
assertEquals(NUM_NODES, jvmMetrics.size());
}
@Test
public void jsonFacetMetrics() throws Exception {
Map<String, Double> facetMetrics = metricsWithName(getAllMetrics(), "solr_facets_category");
assertEquals(FACET_VALUES.size(), facetMetrics.size());
}
@Test
public void collectionMetrics() throws Exception {
Map<String, Double> allMetrics = getAllMetrics();
Map<String, Double> liveNodeMetrics = metricsWithName(allMetrics, "solr_collections_live_nodes");
assertEquals(1, liveNodeMetrics.size());
liveNodeMetrics.forEach((metric, value) -> {
assertEquals((double) NUM_NODES, value, 0.001);
});
Map<String, Double> shardLeaderMetrics = metricsWithName(allMetrics, "solr_collections_shard_leader");
assertEquals(NUM_NODES, shardLeaderMetrics.size());
double totalLeaderCount = shardLeaderMetrics.values()
.stream()
.mapToDouble(Double::doubleValue)
.sum();
assertEquals(NUM_SHARDS, totalLeaderCount, 0.001);
}
}

View File

@ -1,99 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.exporter;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.junit.Test;
import java.io.File;
import java.net.ServerSocket;
import java.net.URI;
import java.util.Arrays;
import java.util.List;
/**
* Unit test for SolrExporter.
*/
@Slow
public class SolrExporterTest extends SolrExporterTestBase {
@Override
public void setUp() throws Exception {
super.setUp();
}
@Override
public void tearDown() throws Exception {
super.tearDown();
}
@Test
public void testExecute() throws Exception {
// solr client
CloudSolrClient cloudSolrClient = cluster.getSolrClient();
int port;
ServerSocket socket = null;
try {
socket = new ServerSocket(0);
port = socket.getLocalPort();
} finally {
socket.close();
}
// index sample docs
File exampleDocsDir = new File(getFile("exampledocs").getAbsolutePath());
List<File> xmlFiles = Arrays.asList(exampleDocsDir.listFiles((dir, name) -> name.endsWith(".xml")));
for (File xml : xmlFiles) {
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
req.addFile(xml, "application/xml");
cloudSolrClient.request(req, "collection1");
}
cloudSolrClient.commit("collection1");
// start exporter
SolrExporter solrExporter = new SolrExporter(port, cloudSolrClient, getFile("conf/solr-exporter-config.xml").toPath(), 1);
try {
solrExporter.start();
URI uri = new URI("http://localhost:" + String.valueOf(port) + "/metrics");
CloseableHttpClient httpclient = HttpClients.createDefault();
CloseableHttpResponse response = null;
try {
HttpGet request = new HttpGet(uri);
response = httpclient.execute(request);
int expectedHTTPStatusCode = HttpStatus.SC_OK;
int actualHTTPStatusCode = response.getStatusLine().getStatusCode();
assertEquals(expectedHTTPStatusCode, actualHTTPStatusCode);
} finally {
response.close();
httpclient.close();
}
} finally {
solrExporter.stop();
}
}
}

View File

@ -16,38 +16,105 @@
*/
package org.apache.solr.prometheus.exporter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.solr.prometheus.PrometheusExporterTestBase;
import org.apache.solr.prometheus.utils.Helpers;
import org.junit.After;
/**
* Test base class.
*/
@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
public class SolrExporterTestBase extends PrometheusExporterTestBase {
private SolrExporter solrExporter;
private CloseableHttpClient httpClient;
private int prometheusExporterPort;
@Override
@After
public void tearDown() throws Exception {
if (solrExporter != null) {
solrExporter.stop();
}
IOUtils.closeQuietly(httpClient);
super.tearDown();
}
protected void startMetricsExporterWithConfiguration(String scrapeConfiguration) throws Exception {
try (ServerSocket socket = new ServerSocket(0)) {
prometheusExporterPort = socket.getLocalPort();
}
solrExporter = new SolrExporter(
prometheusExporterPort,
25,
10,
SolrScrapeConfiguration.solrCloud(cluster.getZkServer().getZkAddress()),
Helpers.loadConfiguration(scrapeConfiguration));
solrExporter.start();
httpClient = HttpClients.createDefault();
for (int i = 0; i < 50; ++i) {
Thread.sleep(100);
try {
getAllMetrics();
System.out.println("Prometheus exporter running");
break;
} catch (IOException exception) {
if (i % 10 == 0) {
System.out.println("Waiting for Prometheus exporter");
}
}
}
}
protected Map<String, Double> getAllMetrics() throws URISyntaxException, IOException {
URI uri = new URI("http://localhost:" + promtheusExporterPort + "/metrics");
HttpGet request = new HttpGet(uri);
Map<String, Double> metrics = new HashMap<>();
try (CloseableHttpResponse response = httpClient.execute(request)) {
assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
String currentLine;
while ((currentLine = reader.readLine()) != null) {
          // Lines that begin with a # are comments in the Prometheus exposition format.
if (currentLine.startsWith("#")) {
continue;
}
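          // Remaining lines use the exposition format "<name>[{labels}] <value>";
          // the assertion below assumes no optional timestamp suffix is emitted.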
String[] parts = currentLine.split(" ");
assertEquals("Metric must have name and value", 2, parts.length);
metrics.put(parts[0], Double.valueOf(parts[1]));
}
}
}
return metrics;
}
}
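Subclasses are expected to drive the two helpers above: start the exporter with a scrape configuration, then assert against the parsed metrics map. A minimal sketch of such a test follows; the class name is hypothetical, while the configuration file and the `solr_metrics_jvm` metric prefix both appear elsewhere in this change:

import java.util.Map;

import org.junit.Test;

public class SolrExporterSmokeTest extends SolrExporterTestBase {

  @Test
  public void exposesJvmMetrics() throws Exception {
    // Start the exporter against the test cluster with an exporter config.
    startMetricsExporterWithConfiguration("conf/solr-exporter-config.xml");

    // Metric names and values are parsed from /metrics by the base class.
    Map<String, Double> metrics = getAllMetrics();

    // The exporter configs used in these tests expose JVM metrics, so at
    // least one "solr_metrics_jvm_*" family should be present.
    assertTrue(metrics.keySet().stream().anyMatch(name -> name.startsWith("solr_metrics_jvm")));
  }
}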

View File

@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class AsyncTest {
private CompletableFuture<Integer> failedFuture() {
CompletableFuture<Integer> result = new CompletableFuture<>();
result.completeExceptionally(new RuntimeException("Some error"));
return result;
}
@Test
public void getAllResults() throws Exception {
List<Integer> expectedValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7);
CompletableFuture<List<Integer>> results = Async.waitForAllSuccessfulResponses(
expectedValues.stream()
.map(CompletableFuture::completedFuture)
.collect(Collectors.toList()));
List<Integer> actualValues = results.get();
Collections.sort(expectedValues);
Collections.sort(actualValues);
assertEquals(expectedValues, actualValues);
}
@Test
public void ignoresFailures() throws Exception {
CompletableFuture<List<Integer>> results = Async.waitForAllSuccessfulResponses(Arrays.asList(
CompletableFuture.completedFuture(1),
failedFuture()
));
List<Integer> values = results.get();
assertEquals(Collections.singletonList(1), values);
}
@Test
public void allFuturesFail() throws Exception {
CompletableFuture<List<Integer>> results = Async.waitForAllSuccessfulResponses(Collections.singletonList(
failedFuture()
));
List<Integer> values = results.get();
assertTrue(values.isEmpty());
}
}
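These tests pin down the contract of `Async.waitForAllSuccessfulResponses`: a list of futures becomes a single future over the successful results, with failures silently dropped. A minimal sketch of a helper satisfying that contract, written against the asserted behaviour rather than the commit's actual implementation:

import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public final class AsyncSketch {

  private AsyncSketch() {}

  public static <T> CompletableFuture<List<T>> waitForAllSuccessfulResponses(List<CompletableFuture<T>> futures) {
    // Convert each failure into a null marker so that allOf(...) cannot fail.
    List<CompletableFuture<T>> guarded = futures.stream()
        .map(future -> future.exceptionally(throwable -> null))
        .collect(Collectors.toList());
    // Wait for everything to finish, then keep only the successful results.
    // (A real implementation would need a distinct sentinel to support null values.)
    return CompletableFuture.allOf(guarded.toArray(new CompletableFuture[0]))
        .thenApply(ignored -> guarded.stream()
            .map(CompletableFuture::join)
            .filter(Objects::nonNull)
            .collect(Collectors.toList()));
  }
}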

View File

@ -0,0 +1,205 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import io.prometheus.client.Collector;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.prometheus.PrometheusExporterTestBase;
import org.apache.solr.prometheus.collector.MetricSamples;
import org.apache.solr.prometheus.exporter.MetricsConfiguration;
import org.apache.solr.prometheus.exporter.PrometheusExporterSettings;
import org.apache.solr.prometheus.exporter.SolrClientFactory;
import org.apache.solr.prometheus.utils.Helpers;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class SolrCloudScraperTest extends PrometheusExporterTestBase {
private MetricsConfiguration configuration;
private SolrCloudScraper solrCloudScraper;
private ExecutorService executor;
private SolrCloudScraper createSolrCloudScraper() {
CloudSolrClient solrClient = new CloudSolrClient.Builder(
Collections.singletonList(cluster.getZkServer().getZkAddress()), Optional.empty())
.build();
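    // Fetch raw JSON and skip SolrJ's response parsing; the scraper consumes
    // the JSON payload directly.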
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
solrClient.setParser(responseParser);
solrClient.connect();
SolrClientFactory factory = new SolrClientFactory(PrometheusExporterSettings.builder().build());
return new SolrCloudScraper(solrClient, executor, factory);
}
private ClusterState getClusterState() {
return cluster.getSolrClient().getZkStateReader().getClusterState();
}
private DocCollection getCollectionState() {
return getClusterState().getCollection(PrometheusExporterTestBase.COLLECTION);
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
executor = ExecutorUtil.newMDCAwareFixedThreadPool(25, new DefaultSolrThreadFactory("solr-cloud-scraper-tests"));
configuration = Helpers.loadConfiguration("conf/prometheus-solr-exporter-scraper-test-config.xml");
solrCloudScraper = createSolrCloudScraper();
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
IOUtils.closeQuietly(solrCloudScraper);
executor.shutdownNow();
}
@Test
public void pingCollections() throws Exception {
Map<String, MetricSamples> collectionMetrics = solrCloudScraper.pingAllCollections(
configuration.getPingConfiguration().get(0));
assertEquals(1, collectionMetrics.size());
assertTrue(collectionMetrics.containsKey(PrometheusExporterTestBase.COLLECTION));
List<Collector.MetricFamilySamples> collectionSamples = collectionMetrics.get(PrometheusExporterTestBase.COLLECTION).asList();
assertEquals(1, collectionSamples.size());
Collector.MetricFamilySamples collection1Metrics = collectionSamples.get(0);
assertEquals("solr_ping", collection1Metrics.name);
assertEquals(1, collection1Metrics.samples.size());
assertEquals(1.0, collection1Metrics.samples.get(0).value, 0.001);
assertEquals(Collections.singletonList("zk_host"), collection1Metrics.samples.get(0).labelNames);
assertEquals(Collections.singletonList(cluster.getZkServer().getZkAddress()), collection1Metrics.samples.get(0).labelValues);
}
@Test
public void pingCores() throws Exception {
Map<String, MetricSamples> allCoreMetrics = solrCloudScraper.pingAllCores(
configuration.getPingConfiguration().get(0));
Map<String, DocCollection> collectionStates = getClusterState().getCollectionsMap();
long coreCount = collectionStates.entrySet()
.stream()
.mapToInt(entry -> entry.getValue().getReplicas().size())
.sum();
assertEquals(coreCount, allCoreMetrics.size());
for (Map.Entry<String, DocCollection> entry : collectionStates.entrySet()) {
String coreName = entry.getValue().getReplicas().get(0).getCoreName();
assertTrue(allCoreMetrics.containsKey(coreName));
List<Collector.MetricFamilySamples> coreMetrics = allCoreMetrics.get(coreName).asList();
assertEquals(1, coreMetrics.size());
assertEquals("solr_ping", coreMetrics.get(0).name);
assertEquals(1, coreMetrics.get(0).samples.size());
assertEquals(1.0, coreMetrics.get(0).samples.get(0).value, 0.001);
}
}
@Test
public void queryCollections() throws Exception {
List<Collector.MetricFamilySamples> collection1Metrics = solrCloudScraper.collections(
configuration.getCollectionsConfiguration().get(0)).asList();
assertEquals(2, collection1Metrics.size());
Collector.MetricFamilySamples liveNodeSamples = collection1Metrics.get(0);
assertEquals("solr_collections_live_nodes", liveNodeSamples.name);
assertEquals("See following URL: https://lucene.apache.org/solr/guide/collections-api.html#clusterstatus", liveNodeSamples.help);
assertEquals(1, liveNodeSamples.samples.size());
assertEquals(
getClusterState().getLiveNodes().size(),
liveNodeSamples.samples.get(0).value, 0.001);
Collector.MetricFamilySamples shardLeaderSamples = collection1Metrics.get(1);
DocCollection collection = getCollectionState();
List<Replica> allReplicas = collection.getReplicas();
assertEquals(allReplicas.size(), shardLeaderSamples.samples.size());
Collection<Slice> slices = getCollectionState().getSlices();
Set<String> leaderCoreNames = slices.stream()
.map(slice -> collection.getLeader(slice.getName()).getCoreName())
.collect(Collectors.toSet());
for (Collector.MetricFamilySamples.Sample sample : shardLeaderSamples.samples) {
assertEquals("solr_collections_shard_leader", sample.name);
assertEquals(Arrays.asList("collection", "shard", "replica", "core", "type", "zk_host"), sample.labelNames);
assertEquals(leaderCoreNames.contains(sample.labelValues.get(3)) ? 1.0 : 0.0, sample.value, 0.001);
}
}
@Test
public void metricsForEachHost() throws Exception {
Map<String, MetricSamples> metricsByHost = solrCloudScraper.metricsForAllHosts(configuration.getMetricsConfiguration().get(0));
List<Replica> replicas = getCollectionState().getReplicas();
assertEquals(replicas.size(), metricsByHost.size());
for (Replica replica : replicas) {
List<Collector.MetricFamilySamples> replicaSamples = metricsByHost.get(replica.getBaseUrl()).asList();
assertEquals(1, replicaSamples.size());
assertEquals("solr_metrics_jvm_buffers", replicaSamples.get(0).name);
}
}
@Test
public void search() throws Exception {
List<Collector.MetricFamilySamples> samples = solrCloudScraper.search(configuration.getSearchConfiguration().get(0)).asList();
assertEquals(1, samples.size());
Collector.MetricFamilySamples sampleFamily = samples.get(0);
assertEquals("solr_facets_category", sampleFamily.name);
assertEquals(FACET_VALUES.size(), sampleFamily.samples.size());
for (Collector.MetricFamilySamples.Sample sample : sampleFamily.samples) {
assertEquals(FACET_VALUES.get(sample.labelValues.get(0)), sample.value, 0.001);
}
}
}

View File

@ -0,0 +1,152 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.scraper;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import io.prometheus.client.Collector;
import org.apache.commons.io.FileUtils;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.NoOpResponseParser;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.prometheus.PrometheusExporterTestBase;
import org.apache.solr.prometheus.collector.MetricSamples;
import org.apache.solr.prometheus.exporter.MetricsConfiguration;
import org.apache.solr.prometheus.utils.Helpers;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.RestTestBase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class SolrStandaloneScraperTest extends RestTestBase {
private static MetricsConfiguration configuration;
private static SolrStandaloneScraper solrScraper;
private static ExecutorService executor;
private static HttpSolrClient solrClient;
@BeforeClass
public static void setupBeforeClass() throws Exception {
File tmpSolrHome = createTempDir().toFile();
tmpSolrHome.deleteOnExit();
FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
initCore("solrconfig.xml", "managed-schema");
createJettyAndHarness(
tmpSolrHome.getAbsolutePath(),
"solrconfig.xml",
"managed-schema",
"/solr",
true,
null);
    executor = ExecutorUtil.newMDCAwareFixedThreadPool(25, new DefaultSolrThreadFactory("solr-standalone-scraper-tests"));
configuration = Helpers.loadConfiguration("conf/prometheus-solr-exporter-scraper-test-config.xml");
solrClient = getHttpSolrClient(restTestHarness.getAdminURL());
solrScraper = new SolrStandaloneScraper(solrClient, executor);
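    // As in the cloud scraper test: fetch raw JSON so the scraper can parse it directly.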
NoOpResponseParser responseParser = new NoOpResponseParser();
responseParser.setWriterType("json");
solrClient.setParser(responseParser);
Helpers.indexAllDocs(solrClient);
}
@AfterClass
public static void cleanUp() throws Exception {
IOUtils.closeQuietly(solrScraper);
IOUtils.closeQuietly(solrClient);
cleanUpHarness();
executor.shutdownNow();
jetty.stop();
}
@Test
public void pingCollections() throws IOException {
Map<String, MetricSamples> collectionMetrics = solrScraper.pingAllCollections(
configuration.getPingConfiguration().get(0));
assertTrue(collectionMetrics.isEmpty());
}
@Test
public void pingCores() throws Exception {
Map<String, MetricSamples> allCoreMetrics = solrScraper.pingAllCores(
configuration.getPingConfiguration().get(0));
assertEquals(1, allCoreMetrics.size());
List<Collector.MetricFamilySamples> allSamples = allCoreMetrics.get("collection1").asList();
Collector.MetricFamilySamples samples = allSamples.get(0);
assertEquals("solr_ping", samples.name);
assertEquals(1, samples.samples.size());
assertEquals(1.0, samples.samples.get(0).value, 0.001);
assertEquals(Collections.singletonList("base_url"), samples.samples.get(0).labelNames);
assertEquals(Collections.singletonList(restTestHarness.getAdminURL()), samples.samples.get(0).labelValues);
}
@Test
public void queryCollections() throws Exception {
List<Collector.MetricFamilySamples> collection1Metrics = solrScraper.collections(
configuration.getCollectionsConfiguration().get(0)).asList();
assertTrue(collection1Metrics.isEmpty());
}
@Test
public void metricsForHost() throws Exception {
Map<String, MetricSamples> metricsByHost = solrScraper.metricsForAllHosts(configuration.getMetricsConfiguration().get(0));
assertEquals(1, metricsByHost.size());
List<Collector.MetricFamilySamples> replicaSamples = metricsByHost.get(restTestHarness.getAdminURL()).asList();
    assertEquals(1, replicaSamples.size());
assertEquals("solr_metrics_jvm_buffers", replicaSamples.get(0).name);
}
@Test
public void search() throws Exception {
List<Collector.MetricFamilySamples> samples = solrScraper.search(configuration.getSearchConfiguration().get(0)).asList();
assertEquals(1, samples.size());
Collector.MetricFamilySamples sampleFamily = samples.get(0);
assertEquals("solr_facets_category", sampleFamily.name);
assertEquals(PrometheusExporterTestBase.FACET_VALUES.size(), sampleFamily.samples.size());
for (Collector.MetricFamilySamples.Sample sample : sampleFamily.samples) {
assertEquals(PrometheusExporterTestBase.FACET_VALUES.get(sample.labelValues.get(0)), sample.value, 0.001);
}
}
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.prometheus.utils;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.core.XmlConfigFile;
import org.apache.solr.prometheus.PrometheusExporterTestBase;
import org.apache.solr.prometheus.exporter.MetricsConfiguration;
public class Helpers {
public static MetricsConfiguration loadConfiguration(String path) throws Exception {
Path configPath = Paths.get(path);
try (SolrResourceLoader loader = new SolrResourceLoader(configPath.getParent())) {
XmlConfigFile config = new XmlConfigFile(loader, configPath.getFileName().toString());
return MetricsConfiguration.from(config);
}
}
public static void indexAllDocs(SolrClient client) throws IOException, SolrServerException {
    File exampleDocsDir = SolrTestCaseJ4.getFile("exampledocs");
File[] xmlFiles = Objects.requireNonNull(exampleDocsDir.listFiles((dir, name) -> name.endsWith(".xml")));
for (File xml : xmlFiles) {
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
req.addFile(xml, "application/xml");
client.request(req, PrometheusExporterTestBase.COLLECTION);
}
client.commit(PrometheusExporterTestBase.COLLECTION);
}
}

View File

@ -103,6 +103,9 @@ The path to the configuration file that defines the Solr metrics to read. The de
`-n`, `--num-threads`::
The number of threads. The `solr-exporter` creates thread pools for requests to Solr. Request latency can be improved by increasing the number of threads. The default is `1`.
`-s`, `--scrape-interval`::
The number of seconds between successive collections of metrics from Solr. The `solr-exporter` collects metrics on this fixed interval and caches them; the cached values are served regardless of how frequently Prometheus is configured to pull metrics from this tool. Reducing the scrape interval improves metric freshness, but do not set it to a very low value: metrics collection can be expensive, and it can execute arbitrary searches as well as pings against Solr. The default is `60` seconds.
The Solr metrics exposed by `solr-exporter` can be seen at: `\http://localhost:9983/solr/admin/metrics`.
== Exporter Configuration