Merge branch 'master' into enhancement/node_client_setting_removal

Original commit: elastic/x-pack-elasticsearch@c8a896b7e3
Commit 57114ae4f0 by javanna, 2016-03-23 17:51:54 +01:00 (committed by Luca Cavanna)
65 changed files with 3687 additions and 275 deletions

View File

@ -1,19 +1,38 @@
apply plugin: 'elasticsearch.esplugin'
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.VersionProperties
esplugin {
description 'a very basic implementation of a custom realm to validate it works'
classname 'org.elasticsearch.example.ExampleRealmPlugin'
isolated false
}
apply plugin: 'elasticsearch.build'
dependencies {
provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}"
testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
provided project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'runtime')
}
compileJava.options.compilerArgs << "-Xlint:-rawtypes"
//compileTestJava.options.compilerArgs << "-Xlint:-rawtypes"
Map generateSubstitutions() {
def stringSnap = { version ->
if (version.endsWith("-SNAPSHOT")) {
return version.substring(0, version.length() - 9)
}
return version
}
return [
'version': stringSnap(version),
'xpack.version': stringSnap(VersionProperties.elasticsearch),
'java.version': targetCompatibility as String
]
}
integTest {
processResources {
MavenFilteringHack.filter(it, generateSubstitutions())
}
task buildZip(type:Zip, dependsOn: [jar]) {
from 'build/resources/main/xpack-extension-descriptor.properties'
from project.jar
}
task integTest(type: org.elasticsearch.gradle.test.RestIntegTestTask, dependsOn: buildZip) {
cluster {
plugin 'x-pack', project(':x-plugins:elasticsearch:x-pack')
// TODO: these should be settings?
@ -24,6 +43,8 @@ integTest {
setupCommand 'setupDummyUser',
'bin/xpack/esusers', 'useradd', 'test_user', '-p', 'changeme', '-r', 'admin'
setupCommand 'installExtension',
'bin/xpack/extension', 'install', 'file:' + buildZip.archivePath
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}",
@ -36,4 +57,5 @@ integTest {
}
}
}
check.dependsOn integTest
integTest.mustRunAfter precommit

View File

@ -8,11 +8,10 @@ package org.elasticsearch.example;
import org.elasticsearch.example.realm.CustomAuthenticationFailureHandler;
import org.elasticsearch.example.realm.CustomRealm;
import org.elasticsearch.example.realm.CustomRealmFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.shield.authc.AuthenticationModule;
import org.elasticsearch.xpack.extensions.XPackExtension;
public class ExampleRealmPlugin extends Plugin {
public class ExampleRealmExtension extends XPackExtension {
@Override
public String name() {
return "custom realm example";

View File

@ -0,0 +1,6 @@
description=Custom Realm Extension
version=${version}
name=examplerealm
classname=org.elasticsearch.example.ExampleRealmExtension
java.version=${java.version}
xpack.version=${xpack.version}

View File

@ -13,6 +13,7 @@ import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.rest.client.http.HttpResponse;
@ -64,6 +65,7 @@ public class CustomRealmIT extends ESIntegTestCase {
Settings settings = Settings.builder()
.put("cluster.name", clusterName)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
.put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER)
.put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW)
.build();
@ -83,6 +85,7 @@ public class CustomRealmIT extends ESIntegTestCase {
Settings settings = Settings.builder()
.put("cluster.name", clusterName)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
.put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER + randomAsciiOfLength(1))
.put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW)
.build();

View File

@ -76,7 +76,12 @@ REM JAVA_OPTS=%JAVA_OPTS% -XX:HeapDumpPath=$ES_HOME/logs/heapdump.hprof
REM Disables explicit GC
set JAVA_OPTS=%JAVA_OPTS% -XX:+DisableExplicitGC
set ES_CLASSPATH=%ES_CLASSPATH%;%ES_HOME%/lib/*;%ES_HOME%/lib/sigar/*;%ES_HOME%/plugins/xpack/*
REM Avoid empty elements in classpath to make JarHell happy
if "%ES_CLASSPATH%" == "" (
set ES_CLASSPATH=%ES_HOME%/lib/*;%ES_HOME%/plugins/xpack/*
) else (
set ES_CLASSPATH=%ES_CLASSPATH%;%ES_HOME%/lib/*;%ES_HOME%/plugins/xpack/*
)
set ES_PARAMS=-Des.path.home="%ES_HOME%"
SET HOSTNAME=%COMPUTERNAME%

View File

@ -0,0 +1,116 @@
#!/bin/sh
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
SCRIPT="$0"
# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
ls=`ls -ld "$SCRIPT"`
# Drop everything prior to ->
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=`dirname "$SCRIPT"`/"$link"
fi
done
# determine elasticsearch home
ES_HOME=`dirname "$SCRIPT"`/../..
# make ES_HOME absolute
ES_HOME=`cd "$ES_HOME"; pwd`
# If an include wasn't specified in the environment, then search for one...
if [ "x$ES_INCLUDE" = "x" ]; then
# Locations (in order) to use when searching for an include file.
for include in /usr/share/elasticsearch/elasticsearch.in.sh \
/usr/local/share/elasticsearch/elasticsearch.in.sh \
/opt/elasticsearch/elasticsearch.in.sh \
~/.elasticsearch.in.sh \
"`dirname "$0"`"/../elasticsearch.in.sh \
"$ES_HOME/bin/elasticsearch.in.sh"; do
if [ -r "$include" ]; then
. "$include"
break
fi
done
# ...otherwise, source the specified include.
elif [ -r "$ES_INCLUDE" ]; then
. "$ES_INCLUDE"
fi
if [ -x "$JAVA_HOME/bin/java" ]; then
JAVA="$JAVA_HOME/bin/java"
else
JAVA=`which java`
fi
if [ ! -x "$JAVA" ]; then
echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
exit 1
fi
if [ -z "$ES_CLASSPATH" ]; then
echo "You must set the ES_CLASSPATH var" >&2
exit 1
fi
# Try to read package config files
if [ -f "/etc/sysconfig/elasticsearch" ]; then
CONF_DIR=/etc/elasticsearch
CONF_FILE=$CONF_DIR/elasticsearch.yml
. "/etc/sysconfig/elasticsearch"
elif [ -f "/etc/default/elasticsearch" ]; then
CONF_DIR=/etc/elasticsearch
CONF_FILE=$CONF_DIR/elasticsearch.yml
. "/etc/default/elasticsearch"
fi
# Parse any long getopt options and put them into properties before calling getopt below
# Be dash compatible to make sure running under Ubuntu works
ARGCOUNT=$#
COUNT=0
while [ $COUNT -lt $ARGCOUNT ]
do
case $1 in
--*=*) properties="$properties -Des.${1#--}"
shift 1; COUNT=$(($COUNT+1))
;;
--*) properties="$properties -Des.${1#--}=$2"
shift ; shift; COUNT=$(($COUNT+2))
;;
*) set -- "$@" "$1"; shift; COUNT=$(($COUNT+1))
esac
done
# check if properties already has a config file or config dir
if [ -e "$CONF_DIR" ]; then
case "$properties" in
*-Des.default.path.conf=*) ;;
*)
if [ ! -d "$CONF_DIR/xpack" ]; then
echo "ERROR: The configuration directory [$CONF_DIR/xpack] does not exist. The extension tool expects security configuration files in that location."
echo "The plugin may not have been installed with the correct configuration path. If [$ES_HOME/config/xpack] exists, please copy the 'xpack' directory to [$CONF_DIR]"
exit 1
fi
properties="$properties -Des.default.path.conf=$CONF_DIR"
;;
esac
fi
export HOSTNAME=`hostname -s`
# include x-pack jars in classpath
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/xpack/*"
cd "$ES_HOME" > /dev/null
"$JAVA" $ES_JAVA_OPTS -cp "$ES_CLASSPATH" -Des.path.home="$ES_HOME" $properties org.elasticsearch.xpack.extensions.XPackExtensionCli "$@"
status=$?
cd - > /dev/null
exit $status

View File

@ -0,0 +1,9 @@
@echo off
rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
rem or more contributor license agreements. Licensed under the Elastic License;
rem you may not use this file except in compliance with the Elastic License.
PUSHD "%~dp0"
CALL "%~dp0.in.bat" org.elasticsearch.xpack.extensions.XPackExtensionCli %*
POPD

View File

@ -6,8 +6,6 @@ esplugin {
name 'xpack'
description 'Elasticsearch Expanded Pack Plugin'
classname 'org.elasticsearch.xpack.XPackPlugin'
// FIXME we still can't be isolated due to shield custom realms
isolated false
}
ext.versions = [

View File

@ -5,59 +5,73 @@
*/
package org.elasticsearch.marvel;
import org.elasticsearch.client.Client;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.marvel.action.MonitoringBulkAction;
import org.elasticsearch.marvel.action.TransportMonitoringBulkAction;
import org.elasticsearch.marvel.agent.AgentService;
import org.elasticsearch.marvel.agent.collector.CollectorModule;
import org.elasticsearch.marvel.agent.exporter.ExporterModule;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.marvel.client.MonitoringClientModule;
import org.elasticsearch.marvel.license.LicenseModule;
import org.elasticsearch.marvel.license.MarvelLicensee;
import org.elasticsearch.marvel.rest.action.RestMonitoringBulkAction;
import org.elasticsearch.marvel.support.init.proxy.MonitoringClientProxy;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.common.init.LazyInitializationModule;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* This class activates/deactivates the monitoring modules depending on whether we're running a node client, transport client or tribe client:
* - node clients: all modules are bound
* - transport clients: only actions/transport actions are bound
* - tribe clients: everything is disabled by default but can be enabled per tribe cluster
*/
public class Marvel {
private static final ESLogger logger = Loggers.getLogger(XPackPlugin.class);
public static final String NAME = "monitoring";
private final Settings settings;
private final boolean enabled;
private final boolean transportClientMode;
public Marvel(Settings settings) {
this.settings = settings;
this.enabled = enabled(settings);
this.enabled = MarvelSettings.ENABLED.get(settings);
this.transportClientMode = XPackPlugin.transportClientMode(settings);
}
boolean isEnabled() {
return enabled;
}
public Collection<Module> nodeModules() {
List<Module> modules = new ArrayList<>();
boolean isTransportClient() {
return transportClientMode;
}
if (enabled) {
modules.add(new MarvelModule());
modules.add(new LicenseModule());
modules.add(new CollectorModule());
modules.add(new ExporterModule(settings));
public Collection<Module> nodeModules() {
if (enabled == false || transportClientMode) {
return Collections.emptyList();
}
return Collections.unmodifiableList(modules);
return Arrays.<Module>asList(
new MarvelModule(),
new LicenseModule(),
new CollectorModule(),
new ExporterModule(settings),
new MonitoringClientModule());
}
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
if (enabled == false) {
if (enabled == false || transportClientMode) {
return Collections.emptyList();
}
return Arrays.<Class<? extends LifecycleComponent>>asList(MarvelLicensee.class,
@ -65,15 +79,25 @@ public class Marvel {
CleanerService.class);
}
public static boolean enabled(Settings settings) {
if ("node".equals(settings.get(Client.CLIENT_TYPE_SETTING_S.getKey())) == false) {
logger.trace("monitoring cannot be started on a transport client");
return false;
}
return MarvelSettings.ENABLED.get(settings);
}
public void onModule(SettingsModule module) {
MarvelSettings.register(module);
}
public void onModule(ActionModule module) {
if (enabled) {
module.registerAction(MonitoringBulkAction.INSTANCE, TransportMonitoringBulkAction.class);
}
}
public void onModule(NetworkModule module) {
if (enabled && transportClientMode == false) {
module.registerRestHandler(RestMonitoringBulkAction.class);
}
}
public void onModule(LazyInitializationModule module) {
if (enabled) {
module.registerLazyInitializable(MonitoringClientProxy.class);
}
}
}
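
For illustration, a minimal sketch of the new gating: a client in transport mode gets no node-level monitoring modules. This is not part of the commit; the "client.type" key is an assumption about what XPackPlugin.transportClientMode(settings) reads.

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.Marvel;

public class MarvelGatingSketch {
    public static void main(String[] args) {
        // Assumption: "client.type" is the setting consulted by
        // XPackPlugin.transportClientMode(settings); "transport" marks a transport client.
        Settings settings = Settings.builder()
                .put("client.type", "transport")
                .build();
        Marvel marvel = new Marvel(settings);
        // With transportClientMode == true, nodeModules() short-circuits to an empty list.
        System.out.println(marvel.nodeModules().isEmpty()); // expected: true
    }
}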

View File

@ -0,0 +1,30 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
public class MonitoringBulkAction extends Action<MonitoringBulkRequest, MonitoringBulkResponse, MonitoringBulkRequestBuilder> {
public static final MonitoringBulkAction INSTANCE = new MonitoringBulkAction();
public static final String NAME = "cluster:admin/xpack/monitoring/bulk";
private MonitoringBulkAction() {
super(NAME);
}
@Override
public MonitoringBulkRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new MonitoringBulkRequestBuilder(client);
}
@Override
public MonitoringBulkResponse newResponse() {
return new MonitoringBulkResponse();
}
}

View File

@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import java.io.IOException;
public class MonitoringBulkDoc extends MonitoringDoc {
private String index;
private String type;
private String id;
private BytesReference source;
public MonitoringBulkDoc(String monitoringId, String monitoringVersion) {
super(monitoringId, monitoringVersion);
}
public MonitoringBulkDoc(StreamInput in) throws IOException {
super(in);
index = in.readOptionalString();
type = in.readOptionalString();
id = in.readOptionalString();
source = in.readBytesReference();
}
public String getIndex() {
return index;
}
public void setIndex(String index) {
this.index = index;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public BytesReference getSource() {
return source;
}
public void setSource(BytesReference source) {
this.source = source;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(index);
out.writeOptionalString(type);
out.writeOptionalString(id);
out.writeBytesReference(source);
}
@Override
public MonitoringBulkDoc readFrom(StreamInput in) throws IOException {
return new MonitoringBulkDoc(in);
}
}

View File

@ -0,0 +1,128 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* A monitoring bulk request holds one or more {@link MonitoringBulkDoc}s.
* <p>
* Every monitoring document added to the request is associated with a monitoring system id and version. If this {id, version} pair is
* supported by the monitoring plugin, the monitoring documents will be indexed in a single batch using a normal bulk request.
* <p>
* The monitoring {id, version} pair is used by {@link org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver} to resolve
* the index, type and id of the final document to be indexed. A {@link MonitoringBulkDoc} can also hold its own index/type/id values,
* but there is no guarantee that this information will actually be used.
*/
public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest> {
final List<MonitoringBulkDoc> docs = new ArrayList<>();
/**
* @return the list of monitoring documents to be indexed
*/
public Collection<MonitoringBulkDoc> getDocs() {
return Collections.unmodifiableCollection(new ArrayList<>(this.docs));
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (docs.isEmpty()) {
validationException = addValidationError("no monitoring documents added", validationException);
}
for (int i = 0; i < docs.size(); i++) {
MonitoringBulkDoc doc = docs.get(i);
if (Strings.hasLength(doc.getMonitoringId()) == false) {
validationException = addValidationError("monitored system id is missing for monitoring document [" + i + "]",
validationException);
}
if (Strings.hasLength(doc.getMonitoringVersion()) == false) {
validationException = addValidationError("monitored system version is missing for monitoring document [" + i + "]",
validationException);
}
if (Strings.hasLength(doc.getType()) == false) {
validationException = addValidationError("type is missing for monitoring document [" + i + "]",
validationException);
}
if (doc.getSource() == null || doc.getSource().length() == 0) {
validationException = addValidationError("source is missing for monitoring document [" + i + "]", validationException);
}
}
return validationException;
}
/**
* Adds a monitoring document to the list of documents to be indexed.
*/
public MonitoringBulkRequest add(MonitoringBulkDoc doc) {
docs.add(doc);
return this;
}
/**
* Parses a monitoring bulk request and builds the list of documents to be indexed.
*/
public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringVersion,
String defaultIndex, String defaultType) throws Exception {
// MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest:
// instead of duplicating the parsing logic here we use a new BulkRequest instance to parse the content.
BulkRequest bulkRequest = Requests.bulkRequest().add(content, defaultIndex, defaultType);
for (ActionRequest request : bulkRequest.requests()) {
if (request instanceof IndexRequest) {
IndexRequest indexRequest = (IndexRequest) request;
// builds a new monitoring document based on the index request
MonitoringBulkDoc doc = new MonitoringBulkDoc(defaultMonitoringId, defaultMonitoringVersion);
doc.setIndex(indexRequest.index());
doc.setType(indexRequest.type());
doc.setId(indexRequest.id());
doc.setSource(indexRequest.source());
add(doc);
} else {
throw new IllegalArgumentException("monitoring bulk requests should only contain index requests");
}
}
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
add(new MonitoringBulkDoc(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(docs.size());
for (MonitoringBulkDoc doc : docs) {
doc.writeTo(out);
}
}
}
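
A hypothetical sketch (not from this commit) of building a request from a bulk-formatted body; the system id "kibana", version "4.4.1", and index name ".monitoring-kibana" are illustrative values only:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.marvel.action.MonitoringBulkRequest;

public class MonitoringBulkRequestSketch {
    public static void main(String[] args) throws Exception {
        // Same newline-delimited format as a regular _bulk body:
        // an action line followed by a source line.
        String body = "{\"index\":{\"_type\":\"kibana_stats\"}}\n"
                + "{\"requests\":12,\"concurrent_connections\":3}\n";
        MonitoringBulkRequest request = new MonitoringBulkRequest()
                .add(new BytesArray(body), "kibana", "4.4.1", ".monitoring-kibana", "kibana_stats");
        // Every doc carries a system id, version, type and source, so validation passes.
        System.out.println(request.validate() == null); // expected: true
    }
}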

View File

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
public class MonitoringBulkRequestBuilder
extends ActionRequestBuilder<MonitoringBulkRequest, MonitoringBulkResponse, MonitoringBulkRequestBuilder> {
public MonitoringBulkRequestBuilder(ElasticsearchClient client) {
super(client, MonitoringBulkAction.INSTANCE, new MonitoringBulkRequest());
}
public MonitoringBulkRequestBuilder add(MonitoringBulkDoc doc) {
request.add(doc);
return this;
}
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultVersion, String defaultIndex,
String defaultType) throws Exception {
request.add(content, defaultId, defaultVersion, defaultIndex, defaultType);
return this;
}
}

View File

@ -0,0 +1,138 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Objects;
public class MonitoringBulkResponse extends ActionResponse {
private long tookInMillis;
private Error error;
MonitoringBulkResponse() {
}
public MonitoringBulkResponse(long tookInMillis) {
this(tookInMillis, null);
}
public MonitoringBulkResponse(long tookInMillis, Error error) {
this.tookInMillis = tookInMillis;
this.error = error;
}
public TimeValue getTook() {
return new TimeValue(tookInMillis);
}
public long getTookInMillis() {
return tookInMillis;
}
/**
* Returns HTTP status
* <ul>
* <li>{@link RestStatus#OK} if monitoring bulk request was successful</li>
* <li>{@link RestStatus#INTERNAL_SERVER_ERROR} if monitoring bulk request was partially successful or failed completely</li>
* </ul>
*/
public RestStatus status() {
return error == null ? RestStatus.OK : RestStatus.INTERNAL_SERVER_ERROR;
}
public Error getError() {
return error;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
tookInMillis = in.readVLong();
error = in.readOptionalWritable(Error::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVLong(tookInMillis);
out.writeOptionalWriteable(error);
}
public static class Error implements Writeable<Error>, ToXContent {
private final Throwable cause;
private final RestStatus status;
public Error(Throwable t) {
cause = Objects.requireNonNull(t);
status = ExceptionsHelper.status(t);
}
Error(StreamInput in) throws IOException {
this(in.<Throwable>readThrowable());
}
/**
* The failure message.
*/
public String getMessage() {
return this.cause.toString();
}
/**
* The rest status.
*/
public RestStatus getStatus() {
return this.status;
}
/**
* The actual cause of the failure.
*/
public Throwable getCause() {
return cause;
}
@Override
public Error readFrom(StreamInput in) throws IOException {
return new Error(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeThrowable(getCause());
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
ElasticsearchException.toXContent(builder, params, cause);
builder.endObject();
return builder;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("Error [");
sb.append("cause=").append(cause);
sb.append(", status=").append(status);
sb.append(']');
return sb.toString();
}
}
}
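
A small sketch of the status mapping described above, using only the constructors shown in this file (the took value and message are made up):

import org.elasticsearch.marvel.action.MonitoringBulkResponse;
import org.elasticsearch.rest.RestStatus;

public class MonitoringBulkResponseSketch {
    public static void main(String[] args) {
        // No error recorded: the bulk was fully successful.
        MonitoringBulkResponse ok = new MonitoringBulkResponse(42L);
        System.out.println(ok.status() == RestStatus.OK); // true

        // Any recorded error maps to INTERNAL_SERVER_ERROR.
        MonitoringBulkResponse failed = new MonitoringBulkResponse(42L,
                new MonitoringBulkResponse.Error(new RuntimeException("boom")));
        System.out.println(failed.status() == RestStatus.INTERNAL_SERVER_ERROR); // true
    }
}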

View File

@ -0,0 +1,127 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
public class TransportMonitoringBulkAction extends HandledTransportAction<MonitoringBulkRequest, MonitoringBulkResponse> {
private final ClusterService clusterService;
private final Exporters exportService;
@Inject
public TransportMonitoringBulkAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, Exporters exportService) {
super(settings, MonitoringBulkAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
MonitoringBulkRequest::new);
this.clusterService = clusterService;
this.exportService = exportService;
}
@Override
protected void doExecute(MonitoringBulkRequest request, ActionListener<MonitoringBulkResponse> listener) {
clusterService.state().blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE);
new AsyncAction(request, listener, exportService, clusterService).start();
}
class AsyncAction {
private final MonitoringBulkRequest request;
private final ActionListener<MonitoringBulkResponse> listener;
private final Exporters exportService;
private final ClusterService clusterService;
public AsyncAction(MonitoringBulkRequest request, ActionListener<MonitoringBulkResponse> listener,
Exporters exportService, ClusterService clusterService) {
this.request = request;
this.listener = listener;
this.exportService = exportService;
this.clusterService = clusterService;
}
void start() {
executeExport(prepareForExport(request.getDocs()), System.nanoTime(), listener);
}
/**
* Iterate over the documents and set the values of common fields if needed:
* - cluster UUID
* - timestamp
* - source node
*/
Collection<MonitoringDoc> prepareForExport(Collection<? extends MonitoringDoc> docs) {
final String clusterUUID = clusterService.state().metaData().clusterUUID();
Function<MonitoringDoc, MonitoringDoc> updateClusterUUID = doc -> {
if (doc.getClusterUUID() == null) {
doc.setClusterUUID(clusterUUID);
}
return doc;
};
final long timestamp = System.currentTimeMillis();
Function<MonitoringDoc, MonitoringDoc> updateTimestamp = doc -> {
if (doc.getTimestamp() == 0) {
doc.setTimestamp(timestamp);
}
return doc;
};
final DiscoveryNode sourceNode = clusterService.localNode();
Function<MonitoringDoc, MonitoringDoc> updateSourceNode = doc -> {
if (doc.getSourceNode() == null) {
doc.setSourceNode(sourceNode);
}
return doc;
};
return docs.stream()
.map(updateClusterUUID.andThen(updateTimestamp.andThen(updateSourceNode)))
.collect(Collectors.toList());
}
/**
* Exports the documents
*/
void executeExport(final Collection<MonitoringDoc> docs, final long startTimeNanos,
final ActionListener<MonitoringBulkResponse> listener) {
threadPool.generic().execute(new AbstractRunnable() {
@Override
protected void doRun() throws Exception {
exportService.export(docs);
listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos)));
}
@Override
public void onFailure(Throwable t) {
listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos), new MonitoringBulkResponse.Error(t)));
}
});
}
}
private long buildTookInMillis(long startTimeNanos) {
return TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNanos);
}
}

View File

@ -17,6 +17,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.agent.collector.Collector;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStatsCollector;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
@ -199,6 +200,8 @@ public class AgentService extends AbstractLifecycleComponent<AgentService> {
exporters.export(docs);
}
} catch (ExportException e) {
logger.error("exception when exporting documents", e);
} catch (InterruptedException e) {
logger.trace("interrupted");
Thread.currentThread().interrupt();

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.marvel.agent.exporter;
import org.elasticsearch.ElasticsearchException;
import java.util.Collection;
/**
@ -25,18 +23,14 @@ public abstract class ExportBulk {
return name;
}
public abstract ExportBulk add(Collection<MonitoringDoc> docs) throws Exception;
public abstract ExportBulk add(Collection<MonitoringDoc> docs) throws ExportException;
public abstract void flush() throws Exception;
public abstract void flush() throws ExportException;
public final void close(boolean flush) throws Exception {
Exception exception = null;
public final void close(boolean flush) throws ExportException {
ExportException exception = null;
if (flush) {
try {
flush();
} catch (Exception e) {
exception = e;
}
flush();
}
// now closing
@ -46,7 +40,7 @@ public abstract class ExportBulk {
if (exception != null) {
exception.addSuppressed(e);
} else {
exception = e;
exception = new ExportException("Exception when closing export bulk", e);
}
}
@ -69,24 +63,35 @@ public abstract class ExportBulk {
}
@Override
public ExportBulk add(Collection<MonitoringDoc> docs) throws Exception {
public ExportBulk add(Collection<MonitoringDoc> docs) throws ExportException {
ExportException exception = null;
for (ExportBulk bulk : bulks) {
bulk.add(docs);
try {
bulk.add(docs);
} catch (ExportException e) {
if (exception == null) {
exception = new ExportException("failed to add documents to export bulks");
}
exception.addExportException(e);
}
}
if (exception != null) {
throw exception;
}
return this;
}
@Override
public void flush() throws Exception {
Exception exception = null;
public void flush() throws ExportException {
ExportException exception = null;
for (ExportBulk bulk : bulks) {
try {
bulk.flush();
} catch (Exception e) {
} catch (ExportException e) {
if (exception == null) {
exception = new ElasticsearchException("failed to flush exporter bulks");
exception = new ExportException("failed to flush export bulks");
}
exception.addSuppressed(new ElasticsearchException("failed to flush [{}] exporter bulk", e, bulk.name));
exception.addExportException(e);
}
}
if (exception != null) {

View File

@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.exporter;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class ExportException extends ElasticsearchException implements Iterable<ExportException> {
private final List<ExportException> exceptions = new ArrayList<>();
public ExportException(Throwable throwable) {
super(throwable);
}
public ExportException(String msg, Object... args) {
super(msg, args);
}
public ExportException(String msg, Throwable throwable, Object... args) {
super(msg, throwable, args);
}
public ExportException(StreamInput in) throws IOException {
super(in);
for (int i = in.readVInt(); i > 0; i--) {
exceptions.add(new ExportException(in));
}
}
public boolean addExportException(ExportException e) {
return exceptions.add(e);
}
public boolean hasExportExceptions() {
return exceptions.size() > 0;
}
@Override
public Iterator<ExportException> iterator() {
return exceptions.iterator();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(exceptions.size());
for (ExportException e : exceptions) {
e.writeTo(out);
}
}
@Override
protected void innerToXContent(XContentBuilder builder, Params params) throws IOException {
super.innerToXContent(builder, params);
if (hasExportExceptions()) {
builder.startArray("exceptions");
for (ExportException exception : exceptions) {
builder.startObject();
exception.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
}
}
}
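
A sketch of the aggregation pattern this class enables: collect per-exporter failures under one parent exception and iterate them later (the messages are made up):

import org.elasticsearch.marvel.agent.exporter.ExportException;

public class ExportExceptionSketch {
    public static void main(String[] args) {
        ExportException parent = new ExportException("failed to flush export bulks");
        parent.addExportException(new ExportException("failed to flush export bulk [{}]", "http"));
        parent.addExportException(new ExportException(new RuntimeException("connection reset")));

        if (parent.hasExportExceptions()) {
            // ExportException is Iterable over its collected child failures.
            for (ExportException child : parent) {
                System.out.println(child.getMessage());
            }
        }
    }
}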

View File

@ -122,7 +122,7 @@ public class Exporters extends AbstractLifecycleComponent<Exporters> implements
bulks.add(bulk);
}
} catch (Exception e) {
logger.error("exporter [{}] failed to export monitoring data", e, exporter.name());
logger.error("exporter [{}] failed to open exporting bulk", e, exporter.name());
}
}
return bulks.isEmpty() ? null : new ExportBulk.Compound(bulks);
@ -179,9 +179,9 @@ public class Exporters extends AbstractLifecycleComponent<Exporters> implements
/**
* Exports a collection of monitoring documents using the configured exporters
*/
public synchronized void export(Collection<MonitoringDoc> docs) throws Exception {
public synchronized void export(Collection<MonitoringDoc> docs) throws ExportException {
if (this.lifecycleState() != Lifecycle.State.STARTED) {
throw new IllegalStateException("Export service is not started");
throw new ExportException("Export service is not started");
}
if (docs != null && docs.size() > 0) {
ExportBulk bulk = openBulk();
@ -191,7 +191,6 @@ public class Exporters extends AbstractLifecycleComponent<Exporters> implements
}
try {
logger.debug("exporting [{}] monitoring documents", docs.size());
bulk.add(docs);
} finally {
bulk.close(lifecycleState() == Lifecycle.State.STARTED);

View File

@ -22,8 +22,6 @@ import java.io.IOException;
*/
public class MonitoringDoc implements Writeable<MonitoringDoc> {
private static final MonitoringDoc PROTO = new MonitoringDoc();
private final String monitoringId;
private final String monitoringVersion;
@ -31,16 +29,18 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
private long timestamp;
private Node sourceNode;
// Used by {@link #PROTO} instance and tests
MonitoringDoc() {
this(null, null);
}
public MonitoringDoc(String monitoringId, String monitoringVersion) {
this.monitoringId = monitoringId;
this.monitoringVersion = monitoringVersion;
}
public MonitoringDoc(StreamInput in) throws IOException {
this(in.readOptionalString(), in.readOptionalString());
clusterUUID = in.readOptionalString();
timestamp = in.readVLong();
sourceNode = in.readOptionalWritable(Node::new);
}
public String getClusterUUID() {
return clusterUUID;
}
@ -80,7 +80,7 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
@Override
public String toString() {
return "marvel document [class=" + getClass().getName() +
return "monitoring document [class=" + getClass().getSimpleName() +
", monitoring id=" + getMonitoringId() +
", monitoring version=" + getMonitoringVersion() +
"]";
@ -92,33 +92,16 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
out.writeOptionalString(getMonitoringVersion());
out.writeOptionalString(getClusterUUID());
out.writeVLong(getTimestamp());
if (getSourceNode() != null) {
out.writeBoolean(true);
getSourceNode().writeTo(out);
} else {
out.writeBoolean(false);
}
out.writeOptionalWriteable(getSourceNode());
}
@Override
public MonitoringDoc readFrom(StreamInput in) throws IOException {
MonitoringDoc doc = new MonitoringDoc(in.readOptionalString(), in.readOptionalString());
doc.setClusterUUID(in.readOptionalString());
doc.setTimestamp(in.readVLong());
if (in.readBoolean()) {
doc.setSourceNode(Node.PROTO.readFrom(in));
}
return doc;
}
public static MonitoringDoc readMonitoringDoc(StreamInput in) throws IOException {
return PROTO.readFrom(in);
return new MonitoringDoc(in);
}
public static class Node implements Writeable<Node>, ToXContent {
public static final Node PROTO = new Node();
private String uuid;
private String host;
private String transportAddress;
@ -126,10 +109,6 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
private String name;
private ImmutableOpenMap<String, String> attributes;
// Used by the {@link #PROTO} instance
Node() {
}
public Node(String uuid, String host, String transportAddress, String ip, String name,
ImmutableOpenMap<String, String> attributes) {
this.uuid = uuid;
@ -147,6 +126,20 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
this.attributes = builder.build();
}
public Node(StreamInput in) throws IOException {
uuid = in.readOptionalString();
host = in.readOptionalString();
transportAddress = in.readOptionalString();
ip = in.readOptionalString();
name = in.readOptionalString();
int size = in.readVInt();
ImmutableOpenMap.Builder<String, String> attributes = ImmutableOpenMap.builder(size);
for (int i = 0; i < size; i++) {
attributes.put(in.readOptionalString(), in.readOptionalString());
}
this.attributes = attributes.build();
}
public String getUUID() {
return uuid;
}
@ -208,19 +201,7 @@ public class MonitoringDoc implements Writeable<MonitoringDoc> {
@Override
public Node readFrom(StreamInput in) throws IOException {
Node node = new Node();
node.uuid = in.readOptionalString();
node.host = in.readOptionalString();
node.transportAddress = in.readOptionalString();
node.ip = in.readOptionalString();
node.name = in.readOptionalString();
int size = in.readVInt();
ImmutableOpenMap.Builder<String, String> attributes = ImmutableOpenMap.builder(size);
for (int i = 0; i < size; i++) {
attributes.put(in.readOptionalString(), in.readOptionalString());
}
node.attributes = attributes.build();
return node;
return new Node(in);
}
@Override
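
A round-trip sketch of the new StreamInput-constructor deserialization that replaces the PROTO pattern; the field values are illustrative:

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;

public class MonitoringDocRoundTripSketch {
    public static void main(String[] args) throws Exception {
        MonitoringDoc doc = new MonitoringDoc("es", "2.3.0");
        doc.setClusterUUID("abc-123");
        doc.setTimestamp(System.currentTimeMillis());

        // Serialize, then rebuild through the StreamInput constructor.
        BytesStreamOutput out = new BytesStreamOutput();
        doc.writeTo(out);
        MonitoringDoc copy = new MonitoringDoc(StreamInput.wrap(out.bytes()));
        System.out.println(copy.getClusterUUID()); // abc-123
    }
}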

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
@ -79,7 +80,9 @@ public class HttpExporter extends Exporter {
public static final String SSL_TRUSTSTORE_ALGORITHM_SETTING = "truststore.algorithm";
public static final String SSL_HOSTNAME_VERIFICATION_SETTING = SSL_SETTING + ".hostname_verification";
/** Minimum supported version of the remote monitoring cluster **/
/**
* Minimum supported version of the remote monitoring cluster
**/
public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_2_0_0_beta2;
private static final XContentType CONTENT_TYPE = XContentType.JSON;
@ -89,19 +92,24 @@ public class HttpExporter extends Exporter {
final TimeValue connectionReadTimeout;
final BasicAuth auth;
/** https support * */
/**
* https support *
*/
final SSLSocketFactory sslSocketFactory;
final boolean hostnameVerification;
final Environment env;
final ResolversRegistry resolvers;
final @Nullable TimeValue templateCheckTimeout;
@Nullable
final TimeValue templateCheckTimeout;
volatile boolean checkedAndUploadedIndexTemplate = false;
volatile boolean supportedClusterVersion = false;
/** Version number of built-in templates **/
/**
* Version number of built-in templates
**/
private final Integer templateVersion;
boolean keepAlive;
@ -218,9 +226,9 @@ public class HttpExporter extends Exporter {
logger.trace("http exporter [{}] - added index request [index={}, type={}, id={}]",
name(), index, type, id);
}
} else {
logger.warn("http exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
name(), doc);
} else if (logger.isTraceEnabled()) {
logger.trace("http exporter [{}] - no resolver found for monitoring document [class={}, id={}, version={}]",
name(), doc.getClass().getName(), doc.getMonitoringId(), doc.getMonitoringVersion());
}
} catch (Exception e) {
logger.warn("http exporter [{}] - failed to render document [{}], skipping it", e, name(), doc);
@ -318,7 +326,9 @@ public class HttpExporter extends Exporter {
return null;
}
/** open a connection to the given hosts, returning null when not successful * */
/**
* open a connection to the given hosts, returning null when not successful *
*/
private HttpURLConnection openConnection(String host, String method, String path, @Nullable String contentType) {
try {
final URL url = HttpExporterUtils.parseHostWithPath(host, path);
@ -450,7 +460,7 @@ public class HttpExporter extends Exporter {
// 200 means that the template has been found, 404 otherwise
if (connection.getResponseCode() == 200) {
logger.debug("monitoring template [{}] found",templateName);
logger.debug("monitoring template [{}] found", templateName);
return true;
}
} catch (Exception e) {
@ -543,7 +553,9 @@ public class HttpExporter extends Exporter {
}
}
/** SSL Initialization * */
/**
* SSL Initialization *
*/
public SSLSocketFactory createSSLSocketFactory(Settings settings) {
if (settings.names().isEmpty()) {
logger.trace("no ssl context configured");
@ -693,47 +705,54 @@ public class HttpExporter extends Exporter {
}
@Override
public Bulk add(Collection<MonitoringDoc> docs) throws Exception {
if (connection == null) {
connection = openExportingConnection();
}
if ((docs != null) && (!docs.isEmpty())) {
if (out == null) {
out = connection.getOutputStream();
}
public Bulk add(Collection<MonitoringDoc> docs) throws ExportException {
try {
if ((docs != null) && (!docs.isEmpty())) {
if (connection == null) {
connection = openExportingConnection();
if (connection == null) {
throw new IllegalStateException("No connection available to export documents");
}
}
if (out == null) {
out = connection.getOutputStream();
}
// We need to use a buffer to render each monitoring document
// because the renderer might close the outputstream (ex: XContentBuilder)
try (BytesStreamOutput buffer = new BytesStreamOutput()) {
for (MonitoringDoc monitoringDoc : docs) {
try {
render(monitoringDoc, buffer);
// write the result to the connection
out.write(buffer.bytes().toBytes());
} finally {
buffer.reset();
// We need to use a buffer to render each monitoring document
// because the renderer might close the outputstream (ex: XContentBuilder)
try (BytesStreamOutput buffer = new BytesStreamOutput()) {
for (MonitoringDoc monitoringDoc : docs) {
try {
render(monitoringDoc, buffer);
// write the result to the connection
out.write(buffer.bytes().toBytes());
} finally {
buffer.reset();
}
}
}
}
} catch (Exception e) {
throw new ExportException("failed to add documents to export bulk [{}]", name);
}
return this;
}
@Override
public void flush() throws IOException {
public void flush() throws ExportException {
if (connection != null) {
flush(connection);
connection = null;
try {
flush(connection);
} catch (Exception e) {
throw new ExportException("failed to flush export bulk [{}]", e, name);
} finally {
connection = null;
}
}
}
private void flush(HttpURLConnection connection) throws IOException {
try {
sendCloseExportingConnection(connection);
} catch (IOException e) {
logger.error("failed sending data to [{}]: {}", connection.getURL(), ExceptionsHelper.detailedMessage(e));
throw e;
}
sendCloseExportingConnection(connection);
}
}

View File

@ -5,20 +5,20 @@
*/
package org.elasticsearch.marvel.agent.exporter.local;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;
import org.elasticsearch.marvel.support.init.proxy.MonitoringClientProxy;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicReference;
@ -28,13 +28,13 @@ import java.util.concurrent.atomic.AtomicReference;
public class LocalBulk extends ExportBulk {
private final ESLogger logger;
private final Client client;
private final MonitoringClientProxy client;
private final ResolversRegistry resolvers;
BulkRequestBuilder requestBuilder;
AtomicReference<State> state = new AtomicReference<>();
public LocalBulk(String name, ESLogger logger, Client client, ResolversRegistry resolvers) {
public LocalBulk(String name, ESLogger logger, MonitoringClientProxy client, ResolversRegistry resolvers) {
super(name);
this.logger = logger;
this.client = client;
@ -43,7 +43,9 @@ public class LocalBulk extends ExportBulk {
}
@Override
public synchronized ExportBulk add(Collection<MonitoringDoc> docs) throws Exception {
public synchronized ExportBulk add(Collection<MonitoringDoc> docs) throws ExportException {
ExportException exception = null;
for (MonitoringDoc doc : docs) {
if (state.get() != State.ACTIVE) {
return this;
@ -54,42 +56,61 @@ public class LocalBulk extends ExportBulk {
try {
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
if (resolver != null) {
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc));
request.source(resolver.source(doc, XContentType.SMILE));
requestBuilder.add(request);
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc));
request.source(resolver.source(doc, XContentType.SMILE));
requestBuilder.add(request);
if (logger.isTraceEnabled()) {
logger.trace("local exporter [{}] - added index request [index={}, type={}, id={}]",
name, request.index(), request.type(), request.id());
}
} else {
logger.warn("local exporter [{}] - unable to render monitoring document of type [{}]: no renderer found in registry",
name, doc);
if (logger.isTraceEnabled()) {
logger.trace("local exporter [{}] - added index request [index={}, type={}, id={}]",
name, request.index(), request.type(), request.id());
}
} catch (Exception e) {
logger.warn("local exporter [{}] - failed to add document [{}], skipping it", e, name, doc);
if (exception == null) {
exception = new ExportException("failed to add documents to export bulk [{}]", name);
}
exception.addExportException(new ExportException("failed to add document [{}]", e, doc, name));
}
}
if (exception != null) {
throw exception;
}
return this;
}
@Override
public void flush() throws IOException {
if (state.get() != State.ACTIVE || requestBuilder == null) {
public void flush() throws ExportException {
if (state.get() != State.ACTIVE || requestBuilder == null || requestBuilder.numberOfActions() == 0) {
return;
}
try {
logger.trace("exporter [{}] - exporting {} documents", name, requestBuilder.numberOfActions());
BulkResponse bulkResponse = requestBuilder.get();
if (bulkResponse.hasFailures()) {
throw new ElasticsearchException(buildFailureMessage(bulkResponse));
throwExportException(bulkResponse.getItems());
}
} catch (Exception e) {
throw new ExportException("failed to flush export bulk [{}]", e, name);
} finally {
requestBuilder = null;
}
}
void throwExportException(BulkItemResponse[] bulkItemResponses) {
ExportException exception = new ExportException("bulk [{}] reports failures when exporting documents", name);
Arrays.stream(bulkItemResponses)
.filter(BulkItemResponse::isFailed)
.map(item -> new ExportException(item.getFailure().getCause()))
.forEach(exception::addExportException);
if (exception.hasExportExceptions()) {
throw exception;
}
}
void terminate() {
state.set(State.TERMINATING);
synchronized (this) {
@ -98,31 +119,6 @@ public class LocalBulk extends ExportBulk {
}
}
/**
* In case something goes wrong and there are a lot of shards/indices,
* we limit the number of failures displayed in the log.
*/
private String buildFailureMessage(BulkResponse bulkResponse) {
BulkItemResponse[] items = bulkResponse.getItems();
if (logger.isDebugEnabled() || (items.length < 100)) {
return bulkResponse.buildFailureMessage();
}
StringBuilder sb = new StringBuilder();
sb.append("failure in bulk execution, only the first 100 failures are printed:");
for (int i = 0; i < items.length && i < 100; i++) {
BulkItemResponse item = items[i];
if (item.isFailed()) {
sb.append("\n[").append(i)
.append("]: index [").append(item.getIndex()).append("], type [").append(item.getType())
.append("], id [").append(item.getId()).append("], message [").append(item.getFailureMessage())
.append("]");
}
}
return sb.toString();
}
enum State {
ACTIVE,
TERMINATING,

View File

@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@ -34,7 +33,7 @@ import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.shield.InternalClient;
import org.elasticsearch.marvel.support.init.proxy.MonitoringClientProxy;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -52,7 +51,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
public static final String TYPE = "local";
private final Client client;
private final MonitoringClientProxy client;
private final ClusterService clusterService;
private final ResolversRegistry resolvers;
private final CleanerService cleanerService;
@ -63,7 +62,8 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
/** Version number of built-in templates **/
private final Integer templateVersion;
public LocalExporter(Exporter.Config config, Client client, ClusterService clusterService, CleanerService cleanerService) {
public LocalExporter(Exporter.Config config, MonitoringClientProxy client,
ClusterService clusterService, CleanerService cleanerService) {
super(TYPE, config);
this.client = client;
this.clusterService = clusterService;
@ -282,7 +282,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
.distinct()
.toArray(String[]::new);
MonitoringDoc monitoringDoc = new MonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
MonitoringDoc monitoringDoc = new MonitoringDoc(null, null);
monitoringDoc.setTimestamp(System.currentTimeMillis());
// Get the names of the current monitoring indices
@ -344,12 +344,12 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
public static class Factory extends Exporter.Factory<LocalExporter> {
private final InternalClient client;
private final MonitoringClientProxy client;
private final ClusterService clusterService;
private final CleanerService cleanerService;
@Inject
public Factory(InternalClient client, ClusterService clusterService, CleanerService cleanerService) {
public Factory(MonitoringClientProxy client, ClusterService clusterService, CleanerService cleanerService) {
super(TYPE, true);
this.client = client;
this.clusterService = clusterService;

View File

@ -5,8 +5,10 @@
*/
package org.elasticsearch.marvel.agent.resolver;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.action.MonitoringBulkDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterInfoMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
@ -19,6 +21,7 @@ import org.elasticsearch.marvel.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.bulk.MonitoringBulkResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterInfoResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterStateNodeResolver;
import org.elasticsearch.marvel.agent.resolver.cluster.ClusterStateResolver;
@ -46,8 +49,8 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
// register built-in defaults resolvers
registerBuiltIn(ES, MarvelTemplateUtils.TEMPLATE_VERSION, settings);
// register resolvers for external applications, something like:
//registrations.add(resolveByIdVersion(MonitoringIds.KIBANA, "4.4.1", new KibanaDocResolver(KIBANA, 0, settings)));
// register resolvers for external applications
registerKibana(settings);
}
/**
@ -66,6 +69,14 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
registrations.add(resolveByClass(ShardMonitoringDoc.class, new ShardsResolver(id, version, settings)));
}
/**
* Registers resolvers for Kibana
*/
private void registerKibana(Settings settings) {
final MonitoringBulkResolver kibana = new MonitoringBulkResolver(MonitoredSystem.KIBANA, 0, settings);
registrations.add(resolveByClassSystemVersion(MonitoringBulkDoc.class, MonitoredSystem.KIBANA, Version.CURRENT, kibana));
}
/**
* @return a Resolver that is able to resolve the given monitoring document
*/
@ -75,8 +86,7 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
return registration.resolver();
}
}
throw new IllegalArgumentException("No resolver found for monitoring document [class=" + document.getClass().getName()
+ ", id=" + document.getMonitoringId() + ", version=" + document.getMonitoringVersion() + "]");
throw new IllegalArgumentException("No resolver found for monitoring document");
}
@Override
@ -88,6 +98,23 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
return new Registration(resolver, type::isInstance);
}
static Registration resolveByClassSystemVersion(Class<? extends MonitoringDoc> type, MonitoredSystem system, Version version,
MonitoringIndexNameResolver resolver) {
return new Registration(resolver, doc -> {
try {
if (type.isInstance(doc) == false) {
return false;
}
if (system != MonitoredSystem.fromSystem(doc.getMonitoringId())) {
return false;
}
return version == Version.fromString(doc.getMonitoringVersion());
} catch (Exception e) {
return false;
}
});
}
static class Registration {
private final MonitoringIndexNameResolver resolver;
@ -106,5 +133,4 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
return resolver;
}
}
}
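
A lookup sketch for the new Kibana registration. Assumptions, hedged: ResolversRegistry is constructed from Settings as in the snippet above, and MonitoredSystem.fromSystem accepts the "kibana" id.

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.action.MonitoringBulkDoc;
import org.elasticsearch.marvel.agent.resolver.ResolversRegistry;

public class ResolverLookupSketch {
    public static void main(String[] args) {
        ResolversRegistry registry = new ResolversRegistry(Settings.EMPTY);
        // Matches the registration added by registerKibana(): the doc must be a
        // MonitoringBulkDoc with system id "kibana" and the current version.
        MonitoringBulkDoc doc = new MonitoringBulkDoc("kibana", Version.CURRENT.toString());
        System.out.println(registry.getResolver(doc).getClass().getSimpleName());
        // expected: MonitoringBulkResolver
    }
}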

View File

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.bulk;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.action.MonitoringBulkDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import java.io.IOException;
public class MonitoringBulkResolver extends MonitoringIndexNameResolver.Timestamped<MonitoringBulkDoc> {
public MonitoringBulkResolver(MonitoredSystem id, int version, Settings settings) {
super(id, version, settings);
}
@Override
public String type(MonitoringBulkDoc document) {
return document.getType();
}
@Override
protected void buildXContent(MonitoringBulkDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
BytesReference source = document.getSource();
if (source != null && source.length() > 0) {
builder.rawField(type(document), source);
}
}
}
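
For orientation, a rough sketch of what this resolver produces for a Kibana document (the index name and top-level field names are taken from MonitoringBulkResolverTests further below; the exact source layout is illustrative):

    // a MonitoringBulkDoc with type "kibana_stats" and source {"field1" : "value1"}
    // resolves to an index such as .monitoring-kibana-0-2015.07.22 with a source like:
    {
      "cluster_uuid" : "...",
      "timestamp" : "...",
      "source_node" : { ... },
      "kibana_stats" : { "field1" : "value1" }
    }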

View File

@ -0,0 +1,60 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.client;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.marvel.action.MonitoringBulkAction;
import org.elasticsearch.marvel.action.MonitoringBulkRequest;
import org.elasticsearch.marvel.action.MonitoringBulkRequestBuilder;
import org.elasticsearch.marvel.action.MonitoringBulkResponse;
import java.util.Map;
public class MonitoringClient {
private final Client client;
@Inject
public MonitoringClient(Client client) {
this.client = client;
}
/**
* Creates a request builder that bulk indexes monitoring documents.
*
* @return The request builder
*/
public MonitoringBulkRequestBuilder prepareMonitoringBulk() {
return new MonitoringBulkRequestBuilder(client);
}
/**
* Executes a bulk of index operations on monitoring documents.
*
* @param request The monitoring bulk request
* @param listener A listener to be notified with a result
*/
public void bulk(MonitoringBulkRequest request, ActionListener<MonitoringBulkResponse> listener) {
client.execute(MonitoringBulkAction.INSTANCE, request, listener);
}
/**
* Executes a bulk of index operations on monitoring documents.
*
* @param request The monitoring bulk request
*/
public ActionFuture<MonitoringBulkResponse> bulk(MonitoringBulkRequest request) {
return client.execute(MonitoringBulkAction.INSTANCE, request);
}
public MonitoringClient filterWithHeader(Map<String, String> headers) {
return new MonitoringClient(client.filterWithHeader(headers));
}
}
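
A minimal usage sketch of this client (the doc type and source are illustrative values borrowed from the tests further below):

    MonitoringClient monitoringClient = new MonitoringClient(client);
    MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
    doc.setType("kibana_stats");
    doc.setSource(new BytesArray("{\"key\" : \"value\"}"));
    MonitoringBulkRequestBuilder requestBuilder = monitoringClient.prepareMonitoringBulk();
    requestBuilder.add(doc);
    MonitoringBulkResponse response = requestBuilder.get();
    assert response.getError() == null; // a null error means the docs were exported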

View File

@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.client;
import org.elasticsearch.common.inject.AbstractModule;
public class MonitoringClientModule extends AbstractModule {
@Override
protected void configure() {
bind(MonitoringClient.class).asEagerSingleton();
}
}

View File

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.rest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.marvel.client.MonitoringClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.XPackPlugin;
import java.util.Locale;
public abstract class MonitoringRestHandler extends BaseRestHandler {
protected static String URI_BASE = String.format(Locale.ROOT, "/_%s/monitoring", XPackPlugin.NAME);
public MonitoringRestHandler(Settings settings, Client client) {
super(settings, client);
}
@Override
protected final void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {
handleRequest(request, channel, new MonitoringClient(client));
}
protected abstract void handleRequest(RestRequest request, RestChannel channel, MonitoringClient client) throws Exception;
}

View File

@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.rest.action;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.marvel.action.MonitoringBulkRequestBuilder;
import org.elasticsearch.marvel.action.MonitoringBulkResponse;
import org.elasticsearch.marvel.client.MonitoringClient;
import org.elasticsearch.marvel.rest.MonitoringRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestBuilderListener;
public class RestMonitoringBulkAction extends MonitoringRestHandler {
public static final String MONITORING_ID = "system_id";
public static final String MONITORING_VERSION = "system_version";
@Inject
public RestMonitoringBulkAction(Settings settings, RestController controller, Client client) {
super(settings, client);
controller.registerHandler(RestRequest.Method.POST, URI_BASE + "/_bulk", this);
controller.registerHandler(RestRequest.Method.PUT, URI_BASE + "/_bulk", this);
controller.registerHandler(RestRequest.Method.POST, URI_BASE + "/{index}/_bulk", this);
controller.registerHandler(RestRequest.Method.PUT, URI_BASE + "/{index}/_bulk", this);
controller.registerHandler(RestRequest.Method.POST, URI_BASE + "/{index}/{type}/_bulk", this);
controller.registerHandler(RestRequest.Method.PUT, URI_BASE + "/{index}/{type}/_bulk", this);
}
@Override
protected void handleRequest(RestRequest request, RestChannel channel, MonitoringClient client) throws Exception {
String defaultIndex = request.param("index");
String defaultType = request.param("type");
String id = request.param(MONITORING_ID);
if (Strings.hasLength(id) == false) {
throw new IllegalArgumentException("no monitoring id for monitoring bulk request");
}
String version = request.param(MONITORING_VERSION);
if (Strings.hasLength(version) == false) {
throw new IllegalArgumentException("no monitoring version for monitoring bulk request");
}
if (!RestActions.hasBodyContent(request)) {
throw new ElasticsearchParseException("no body content for monitoring bulk request");
}
MonitoringBulkRequestBuilder requestBuilder = client.prepareMonitoringBulk();
requestBuilder.add(request.content(), id, version, defaultIndex, defaultType);
requestBuilder.execute(new RestBuilderListener<MonitoringBulkResponse>(channel) {
@Override
public RestResponse buildResponse(MonitoringBulkResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
builder.field(Fields.TOOK, response.getTookInMillis());
MonitoringBulkResponse.Error error = response.getError();
builder.field(Fields.ERRORS, error != null);
if (error != null) {
builder.field(Fields.ERROR, response.getError());
}
builder.endObject();
return new BytesRestResponse(response.status(), builder);
}
});
}
static final class Fields {
static final XContentBuilderString TOOK = new XContentBuilderString("took");
static final XContentBuilderString ERRORS = new XContentBuilderString("errors");
static final XContentBuilderString ERROR = new XContentBuilderString("error");
}
}
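
For reference, a sketch of the request shape this handler accepts (assuming XPackPlugin.NAME resolves URI_BASE to /_xpack/monitoring; the parameter values and body are illustrative, and the body follows the same newline-delimited format as the regular bulk API):

    POST /_xpack/monitoring/_bulk?system_id=kibana&system_version=4.4.1
    { "index" : {} }
    { "key" : "value" }
    { "index" : {} }
    { "key" : "value" }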

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.support.init.proxy;
import org.elasticsearch.client.Client;
import org.elasticsearch.shield.InternalClient;
import org.elasticsearch.xpack.common.init.proxy.ClientProxy;
public class MonitoringClientProxy extends ClientProxy {
/**
* Creates a proxy to the given internal client (can be used for testing)
*/
public static MonitoringClientProxy of(Client client) {
MonitoringClientProxy proxy = new MonitoringClientProxy();
proxy.client = client instanceof InternalClient ? (InternalClient) client : new InternalClient.Insecure(client);
return proxy;
}
}

View File

@ -1,7 +1,6 @@
{
"template": ".monitoring-data-${monitoring.template.version}",
"settings": {
"index.xpack.version": "${project.version}",
"index.number_of_shards": 1,
"index.number_of_replicas": 1,
"index.codec": "best_compression",
@ -9,7 +8,10 @@
},
"mappings": {
"cluster_info": {
"enabled": false
"enabled": false,
"_meta": {
"xpack.version": "${project.version}"
}
},
"node": {
"enabled": false

View File

@ -1,7 +1,6 @@
{
"template": ".monitoring-es-${monitoring.template.version}-*",
"settings": {
"index.xpack.version": "${project.version}",
"index.number_of_shards": 1,
"index.number_of_replicas": 1,
"index.codec": "best_compression",

View File

@ -22,7 +22,8 @@ public class MarvelPluginClientTests extends ESTestCase {
.build();
Marvel plugin = new Marvel(settings);
assertThat(plugin.isEnabled(), is(false));
assertThat(plugin.isEnabled(), is(true));
assertThat(plugin.isTransportClient(), is(true));
Collection<Module> modules = plugin.nodeModules();
assertThat(modules.size(), is(0));
}
@ -34,7 +35,8 @@ public class MarvelPluginClientTests extends ESTestCase {
.build();
Marvel plugin = new Marvel(settings);
assertThat(plugin.isEnabled(), is(true));
assertThat(plugin.isTransportClient(), is(false));
Collection<Module> modules = plugin.nodeModules();
assertThat(modules.size(), is(4));
assertThat(modules.size(), is(5));
}
}

View File

@ -0,0 +1,104 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
public class MonitoringBulkDocTests extends ESTestCase {
public void testSerialization() throws IOException {
int iterations = randomIntBetween(5, 50);
for (int i = 0; i < iterations; i++) {
MonitoringBulkDoc doc = newRandomMonitoringBulkDoc();
boolean hasSourceNode = randomBoolean();
if (hasSourceNode) {
doc.setSourceNode(newRandomSourceNode());
}
BytesStreamOutput output = new BytesStreamOutput();
Version outputVersion = randomVersion(random());
output.setVersion(outputVersion);
doc.writeTo(output);
StreamInput streamInput = StreamInput.wrap(output.bytes());
streamInput.setVersion(randomVersion(random()));
MonitoringBulkDoc doc2 = new MonitoringBulkDoc(streamInput);
assertThat(doc2.getMonitoringId(), equalTo(doc.getMonitoringId()));
assertThat(doc2.getMonitoringVersion(), equalTo(doc.getMonitoringVersion()));
assertThat(doc2.getClusterUUID(), equalTo(doc.getClusterUUID()));
assertThat(doc2.getTimestamp(), equalTo(doc.getTimestamp()));
assertThat(doc2.getSourceNode(), equalTo(doc.getSourceNode()));
assertThat(doc2.getIndex(), equalTo(doc.getIndex()));
assertThat(doc2.getType(), equalTo(doc.getType()));
assertThat(doc2.getId(), equalTo(doc.getId()));
if (doc.getSource() == null) {
assertThat(doc2.getSource(), equalTo(BytesArray.EMPTY));
} else {
assertThat(doc2.getSource(), equalTo(doc.getSource()));
}
}
}
private MonitoringBulkDoc newRandomMonitoringBulkDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
if (frequently()) {
doc.setClusterUUID(randomAsciiOfLength(5));
doc.setType(randomAsciiOfLength(5));
}
if (randomBoolean()) {
doc.setTimestamp(System.currentTimeMillis());
doc.setSource(new BytesArray("{\"key\" : \"value\"}"));
}
if (rarely()) {
doc.setIndex(randomAsciiOfLength(5));
doc.setId(randomAsciiOfLength(2));
}
return doc;
}
private MonitoringDoc.Node newRandomSourceNode() {
String uuid = null;
String name = null;
String ip = null;
String transportAddress = null;
String host = null;
ImmutableOpenMap<String, String> attributes = null;
if (frequently()) {
uuid = randomAsciiOfLength(5);
name = randomAsciiOfLength(5);
}
if (randomBoolean()) {
ip = randomAsciiOfLength(5);
transportAddress = randomAsciiOfLength(5);
host = randomAsciiOfLength(3);
}
if (rarely()) {
int nbAttributes = randomIntBetween(0, 5);
ImmutableOpenMap.Builder<String, String> builder = ImmutableOpenMap.builder();
for (int i = 0; i < nbAttributes; i++) {
builder.put("key#" + i, String.valueOf(i));
}
attributes = builder.build();
}
return new MonitoringDoc.Node(uuid, host, transportAddress, ip, name, attributes);
}
}

View File

@ -0,0 +1,193 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.nullValue;
public class MonitoringBulkRequestTests extends ESTestCase {
private static final BytesArray SOURCE = new BytesArray("{\"key\" : \"value\"}");
public void testValidateRequestNoDocs() {
assertValidationErrors(new MonitoringBulkRequest(), hasItems("no monitoring documents added"));
}
public void testValidateRequestSingleDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(null, null);
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("monitored system id is missing for monitoring document [0]",
"monitored system version is missing for monitoring document [0]",
"type is missing for monitoring document [0]",
"source is missing for monitoring document [0]"));
doc = new MonitoringBulkDoc("id", null);
assertValidationErrors(new MonitoringBulkRequest().add(doc),
hasItems("monitored system version is missing for monitoring document [0]",
"type is missing for monitoring document [0]",
"source is missing for monitoring document [0]"));
doc = new MonitoringBulkDoc("id", "version");
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("type is missing for monitoring document [0]",
"source is missing for monitoring document [0]"));
doc.setType("type");
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("source is missing for monitoring document [0]"));
doc.setSource(SOURCE);
assertValidationErrors(new MonitoringBulkRequest().add(doc), nullValue());
}
public void testValidateRequestMultiDocs() {
MonitoringBulkRequest request = new MonitoringBulkRequest();
// Doc0 is complete
MonitoringBulkDoc doc0 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc0.setType(randomAsciiOfLength(5));
doc0.setSource(SOURCE);
request.add(doc0);
// Doc1 has no type
MonitoringBulkDoc doc1 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc1.setSource(SOURCE);
request.add(doc1);
// Doc2 has no source
MonitoringBulkDoc doc2 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc2.setType(randomAsciiOfLength(5));
doc2.setSource(BytesArray.EMPTY);
request.add(doc2);
// Doc3 has no version
MonitoringBulkDoc doc3 = new MonitoringBulkDoc(randomAsciiOfLength(2), null);
doc3.setType(randomAsciiOfLength(5));
doc3.setSource(SOURCE);
request.add(doc3);
// Doc4 has no id
MonitoringBulkDoc doc4 = new MonitoringBulkDoc(null, randomAsciiOfLength(2));
doc4.setType(randomAsciiOfLength(5));
doc4.setSource(SOURCE);
request.add(doc4);
assertValidationErrors(request, hasItems("type is missing for monitoring document [1]",
"source is missing for monitoring document [2]",
"monitored system version is missing for monitoring document [3]",
"monitored system id is missing for monitoring document [4]"));
}
public void testAddSingleDoc() {
MonitoringBulkRequest request = new MonitoringBulkRequest();
final int nbDocs = randomIntBetween(1, 20);
for (int i = 0; i < nbDocs; i++) {
request.add(new MonitoringBulkDoc(String.valueOf(i), String.valueOf(i)));
}
assertThat(request.getDocs(), hasSize(nbDocs));
}
public void testAddMultipleDocs() throws Exception {
final int nbDocs = randomIntBetween(3, 20);
final XContentType xContentType = XContentType.JSON;
try (BytesStreamOutput content = new BytesStreamOutput()) {
try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, content)) {
for (int i = 0; i < nbDocs; i++) {
builder.startObject().startObject("index").endObject().endObject().flush();
content.write(xContentType.xContent().streamSeparator());
builder.startObject().field("foo").value(i).endObject().flush();
content.write(xContentType.xContent().streamSeparator());
}
}
String defaultMonitoringId = randomBoolean() ? randomAsciiOfLength(2) : null;
String defaultMonitoringVersion = randomBoolean() ? randomAsciiOfLength(3) : null;
String defaultIndex = randomBoolean() ? randomAsciiOfLength(5) : null;
String defaultType = randomBoolean() ? randomAsciiOfLength(4) : null;
MonitoringBulkRequest request = new MonitoringBulkRequest();
request.add(content.bytes(), defaultMonitoringId, defaultMonitoringVersion, defaultIndex, defaultType);
assertThat(request.getDocs(), hasSize(nbDocs));
for (MonitoringBulkDoc doc : request.getDocs()) {
assertThat(doc.getMonitoringId(), equalTo(defaultMonitoringId));
assertThat(doc.getMonitoringVersion(), equalTo(defaultMonitoringVersion));
assertThat(doc.getIndex(), equalTo(defaultIndex));
assertThat(doc.getType(), equalTo(defaultType));
}
}
}
public void testSerialization() throws IOException {
MonitoringBulkRequest request = new MonitoringBulkRequest();
int numDocs = iterations(10, 30);
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomAsciiOfLength(2), randomVersion(random()).toString());
doc.setType(randomFrom("type1", "type2", "type3"));
doc.setSource(SOURCE);
if (randomBoolean()) {
doc.setIndex("index");
}
if (randomBoolean()) {
doc.setId(randomAsciiOfLength(3));
}
if (rarely()) {
doc.setClusterUUID(randomAsciiOfLength(5));
}
request.add(doc);
}
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(randomVersion(random()));
request.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes());
in.setVersion(out.getVersion());
MonitoringBulkRequest request2 = new MonitoringBulkRequest();
request2.readFrom(in);
assertThat(request2.docs.size(), CoreMatchers.equalTo(request.docs.size()));
for (int i = 0; i < request2.docs.size(); i++) {
MonitoringBulkDoc doc = request.docs.get(i);
MonitoringBulkDoc doc2 = request2.docs.get(i);
assertThat(doc2.getMonitoringId(), equalTo(doc.getMonitoringId()));
assertThat(doc2.getMonitoringVersion(), equalTo(doc.getMonitoringVersion()));
assertThat(doc2.getClusterUUID(), equalTo(doc.getClusterUUID()));
assertThat(doc2.getIndex(), equalTo(doc.getIndex()));
assertThat(doc2.getType(), equalTo(doc.getType()));
assertThat(doc2.getId(), equalTo(doc.getId()));
assertThat(doc2.getSource(), equalTo(doc.getSource()));
}
}
@SuppressWarnings("unchecked")
private static <T> void assertValidationErrors(MonitoringBulkRequest request, Matcher<? super T> matcher) {
ActionRequestValidationException validation = request.validate();
if (validation != null) {
assertThat((T) validation.validationErrors(), matcher);
} else {
assertThat(null, matcher);
}
}
}

View File

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class MonitoringBulkResponseTests extends ESTestCase {
public void testResponseStatus() {
final long took = Math.abs(randomLong());
MonitoringBulkResponse response = new MonitoringBulkResponse(took);
assertThat(response.getTookInMillis(), equalTo(took));
assertThat(response.getError(), is(nullValue()));
assertThat(response.status(), equalTo(RestStatus.OK));
ExportException exception = new ExportException(randomAsciiOfLength(10));
response = new MonitoringBulkResponse(took, new MonitoringBulkResponse.Error(exception));
assertThat(response.getTookInMillis(), equalTo(took));
assertThat(response.getError(), is(notNullValue()));
assertThat(response.status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR));
}
public void testSerialization() throws IOException {
int iterations = randomIntBetween(5, 50);
for (int i = 0; i < iterations; i++) {
MonitoringBulkResponse response;
if (randomBoolean()) {
response = new MonitoringBulkResponse(Math.abs(randomLong()));
} else {
Exception exception = randomFrom(
new ExportException(randomAsciiOfLength(5), new IllegalStateException(randomAsciiOfLength(5))),
new IllegalStateException(randomAsciiOfLength(5)),
new IllegalArgumentException(randomAsciiOfLength(5)));
response = new MonitoringBulkResponse(Math.abs(randomLong()), new MonitoringBulkResponse.Error(exception));
}
BytesStreamOutput output = new BytesStreamOutput();
Version outputVersion = randomVersion(random());
output.setVersion(outputVersion);
response.writeTo(output);
StreamInput streamInput = StreamInput.wrap(output.bytes());
streamInput.setVersion(randomVersion(random()));
MonitoringBulkResponse response2 = new MonitoringBulkResponse();
response2.readFrom(streamInput);
assertThat(response2.getTookInMillis(), equalTo(response.getTookInMillis()));
if (response.getError() == null) {
assertThat(response2.getError(), is(nullValue()));
} else {
assertThat(response2.getError(), is(notNullValue()));
}
}
}
}

View File

@ -0,0 +1,147 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.agent.resolver.bulk.MonitoringBulkResolver;
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
import org.elasticsearch.search.SearchHit;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class MonitoringBulkTests extends MarvelIntegTestCase {
@Override
protected Settings transportClientSettings() {
return super.transportClientSettings();
}
public void testMonitoringBulkIndexing() throws Exception {
MonitoringBulkRequestBuilder requestBuilder = monitoringClient().prepareMonitoringBulk();
String[] types = {"type1", "type2", "type3"};
int numDocs = scaledRandomIntBetween(100, 5000);
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes());
requestBuilder.add(doc);
}
MonitoringBulkResponse response = requestBuilder.get();
assertThat(response.getError(), is(nullValue()));
refresh();
SearchResponse searchResponse = client().prepareSearch().setTypes(types).setSize(numDocs).get();
assertHitCount(searchResponse, numDocs);
for (SearchHit searchHit : searchResponse.getHits()) {
Map<String, Object> source = searchHit.sourceAsMap();
assertNotNull(source.get(MonitoringBulkResolver.Fields.CLUSTER_UUID.underscore().toString()));
assertNotNull(source.get(MonitoringBulkResolver.Fields.TIMESTAMP.underscore().toString()));
assertNotNull(source.get(MonitoringBulkResolver.Fields.SOURCE_NODE.underscore().toString()));
}
}
/**
* This test creates N threads that execute a random number of monitoring bulk requests.
*/
public void testConcurrentRequests() throws Exception {
final Thread[] threads = new Thread[3 + randomInt(7)];
final List<Throwable> exceptions = new CopyOnWriteArrayList<>();
AtomicInteger total = new AtomicInteger(0);
logger.info("--> using {} concurrent clients to execute requests", threads.length);
for (int i = 0; i < threads.length; i++) {
final int nbRequests = randomIntBetween(3, 10);
threads[i] = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Throwable t) {
logger.error("unexpected error in exporting thread", t);
exceptions.add(t);
}
@Override
protected void doRun() throws Exception {
for (int j = 0; j < nbRequests; j++) {
MonitoringBulkRequestBuilder requestBuilder = monitoringClient().prepareMonitoringBulk();
int numDocs = scaledRandomIntBetween(10, 1000);
for (int k = 0; k < numDocs; k++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
doc.setType("concurrent");
doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes());
requestBuilder.add(doc);
}
total.addAndGet(numDocs);
MonitoringBulkResponse response = requestBuilder.get();
assertThat(response.getError(), is(nullValue()));
}
}
}, "export_thread_" + i);
threads[i].start();
}
for (Thread thread : threads) {
thread.join();
}
assertThat(exceptions, empty());
refresh();
SearchResponse countResponse = client().prepareSearch().setTypes("concurrent").setSize(0).get();
assertHitCount(countResponse, total.get());
}
public void testUnsupportedSystem() throws Exception {
MonitoringBulkRequestBuilder requestBuilder = monitoringClient().prepareMonitoringBulk();
String[] types = {"type1", "type2", "type3"};
int totalDocs = randomIntBetween(10, 1000);
int unsupportedDocs = 0;
for (int i = 0; i < totalDocs; i++) {
MonitoringBulkDoc doc;
if (randomBoolean()) {
doc = new MonitoringBulkDoc("unknown", Version.CURRENT.toString());
unsupportedDocs++;
} else {
doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
}
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());
requestBuilder.add(doc);
}
MonitoringBulkResponse response = requestBuilder.get();
if (unsupportedDocs == 0) {
assertThat(response.getError(), is(nullValue()));
} else {
assertThat(response.getError(), is(notNullValue()));
}
refresh();
SearchResponse countResponse = client().prepareSearch().setTypes(types).setSize(0).get();
assertHitCount(countResponse, totalDocs - unsupportedDocs);
}
}

View File

@ -0,0 +1,293 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.IsEqual.equalTo;
public class TransportMonitoringBulkActionTests extends ESTestCase {
private static ThreadPool threadPool;
@Rule
public ExpectedException expectedException = ExpectedException.none();
private ClusterService clusterService;
private TransportService transportService;
private CapturingExporters exportService;
private TransportMonitoringBulkAction action;
@BeforeClass
public static void beforeClass() {
threadPool = new ThreadPool(TransportMonitoringBulkActionTests.class.getSimpleName());
}
@AfterClass
public static void afterClass() {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
threadPool = null;
}
@Before
public void setUp() throws Exception {
super.setUp();
CapturingTransport transport = new CapturingTransport();
clusterService = new ClusterService(Settings.EMPTY, null, new ClusterSettings(Settings.EMPTY,
ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool,
new ClusterName(TransportMonitoringBulkActionTests.class.getName()));
clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, Version.CURRENT));
clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) {
@Override
public void connectToAddedNodes(ClusterChangedEvent event) {
// skip
}
@Override
public void disconnectFromRemovedNodes(ClusterChangedEvent event) {
// skip
}
});
clusterService.setClusterStatePublisher((event, ackListener) -> {});
clusterService.start();
transportService = new TransportService(transport, threadPool);
transportService.start();
transportService.acceptIncomingRequests();
exportService = new CapturingExporters();
action = new TransportMonitoringBulkAction(
Settings.EMPTY,
threadPool,
clusterService,
transportService,
new ActionFilters(Collections.emptySet()),
new IndexNameExpressionResolver(Settings.EMPTY),
exportService
);
}
@After
public void tearDown() throws Exception {
super.tearDown();
clusterService.close();
transportService.close();
}
public void testGlobalBlock() throws Exception {
expectedException.expect(ExecutionException.class);
expectedException.expect(hasToString(containsString("ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/2/no master]")));
final ClusterBlocks.Builder block = ClusterBlocks.builder().addGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ALL);
final CountDownLatch latch = new CountDownLatch(1);
clusterService.submitStateUpdateTask("add blocks to cluster state", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
// make sure we increment versions as the listener may depend on it to detect changes
return ClusterState.builder(currentState).blocks(block).version(currentState.version() + 1).build();
}
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
@Override
public void onFailure(String source, Throwable t) {
fail("unexpected exception: " + t);
}
});
try {
latch.await();
} catch (InterruptedException e) {
throw new ElasticsearchException("unexpected interruption", e);
}
MonitoringBulkRequest request = randomRequest();
action.execute(request).get();
}
public void testEmptyRequest() throws Exception {
expectedException.expect(ExecutionException.class);
expectedException.expect(hasToString(containsString("no monitoring documents added")));
MonitoringBulkRequest request = randomRequest(0);
action.execute(request).get();
assertThat(exportService.getExported(), hasSize(0));
}
public void testBasicRequest() throws Exception {
MonitoringBulkRequest request = randomRequest();
action.execute(request).get();
assertThat(exportService.getExported(), hasSize(request.getDocs().size()));
}
public void testAsyncActionPrepareDocs() throws Exception {
final PlainActionFuture<MonitoringBulkResponse> listener = new PlainActionFuture<>();
final MonitoringBulkRequest request = randomRequest();
Collection<MonitoringDoc> results = action.new AsyncAction(request, listener, exportService, clusterService)
.prepareForExport(request.getDocs());
assertThat(results, hasSize(request.getDocs().size()));
for (MonitoringDoc exported : results) {
assertThat(exported.getClusterUUID(), equalTo(clusterService.state().metaData().clusterUUID()));
assertThat(exported.getTimestamp(), greaterThan(0L));
assertThat(exported.getSourceNode(), notNullValue());
assertThat(exported.getSourceNode().getUUID(), equalTo(clusterService.localNode().getId()));
assertThat(exported.getSourceNode().getName(), equalTo(clusterService.localNode().getName()));
}
}
public void testAsyncActionExecuteExport() throws Exception {
final PlainActionFuture<MonitoringBulkResponse> listener = new PlainActionFuture<>();
final MonitoringBulkRequest request = randomRequest();
final Collection<MonitoringDoc> docs = Collections.unmodifiableCollection(request.getDocs());
action.new AsyncAction(request, listener, exportService, clusterService).executeExport(docs, 0L, listener);
assertThat(listener.get().getError(), nullValue());
Collection<MonitoringDoc> exported = exportService.getExported();
assertThat(exported, hasSize(request.getDocs().size()));
}
public void testAsyncActionExportThrowsException() throws Exception {
final PlainActionFuture<MonitoringBulkResponse> listener = new PlainActionFuture<>();
final MonitoringBulkRequest request = randomRequest();
final Exporters exporters = new ConsumingExporters(docs -> {
throw new IllegalStateException();
});
action.new AsyncAction(request, listener, exporters, clusterService).start();
assertThat(listener.get().getError(), notNullValue());
assertThat(listener.get().getError().getCause(), instanceOf(IllegalStateException.class));
}
/**
* @return a new MonitoringBulkRequest instance with a random number of documents
*/
private static MonitoringBulkRequest randomRequest() throws IOException {
return randomRequest(scaledRandomIntBetween(1, 100));
}
/**
* @return a new MonitoringBulkRequest instance with the given number of documents
*/
private static MonitoringBulkRequest randomRequest(final int numDocs) throws IOException {
MonitoringBulkRequest request = new MonitoringBulkRequest();
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomFrom(MonitoredSystem.values()).getSystem(),
randomVersion(random()).toString());
doc.setType(randomFrom("type1", "type2"));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());
request.add(doc);
}
return request;
}
/**
* An Exporters implementation that captures the documents to export
*/
class CapturingExporters extends Exporters {
private final Collection<MonitoringDoc> exported = ConcurrentCollections.newConcurrentSet();
public CapturingExporters() {
super(Settings.EMPTY, Collections.emptyMap(), clusterService,
new ClusterSettings(Settings.EMPTY, Collections.singleton(MarvelSettings.EXPORTERS_SETTINGS)));
}
@Override
public synchronized void export(Collection<MonitoringDoc> docs) throws ExportException {
exported.addAll(docs);
}
public Collection<MonitoringDoc> getExported() {
return exported;
}
}
/**
* An Exporters implementation that applies a Consumer when exporting documents
*/
class ConsumingExporters extends Exporters {
private final Consumer<Collection<? extends MonitoringDoc>> consumer;
public ConsumingExporters(Consumer<Collection<? extends MonitoringDoc>> consumer) {
super(Settings.EMPTY, Collections.emptyMap(), clusterService,
new ClusterSettings(Settings.EMPTY, Collections.singleton(MarvelSettings.EXPORTERS_SETTINGS)));
this.consumer = consumer;
}
@Override
public synchronized void export(Collection<MonitoringDoc> docs) throws ExportException {
consumer.accept(docs);
}
}
public static void setState(ClusterService clusterService, ClusterState clusterState) {
}
}

View File

@ -17,7 +17,7 @@ import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
import org.elasticsearch.marvel.cleaner.CleanerService;
import org.elasticsearch.shield.InternalClient;
import org.elasticsearch.marvel.support.init.proxy.MonitoringClientProxy;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@ -67,7 +67,7 @@ public class ExportersTests extends ESTestCase {
clusterService = mock(ClusterService.class);
// we always need to have the local exporter as it serves as the default one
factories.put(LocalExporter.TYPE, new LocalExporter.Factory(new InternalClient.Insecure(client), clusterService,
factories.put(LocalExporter.TYPE, new LocalExporter.Factory(MonitoringClientProxy.of(client), clusterService,
mock(CleanerService.class)));
clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(MarvelSettings.COLLECTORS,
MarvelSettings.INTERVAL, MarvelSettings.EXPORTERS_SETTINGS)));
@ -327,7 +327,6 @@ public class ExportersTests extends ESTestCase {
}
}
static class TestFactory extends Exporter.Factory<TestFactory.TestExporter> {
public TestFactory(String type, boolean singleton) {
super(type, singleton);
@ -424,13 +423,13 @@ public class ExportersTests extends ESTestCase {
}
@Override
public ExportBulk add(Collection<MonitoringDoc> docs) throws Exception {
public ExportBulk add(Collection<MonitoringDoc> docs) throws ExportException {
count.addAndGet(docs.size());
return this;
}
@Override
public void flush() throws Exception {
public void flush() throws ExportException {
}
AtomicInteger getCount() {

View File

@ -43,7 +43,7 @@ public class MonitoringDocTests extends ESTestCase {
StreamInput streamInput = StreamInput.wrap(output.bytes());
streamInput.setVersion(randomVersion(random()));
MonitoringDoc monitoringDoc2 = MonitoringDoc.readMonitoringDoc(streamInput);
MonitoringDoc monitoringDoc2 = new MonitoringDoc(streamInput);
assertThat(monitoringDoc2.getMonitoringId(), equalTo(monitoringDoc.getMonitoringId()));
assertThat(monitoringDoc2.getMonitoringVersion(), equalTo(monitoringDoc.getMonitoringVersion()));
@ -64,7 +64,7 @@ public class MonitoringDocTests extends ESTestCase {
public void testSetSourceNode() {
int iterations = randomIntBetween(5, 50);
for (int i = 0; i < iterations; i++) {
MonitoringDoc monitoringDoc = new MonitoringDoc();
MonitoringDoc monitoringDoc = new MonitoringDoc(null, null);
if (randomBoolean()) {
DiscoveryNode discoveryNode = newRandomDiscoveryNode();

View File

@ -18,6 +18,7 @@ import org.elasticsearch.marvel.MarvelSettings;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.agent.collector.cluster.ClusterStateMonitoringDoc;
import org.elasticsearch.marvel.agent.collector.indices.IndexRecoveryMonitoringDoc;
import org.elasticsearch.marvel.agent.exporter.ExportException;
import org.elasticsearch.marvel.agent.exporter.Exporter;
import org.elasticsearch.marvel.agent.exporter.Exporters;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
@ -39,7 +40,6 @@ import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils.dataTemplateName;
import static org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils.indexTemplateName;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@ -167,7 +167,14 @@ public class LocalExporterTests extends MarvelIntegTestCase {
logger.debug("--> exporting a second monitoring doc");
exporter.export(Collections.singletonList(newRandomMarvelDoc()));
} catch (ElasticsearchException e) {
assertThat(e.getMessage(), allOf(containsString("failure in bulk execution"), containsString("IndexClosedException[closed]")));
assertThat(e.getMessage(), containsString("failed to flush export bulk [_local]"));
assertThat(e.getCause(), instanceOf(ExportException.class));
ExportException cause = (ExportException) e.getCause();
assertTrue(cause.hasExportExceptions());
for (ExportException c : cause) {
assertThat(c.getMessage(), containsString("IndexClosedException[closed]"));
}
assertNull(exporter.getBulk().requestBuilder);
}
}

View File

@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.startsWith;
public abstract class MonitoringIndexNameResolverTestCase<M extends MonitoringDoc, R extends MonitoringIndexNameResolver<M>>
extends ESTestCase {
private final ResolversRegistry resolversRegistry = new ResolversRegistry(Settings.EMPTY);
protected final ResolversRegistry resolversRegistry = new ResolversRegistry(Settings.EMPTY);
/**
* @return the {@link MonitoringIndexNameResolver} to test

View File

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.marvel.agent.resolver.bulk;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.marvel.MonitoredSystem;
import org.elasticsearch.marvel.action.MonitoringBulkDoc;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolverTestCase;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class MonitoringBulkResolverTests extends MonitoringIndexNameResolverTestCase<MonitoringBulkDoc, MonitoringBulkResolver> {
@Override
protected MonitoringBulkDoc newMarvelDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
doc.setClusterUUID(randomAsciiOfLength(5));
doc.setTimestamp(Math.abs(randomLong()));
doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT));
doc.setType("kibana_stats");
doc.setSource(new BytesArray("{\"field1\" : \"value1\"}"));
return doc;
}
@Override
protected boolean checkResolvedId() {
return false;
}
@Override
protected boolean checkFilters() {
return false;
}
public void testMonitoringBulkResolver() throws Exception {
MonitoringBulkDoc doc = newMarvelDoc();
doc.setTimestamp(1437580442979L);
if (randomBoolean()) {
doc.setIndex(randomAsciiOfLength(5));
}
if (randomBoolean()) {
doc.setId(randomAsciiOfLength(35));
}
if (randomBoolean()) {
doc.setClusterUUID(randomAsciiOfLength(5));
}
MonitoringBulkResolver resolver = newResolver();
assertThat(resolver.index(doc), equalTo(".monitoring-kibana-0-2015.07.22"));
assertThat(resolver.type(doc), equalTo(doc.getType()));
assertThat(resolver.id(doc), nullValue());
assertSource(resolver.source(doc, XContentType.JSON),
"cluster_uuid",
"timestamp",
"source_node",
"kibana_stats",
"kibana_stats.field1");
}
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.marvel.agent.AgentService;
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
import org.elasticsearch.marvel.agent.exporter.MonitoringDoc;
import org.elasticsearch.marvel.agent.resolver.MonitoringIndexNameResolver;
import org.elasticsearch.marvel.client.MonitoringClient;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.shield.authc.esusers.ESUsersRealm;
import org.elasticsearch.shield.authc.support.Hasher;
@ -34,6 +35,7 @@ import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.watcher.Watcher;
import org.elasticsearch.xpack.XPackClient;
import org.elasticsearch.xpack.XPackPlugin;
import org.hamcrest.Matcher;
import org.jboss.netty.util.internal.SystemPropertyUtil;
@ -135,6 +137,11 @@ public abstract class MarvelIntegTestCase extends ESIntegTestCase {
return client -> (client instanceof NodeClient) ? client.filterWithHeader(headers) : client;
}
protected MonitoringClient monitoringClient() {
Client client = shieldEnabled ? internalCluster().transportClient() : client();
return randomBoolean() ? new XPackClient(client).monitoring() : new MonitoringClient(client);
}
@Override
protected Set<String> excludeTemplates() {
Set<String> templates = new HashSet<>();
@ -435,9 +442,9 @@ public abstract class MarvelIntegTestCase extends ESIntegTestCase {
public static final String ROLES =
"test:\n" + // a user for the test infra.
" cluster: [ 'cluster:monitor/nodes/info', 'cluster:monitor/state', 'cluster:monitor/health', 'cluster:monitor/stats'," +
" 'cluster:admin/settings/update', 'cluster:admin/repository/delete', 'cluster:monitor/nodes/liveness'," +
" 'indices:admin/template/get', 'indices:admin/template/put', 'indices:admin/template/delete'," +
" 'cluster:monitor/task']\n" +
" 'cluster:admin/settings/update', 'cluster:admin/repository/delete', 'cluster:monitor/nodes/liveness'," +
" 'indices:admin/template/get', 'indices:admin/template/put', 'indices:admin/template/delete'," +
" 'cluster:monitor/task', 'cluster:admin/xpack/monitoring/bulk' ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ all ]\n" +

View File

@ -26,7 +26,8 @@ import java.io.IOException;
*/
public final class DocumentSubsetReader extends FilterLeafReader {
public static DirectoryReader wrap(DirectoryReader in, BitsetFilterCache bitsetFilterCache, Query roleQuery) throws IOException {
public static DocumentSubsetDirectoryReader wrap(DirectoryReader in, BitsetFilterCache bitsetFilterCache,
Query roleQuery) throws IOException {
return new DocumentSubsetDirectoryReader(in, bitsetFilterCache, roleQuery);
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.shield.authz.accesscontrol;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BulkScorer;
@ -156,50 +157,7 @@ public class ShieldIndexSearcherWrapper extends IndexSearcherWrapper {
// The reasons why we return a custom searcher:
// 1) in the case the role query is sparse, a large part of the main query evaluation can be skipped
// 2) if the role query doesn't match any docs in a segment, that segment can be skipped entirely
IndexSearcher indexSearcher = new IndexSearcher(directoryReader) {
@Override
protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
for (LeafReaderContext ctx : leaves) { // search each subreader
final LeafCollector leafCollector;
try {
leafCollector = collector.getLeafCollector(ctx);
} catch (CollectionTerminatedException e) {
// there is no doc of interest in this reader context
// continue with the following leaf
continue;
}
// The reader is always of type DocumentSubsetReader when we get here:
DocumentSubsetReader reader = (DocumentSubsetReader) ctx.reader();
BitSet roleQueryBits = reader.getRoleQueryBits();
if (roleQueryBits == null) {
// nothing matches with the role query, so skip this segment:
continue;
}
Scorer scorer = weight.scorer(ctx);
if (scorer != null) {
try {
// if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
if (roleQueryBits instanceof SparseFixedBitSet) {
SparseFixedBitSet sparseFixedBitSet = (SparseFixedBitSet) roleQueryBits;
Bits realLiveDocs = reader.getWrappedLiveDocs();
intersectScorerAndRoleBits(scorer, sparseFixedBitSet, leafCollector, realLiveDocs);
} else {
BulkScorer bulkScorer = weight.bulkScorer(ctx);
Bits liveDocs = reader.getLiveDocs();
bulkScorer.score(leafCollector, liveDocs);
}
} catch (CollectionTerminatedException e) {
// collection was terminated prematurely
// continue with the following leaf
}
}
}
}
};
IndexSearcher indexSearcher = new IndexSearcherWrapper((DocumentSubsetDirectoryReader) directoryReader);
indexSearcher.setQueryCache(indexSearcher.getQueryCache());
indexSearcher.setQueryCachingPolicy(indexSearcher.getQueryCachingPolicy());
indexSearcher.setSimilarity(indexSearcher.getSimilarity(true));
@ -208,6 +166,61 @@ public class ShieldIndexSearcherWrapper extends IndexSearcherWrapper {
return searcher;
}
static class IndexSearcherWrapper extends IndexSearcher {
public IndexSearcherWrapper(DocumentSubsetDirectoryReader r) {
super(r);
}
@Override
protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
for (LeafReaderContext ctx : leaves) { // search each subreader
final LeafCollector leafCollector;
try {
leafCollector = collector.getLeafCollector(ctx);
} catch (CollectionTerminatedException e) {
// there is no doc of interest in this reader context
// continue with the following leaf
continue;
}
// The reader is always of type DocumentSubsetReader when we get here:
DocumentSubsetReader reader = (DocumentSubsetReader) ctx.reader();
BitSet roleQueryBits = reader.getRoleQueryBits();
if (roleQueryBits == null) {
// nothing matches with the role query, so skip this segment:
continue;
}
// if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
if (roleQueryBits instanceof SparseFixedBitSet) {
Scorer scorer = weight.scorer(ctx);
if (scorer != null) {
SparseFixedBitSet sparseFixedBitSet = (SparseFixedBitSet) roleQueryBits;
Bits realLiveDocs = reader.getWrappedLiveDocs();
try {
intersectScorerAndRoleBits(scorer, sparseFixedBitSet, leafCollector, realLiveDocs);
} catch (CollectionTerminatedException e) {
// collection was terminated prematurely
// continue with the following leaf
}
}
} else {
BulkScorer bulkScorer = weight.bulkScorer(ctx);
if (bulkScorer != null) {
Bits liveDocs = reader.getLiveDocs();
try {
bulkScorer.score(leafCollector, liveDocs);
} catch (CollectionTerminatedException e) {
// collection was terminated prematurely
// continue with the following leaf
}
}
}
}
}
}
public Set<String> getAllowedMetaFields() {
return allowedMetaFields;
}
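
To picture the sparse branch above, a simplified leap-frog of the main query's scorer against the role bitset (a sketch only, assuming the Scorer#iterator() API; the real intersectScorerAndRoleBits also has to honor premature collection termination):

    DocIdSetIterator iterator = scorer.iterator();
    for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
        // collect only hits that the role query also matches and that are live
        if (roleQueryBits.get(doc) && (realLiveDocs == null || realLiveDocs.get(doc))) {
            leafCollector.collect(doc);
        }
    }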

View File

@ -549,7 +549,7 @@ public class DocumentLevelSecurityTests extends ShieldIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", matchAllQuery()))
.addSort("_id", SortOrder.ASC)
.addSort("_uid", SortOrder.ASC)
.get();
assertHitCount(searchResponse, 3L);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));

View File

@ -23,14 +23,12 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@ -59,12 +57,13 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
IndexSettings indexSettings = createIndexSettings();
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
String name = "_field";
FieldDataType fieldDataType = new StringFieldMapper.StringFieldType().fieldDataType();
indexFieldDataCache = new DummyAccountingFieldDataCache();
sortedSetDVOrdinalsIndexFieldData = new SortedSetDVOrdinalsIndexFieldData(indexSettings,indexFieldDataCache, name,
circuitBreakerService, fieldDataType);
pagedBytesIndexFieldData = new PagedBytesIndexFieldData(indexSettings, name, fieldDataType, indexFieldDataCache,
circuitBreakerService);
pagedBytesIndexFieldData = new PagedBytesIndexFieldData(indexSettings, name, indexFieldDataCache,
circuitBreakerService, TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE);
dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);

View File

@ -9,7 +9,9 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
@ -17,15 +19,21 @@ import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@ -43,6 +51,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.shield.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
import org.elasticsearch.shield.license.ShieldLicenseState;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@ -51,6 +60,8 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
@ -58,6 +69,7 @@ import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.shield.authz.accesscontrol.ShieldIndexSearcherWrapper.intersectScorerAndRoleBits;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;
@ -370,4 +382,152 @@ public class ShieldIndexSearcherWrapperUnitTests extends ESTestCase {
}
}
public void testIndexSearcherWrapperSparseNoDeletions() throws IOException {
doTestIndexSearcherWrapper(true, false);
}
public void testIndexSearcherWrapperDenseNoDeletions() throws IOException {
doTestIndexSearcherWrapper(false, false);
}
public void testIndexSearcherWrapperSparseWithDeletions() throws IOException {
doTestIndexSearcherWrapper(true, true);
}
public void testIndexSearcherWrapperDenseWithDeletions() throws IOException {
doTestIndexSearcherWrapper(false, true);
}
static class CreateScorerOnceWeight extends Weight {
private final Weight weight;
private final Set<Object> seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>());
protected CreateScorerOnceWeight(Weight weight) {
super(weight.getQuery());
this.weight = weight;
}
@Override
public void extractTerms(Set<Term> terms) {
weight.extractTerms(terms);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return weight.explain(context, doc);
}
@Override
public float getValueForNormalization() throws IOException {
return weight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
weight.normalize(norm, boost);
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
assertTrue(seenLeaves.add(context.reader().getCoreCacheKey()));
return weight.scorer(context);
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context)
throws IOException {
assertTrue(seenLeaves.add(context.reader().getCoreCacheKey()));
return weight.bulkScorer(context);
}
}
static class CreateScorerOnceQuery extends Query {
private final Query query;
CreateScorerOnceQuery(Query query) {
this.query = query;
}
@Override
public String toString(String field) {
return query.toString(field);
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query queryRewritten = query.rewrite(reader);
if (query != queryRewritten) {
return new CreateScorerOnceQuery(queryRewritten);
}
return super.rewrite(reader);
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new CreateScorerOnceWeight(query.createWeight(searcher, needsScores));
}
}
public void doTestIndexSearcherWrapper(boolean sparse, boolean deletions) throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(null));
Document doc = new Document();
StringField allowedField = new StringField("allowed", "yes", Store.NO);
doc.add(allowedField);
StringField fooField = new StringField("foo", "bar", Store.NO);
doc.add(fooField);
StringField deleteField = new StringField("delete", "no", Store.NO);
doc.add(deleteField);
w.addDocument(doc);
if (deletions) {
// add a document that matches foo:bar but will be deleted
deleteField.setStringValue("yes");
w.addDocument(doc);
deleteField.setStringValue("no");
}
allowedField.setStringValue("no");
w.addDocument(doc);
if (sparse) {
for (int i = 0; i < 1000; ++i) {
w.addDocument(doc);
}
w.forceMerge(1);
}
w.deleteDocuments(new Term("delete", "yes"));
DirectoryReader reader = DirectoryReader.open(w);
IndexSettings settings = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY);
BitsetFilterCache.Listener listener = new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {
}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {
}
};
BitsetFilterCache cache = new BitsetFilterCache(settings, listener);
Query roleQuery = new TermQuery(new Term("allowed", "yes"));
BitSet bitSet = cache.getBitSetProducer(roleQuery).getBitSet(reader.leaves().get(0));
if (sparse) {
assertThat(bitSet, instanceOf(SparseFixedBitSet.class));
} else {
assertThat(bitSet, instanceOf(FixedBitSet.class));
}
DocumentSubsetDirectoryReader filteredReader = DocumentSubsetReader.wrap(reader, cache, roleQuery);
IndexSearcher searcher = new ShieldIndexSearcherWrapper.IndexSearcherWrapper(filteredReader);
// Searching a non-existing term will trigger a null scorer
assertEquals(0, searcher.count(new TermQuery(new Term("non_existing_field", "non_existing_value"))));
assertEquals(1, searcher.count(new TermQuery(new Term("foo", "bar"))));
// make sure scorers are created only once, see #1725
assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery())));
IOUtils.close(reader, w, dir);
}
}


@ -6,13 +6,12 @@
package org.elasticsearch.xpack;
import org.elasticsearch.client.Client;
import org.elasticsearch.marvel.client.MonitoringClient;
import org.elasticsearch.shield.authc.support.SecuredString;
import org.elasticsearch.shield.authc.support.UsernamePasswordToken;
import org.elasticsearch.shield.client.SecurityClient;
import org.elasticsearch.watcher.client.WatcherClient;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
@ -24,15 +23,22 @@ import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.basic
public class XPackClient {
private final Client client;
private final MonitoringClient monitoringClient;
private final SecurityClient securityClient;
private final WatcherClient watcherClient;
public XPackClient(Client client) {
this.client = client;
this.monitoringClient = new MonitoringClient(client);
this.securityClient = new SecurityClient(client);
this.watcherClient = new WatcherClient(client);
}
public MonitoringClient monitoring() {
return monitoringClient;
}
public SecurityClient security() {
return securityClient;
}


@ -22,15 +22,19 @@ import org.elasticsearch.marvel.Marvel;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.shield.Shield;
import org.elasticsearch.shield.authc.AuthenticationModule;
import org.elasticsearch.watcher.Watcher;
import org.elasticsearch.xpack.common.init.LazyInitializationModule;
import org.elasticsearch.xpack.common.init.LazyInitializationService;
import org.elasticsearch.xpack.extensions.XPackExtension;
import org.elasticsearch.xpack.extensions.XPackExtensionsService;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
public class XPackPlugin extends Plugin {
@ -67,6 +71,7 @@ public class XPackPlugin extends Plugin {
}
protected final Settings settings;
protected final XPackExtensionsService extensionsService;
protected Licensing licensing;
protected Shield shield;
@ -81,6 +86,14 @@ public class XPackPlugin extends Plugin {
this.marvel = new Marvel(settings);
this.watcher = new Watcher(settings);
this.graph = new Graph(settings);
// Check if the node is a transport client.
if (transportClientMode(settings) == false) {
Environment env = new Environment(settings);
this.extensionsService =
new XPackExtensionsService(settings, resolveXPackExtensionsFile(env), getExtensions());
} else {
this.extensionsService = null;
}
}
@Override public String name() {
@ -91,6 +104,11 @@ public class XPackPlugin extends Plugin {
return "Elastic X-Pack";
}
// For tests only
public Collection<Class<? extends XPackExtension>> getExtensions() {
return Collections.emptyList();
}
@Override
public Collection<Module> nodeModules() {
ArrayList<Module> modules = new ArrayList<>();
@ -145,6 +163,7 @@ public class XPackPlugin extends Plugin {
public void onModule(NetworkModule module) {
licensing.onModule(module);
marvel.onModule(module);
shield.onModule(module);
watcher.onModule(module);
graph.onModule(module);
@ -152,17 +171,25 @@ public class XPackPlugin extends Plugin {
public void onModule(ActionModule module) {
licensing.onModule(module);
marvel.onModule(module);
shield.onModule(module);
watcher.onModule(module);
graph.onModule(module);
}
public void onModule(AuthenticationModule module) {
if (extensionsService != null) {
extensionsService.onModule(module);
}
}
public void onIndexModule(IndexModule module) {
shield.onIndexModule(module);
graph.onIndexModule(module);
}
public void onModule(LazyInitializationModule module) {
marvel.onModule(module);
watcher.onModule(module);
}
@ -221,4 +248,8 @@ public class XPackPlugin extends Plugin {
settingsModule.registerSetting(Setting.boolSetting(legacyFeatureEnabledSetting(featureName),
defaultValue, Setting.Property.NodeScope));
}
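// i.e. <es home>/plugins/xpack/extensions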
public static Path resolveXPackExtensionsFile(Environment env) {
return env.pluginsFile().resolve("xpack").resolve("extensions");
}
}


@ -7,6 +7,7 @@ package org.elasticsearch.xpack.common.init.proxy;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
@ -36,6 +37,10 @@ public class ClientProxy implements LazyInitializable {
client.bulk(preProcess(request), listener);
}
public BulkRequestBuilder prepareBulk() {
return client.prepareBulk();
}
protected <M extends TransportMessage> M preProcess(M message) {
return message;
}


@ -0,0 +1,190 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.env.Environment;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import static org.elasticsearch.xpack.XPackPlugin.resolveXPackExtensionsFile;
import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
/**
* A command for the extension cli to install an extension into x-pack.
*
* The install command takes a URL to an extension zip.
*
* Extensions are packaged as zip files. Each packaged extension must contain an
* extension properties file. See {@link XPackExtensionInfo}.
* <p>
The installation process first extracts the extension's files into a temporary
* directory in order to verify the extension satisfies the following requirements:
* <ul>
* <li>The property file exists and contains valid metadata. See {@link XPackExtensionInfo#readFromProperties(Path)}</li>
* <li>Jar hell does not exist, either between the extension's own jars or with the parent classloader (elasticsearch + xpack)</li>
* </ul>
*/
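// For illustration only (inferred from the unzip and verify steps below, not a
// formal spec), an extension zip carries the descriptor next to the extension jars:
//
//   my-extension.zip
//   |-- xpack-extension-descriptor.properties
//   `-- my-extension.jar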
class InstallXPackExtensionCommand extends Command {
private final Environment env;
private final OptionSpec<Void> batchOption;
private final OptionSpec<String> arguments;
InstallXPackExtensionCommand(Environment env) {
super("Install a plugin");
this.env = env;
this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"),
"Enable batch mode explicitly, automatic confirmation of security permission");
this.arguments = parser.nonOptions("plugin id");
}
@Override
protected void execute(Terminal terminal, OptionSet options) throws Exception {
// TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
List<String> args = arguments.values(options);
if (args.size() != 1) {
throw new UserError(ExitCodes.USAGE, "Must supply a single extension id argument");
}
String extensionURL = args.get(0);
boolean isBatch = options.has(batchOption) || System.console() == null;
execute(terminal, extensionURL, isBatch);
}
// pkg private for testing
void execute(Terminal terminal, String extensionId, boolean isBatch) throws Exception {
if (Files.exists(resolveXPackExtensionsFile(env)) == false) {
terminal.println("xpack extensions directory [" + resolveXPackExtensionsFile(env) + "] does not exist. Creating...");
Files.createDirectories(resolveXPackExtensionsFile(env));
}
Path extensionZip = download(terminal, extensionId, env.tmpFile());
Path extractedZip = unzip(extensionZip, resolveXPackExtensionsFile(env));
install(terminal, extractedZip, env);
}
/** Downloads the extension and returns the file it was downloaded to. */
private Path download(Terminal terminal, String extensionURL, Path tmpDir) throws Exception {
terminal.println("-> Downloading " + URLDecoder.decode(extensionURL, "UTF-8"));
URL url = new URL(extensionURL);
Path zip = Files.createTempFile(tmpDir, null, ".zip");
try (InputStream in = url.openStream()) {
// must overwrite since creating the temp file above actually created the file
Files.copy(in, zip, StandardCopyOption.REPLACE_EXISTING);
}
return zip;
}
private Path unzip(Path zip, Path extensionDir) throws IOException, UserError {
// unzip extension to a staging temp dir
Path target = Files.createTempDirectory(extensionDir, ".installing-");
Files.createDirectories(target);
// TODO: we should wrap this in a try/catch and try deleting the target dir on failure?
try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
ZipEntry entry;
byte[] buffer = new byte[8192];
while ((entry = zipInput.getNextEntry()) != null) {
Path targetFile = target.resolve(entry.getName());
// TODO: handle name being an absolute path
// be on the safe side: do not rely on that directories are always extracted
// before their children (although this makes sense, but is it guaranteed?)
Files.createDirectories(targetFile.getParent());
if (entry.isDirectory() == false) {
try (OutputStream out = Files.newOutputStream(targetFile)) {
int len;
while((len = zipInput.read(buffer)) >= 0) {
out.write(buffer, 0, len);
}
}
}
zipInput.closeEntry();
}
}
Files.delete(zip);
return target;
}
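
// A possible shape for the absolute-path TODO above (a sketch, not part of this
// change): normalize each entry and reject anything that escapes the staging
// directory before writing it.
private static Path resolveEntrySafely(Path target, ZipEntry entry) throws UserError {
    Path targetFile = target.resolve(entry.getName()).normalize();
    if (targetFile.startsWith(target) == false) {
        throw new UserError(ExitCodes.IO_ERROR,
                "entry [" + entry.getName() + "] escapes the extraction directory");
    }
    return targetFile;
}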
/** Load information about the extension, and verify it can be installed with no errors. */
private XPackExtensionInfo verify(Terminal terminal, Path extensionRoot, Environment env) throws Exception {
// read and validate the extension descriptor
XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extensionRoot);
terminal.println(VERBOSE, info.toString());
// check for jar hell before any copying
jarHellCheck(extensionRoot);
return info;
}
/** check a candidate extension for jar hell before installing it */
private void jarHellCheck(Path candidate) throws Exception {
// create list of current jars in classpath
// including the x-pack jars (see $ES_CLASSPATH in bin/extension script)
final List<URL> jars = new ArrayList<>();
jars.addAll(Arrays.asList(JarHell.parseClassPath()));
// add extension jars to the list
Path[] extensionJars = FileSystemUtils.files(candidate, "*.jar");
for (Path jar : extensionJars) {
jars.add(jar.toUri().toURL());
}
// TODO: no jars should be an error
// TODO: verify the classname exists in one of the jars!
// check combined (current classpath + new jars to-be-added)
JarHell.checkJarHell(jars.toArray(new URL[jars.size()]));
}
/**
* Installs the extension from {@code tmpRoot} into the extensions dir.
*/
private void install(Terminal terminal, Path tmpRoot, Environment env) throws Exception {
List<Path> deleteOnFailure = new ArrayList<>();
deleteOnFailure.add(tmpRoot);
try {
XPackExtensionInfo info = verify(terminal, tmpRoot, env);
final Path destination = resolveXPackExtensionsFile(env).resolve(info.getName());
if (Files.exists(destination)) {
throw new UserError(ExitCodes.USAGE,
"extension directory " + destination.toAbsolutePath() +
" already exists. To update the extension, uninstall it first using 'remove " +
info.getName() + "' command");
}
Files.move(tmpRoot, destination, StandardCopyOption.ATOMIC_MOVE);
terminal.println("-> Installed " + info.getName());
} catch (Exception installProblem) {
try {
IOUtils.rm(deleteOnFailure.toArray(new Path[0]));
} catch (IOException exceptionWhileRemovingFiles) {
installProblem.addSuppressed(exceptionWhileRemovingFiles);
}
throw installProblem;
}
}
}


@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import joptsimple.OptionSet;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.env.Environment;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.elasticsearch.xpack.XPackPlugin.resolveXPackExtensionsFile;
import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
/**
* A command for the extension cli to list extensions installed in x-pack.
*/
class ListXPackExtensionCommand extends Command {
private final Environment env;
ListXPackExtensionCommand(Environment env) {
super("Lists installed x-pack extensions");
this.env = env;
}
@Override
protected void execute(Terminal terminal, OptionSet options) throws Exception {
if (Files.exists(resolveXPackExtensionsFile(env)) == false) {
throw new IOException("Extensions directory missing: " + resolveXPackExtensionsFile(env));
}
terminal.println(VERBOSE, "Extensions directory: " + resolveXPackExtensionsFile(env));
try (DirectoryStream<Path> stream = Files.newDirectoryStream(resolveXPackExtensionsFile(env))) {
for (Path extension : stream) {
terminal.println(extension.getFileName().toString());
}
}
}
}


@ -0,0 +1,70 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.Strings;
import org.elasticsearch.env.Environment;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.xpack.XPackPlugin.resolveXPackExtensionsFile;
import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
/**
* A command for the extension cli to remove an extension from x-pack.
*/
class RemoveXPackExtensionCommand extends Command {
private final Environment env;
private final OptionSpec<String> arguments;
RemoveXPackExtensionCommand(Environment env) {
super("Removes an extension from x-pack");
this.env = env;
this.arguments = parser.nonOptions("extension name");
}
@Override
protected void execute(Terminal terminal, OptionSet options) throws Exception {
// TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
List<String> args = arguments.values(options);
if (args.size() != 1) {
throw new UserError(ExitCodes.USAGE, "Must supply a single extension id argument");
}
execute(terminal, args.get(0));
}
// pkg private for testing
void execute(Terminal terminal, String extensionName) throws Exception {
terminal.println("-> Removing " + Strings.coalesceToEmpty(extensionName) + "...");
Path extensionDir = resolveXPackExtensionsFile(env).resolve(extensionName);
if (Files.exists(extensionDir) == false) {
throw new UserError(ExitCodes.USAGE,
"Extension " + extensionName + " not found. Run 'bin/xpack/extension list' to get list of installed extensions.");
}
List<Path> extensionPaths = new ArrayList<>();
terminal.println(VERBOSE, "Removing: " + extensionDir);
Path tmpExtensionDir = resolveXPackExtensionsFile(env).resolve(".removing-" + extensionName);
Files.move(extensionDir, tmpExtensionDir, StandardCopyOption.ATOMIC_MOVE);
extensionPaths.add(tmpExtensionDir);
IOUtils.rm(extensionPaths.toArray(new Path[extensionPaths.size()]));
}
}


@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.elasticsearch.shield.authc.AuthenticationModule;
/**
* An extension point that allows plugging custom functionality into the x-pack authentication module.
*/
public abstract class XPackExtension {
/**
* The name of the extension.
*/
public abstract String name();
/**
* The description of the plugin.
*/
public abstract String description();
/**
* Implement this function to register custom extensions in the authentication module.
*/
public void onModule(AuthenticationModule module) {}
}
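
A concrete extension only has to provide the two descriptors and, optionally, the authentication hook. A minimal sketch (the class name is illustrative, and the exact AuthenticationModule registration calls are not shown in this change):

public class MyRealmExtension extends XPackExtension {
    @Override
    public String name() {
        return "my realm extension";
    }

    @Override
    public String description() {
        return "registers a custom realm with the authentication module";
    }

    @Override
    public void onModule(AuthenticationModule module) {
        // register custom realms or failure handlers here
    }
}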


@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.varia.NullAppender;
import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
/**
* A cli tool for adding, removing and listing extensions for x-pack.
*/
public class XPackExtensionCli extends MultiCommand {
public XPackExtensionCli(Environment env) {
super("A tool for managing installed x-pack extensions");
subcommands.put("list", new ListXPackExtensionCommand(env));
subcommands.put("install", new InstallXPackExtensionCommand(env));
subcommands.put("remove", new RemoveXPackExtensionCommand(env));
}
public static void main(String[] args) throws Exception {
BasicConfigurator.configure(new NullAppender());
Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT);
exit(new XPackExtensionCli(env).main(args, Terminal.DEFAULT));
}
}
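
Driving the cli in-process mirrors what main does above; a short sketch (status handling is illustrative):

Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT);
int status = new XPackExtensionCli(env).main(new String[] { "list" }, Terminal.DEFAULT);
// status follows ExitCodes; 0 means the listing succeeded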


@ -0,0 +1,124 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;
public class XPackExtensionInfo {
public static final String XPACK_EXTENSION_PROPERTIES = "xpack-extension-descriptor.properties";
private String name;
private String description;
private String version;
private String classname;
public XPackExtensionInfo() {
}
/**
* Information about an extension.
*
* @param name Its name
* @param description Its description
* @param version Its version number
* @param classname The fully qualified name of the class implementing the extension
*/
XPackExtensionInfo(String name, String description, String version, String classname) {
this.name = name;
this.description = description;
this.version = version;
this.classname = classname;
}
/** reads (and validates) extension metadata descriptor file */
public static XPackExtensionInfo readFromProperties(Path dir) throws IOException {
Path descriptor = dir.resolve(XPACK_EXTENSION_PROPERTIES);
Properties props = new Properties();
try (InputStream stream = Files.newInputStream(descriptor)) {
props.load(stream);
}
String name = props.getProperty("name");
if (name == null || name.isEmpty()) {
throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]");
}
String description = props.getProperty("description");
if (description == null) {
throw new IllegalArgumentException("Property [description] is missing for extension [" + name + "]");
}
String version = props.getProperty("version");
if (version == null) {
throw new IllegalArgumentException("Property [version] is missing for extension [" + name + "]");
}
String xpackVersionString = props.getProperty("xpack.version");
if (xpackVersionString == null) {
throw new IllegalArgumentException("Property [xpack.version] is missing for extension [" + name + "]");
}
Version xpackVersion = Version.fromString(xpackVersionString);
if (xpackVersion.equals(Version.CURRENT) == false) {
throw new IllegalArgumentException("extension [" + name + "] is incompatible with Elasticsearch [" +
Version.CURRENT.toString() + "]. Was designed for version [" + xpackVersionString + "]");
}
String javaVersionString = props.getProperty("java.version");
if (javaVersionString == null) {
throw new IllegalArgumentException("Property [java.version] is missing for extension [" + name + "]");
}
JarHell.checkVersionFormat(javaVersionString);
JarHell.checkJavaVersion(name, javaVersionString);
String classname = props.getProperty("classname");
if (classname == null) {
throw new IllegalArgumentException("Property [classname] is missing for extension [" + name + "]");
}
return new XPackExtensionInfo(name, description, version, classname);
}
/**
* @return Extension's name
*/
public String getName() {
return name;
}
/**
* @return Extension's description if any
*/
public String getDescription() {
return description;
}
/**
* @return extension's classname
*/
public String getClassname() {
return classname;
}
/**
* @return Version number for the extension
*/
public String getVersion() {
return version;
}
@Override
public String toString() {
final StringBuilder information = new StringBuilder()
.append("- XPack Extension information:\n")
.append("Name: ").append(name).append("\n")
.append("Description: ").append(description).append("\n")
.append("Version: ").append(version).append("\n")
.append(" * Classname: ").append(classname);
return information.toString();
}
}
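
Reading a descriptor back is a one-liner; a usage sketch (the path is illustrative):

Path extensionDir = Paths.get("plugins", "xpack", "extensions", "my-extension"); // hypothetical location
XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extensionDir);
System.out.println(info.getName() + " " + info.getVersion()); // e.g. "my-extension 1.0"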


@ -0,0 +1,186 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.authc.AuthenticationModule;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Arrays;
import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
/**
* Loads x-pack extensions from the classpath (used by tests) and from the
* extensions directory, and wires them into the modules they extend.
*/
public class XPackExtensionsService {
private final Settings settings;
/**
* We keep around a list of extensions
*/
private final List<Tuple<XPackExtensionInfo, XPackExtension>> extensions;
/**
* Constructs a new XPackExtensionsService
* @param settings The settings of the system
* @param extsDirectory The directory extensions exist in, or null if extensions should not be loaded from the filesystem
* @param classpathExtensions Extensions that exist in the classpath which should be loaded
*/
public XPackExtensionsService(Settings settings, Path extsDirectory, Collection<Class<? extends XPackExtension>> classpathExtensions) {
this.settings = settings;
List<Tuple<XPackExtensionInfo, XPackExtension>> extensionsLoaded = new ArrayList<>();
// first we load extensions that are on the classpath. this is for tests
for (Class<? extends XPackExtension> extClass : classpathExtensions) {
XPackExtension ext = loadExtension(extClass, settings);
XPackExtensionInfo extInfo = new XPackExtensionInfo(ext.name(), ext.description(), "NA", extClass.getName());
extensionsLoaded.add(new Tuple<>(extInfo, ext));
}
// now, find all the ones that are in plugins/xpack/extensions
if (extsDirectory != null) {
try {
List<Bundle> bundles = getExtensionBundles(extsDirectory);
List<Tuple<XPackExtensionInfo, XPackExtension>> loaded = loadBundles(bundles);
extensionsLoaded.addAll(loaded);
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize extensions", ex);
}
}
extensions = Collections.unmodifiableList(extensionsLoaded);
}
public void onModule(AuthenticationModule module) {
for (Tuple<XPackExtensionInfo, XPackExtension> tuple : extensions) {
tuple.v2().onModule(module);
}
}
// a "bundle" is a an extension in a single classloader.
static class Bundle {
XPackExtensionInfo info;
List<URL> urls = new ArrayList<>();
}
static List<Bundle> getExtensionBundles(Path extsDirectory) throws IOException {
ESLogger logger = Loggers.getLogger(XPackExtensionsService.class);
// TODO: remove this leniency, but tests bogusly rely on it
if (!isAccessibleDirectory(extsDirectory, logger)) {
return Collections.emptyList();
}
List<Bundle> bundles = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(extsDirectory)) {
for (Path extension : stream) {
if (FileSystemUtils.isHidden(extension)) {
logger.trace("--- skip hidden extension file[{}]", extension.toAbsolutePath());
continue;
}
logger.trace("--- adding extension [{}]", extension.toAbsolutePath());
final XPackExtensionInfo info;
try {
info = XPackExtensionInfo.readFromProperties(extension);
} catch (IOException e) {
throw new IllegalStateException("Could not load extension descriptor for existing extension ["
+ extension.getFileName() + "]. Was the extension built before 2.0?", e);
}
List<URL> urls = new ArrayList<>();
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(extension, "*.jar")) {
for (Path jar : jarStream) {
// normalize with toRealPath to get symlinks out of our hair
urls.add(jar.toRealPath().toUri().toURL());
}
}
final Bundle bundle = new Bundle();
bundles.add(bundle);
bundle.info = info;
bundle.urls.addAll(urls);
}
}
return bundles;
}
private List<Tuple<XPackExtensionInfo, XPackExtension>> loadBundles(List<Bundle> bundles) {
List<Tuple<XPackExtensionInfo, XPackExtension>> exts = new ArrayList<>();
for (Bundle bundle : bundles) {
// jar-hell check the bundle against the parent classloader and the x-pack classloader
// the install command checks this too, but we check again in case users mess with jar files manually
try {
final List<URL> jars = new ArrayList<>();
// add the parent jars to the list
jars.addAll(Arrays.asList(JarHell.parseClassPath()));
// add the x-pack jars to the list
ClassLoader xpackLoader = getClass().getClassLoader();
// this class is loaded from the isolated x-pack plugin's classloader
if (xpackLoader instanceof URLClassLoader) {
jars.addAll(Arrays.asList(((URLClassLoader) xpackLoader).getURLs()));
}
jars.addAll(bundle.urls);
JarHell.checkJarHell(jars.toArray(new URL[0]));
} catch (Exception e) {
throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e);
}
// create a child to load the extension in this bundle
ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
final Class<? extends XPackExtension> extClass = loadExtensionClass(bundle.info.getClassname(), loader);
final XPackExtension ext = loadExtension(extClass, settings);
exts.add(new Tuple<>(bundle.info, ext));
}
return Collections.unmodifiableList(exts);
}
private Class<? extends XPackExtension> loadExtensionClass(String className, ClassLoader loader) {
try {
return loader.loadClass(className).asSubclass(XPackExtension.class);
} catch (ClassNotFoundException e) {
throw new ElasticsearchException("Could not find extension class [" + className + "]", e);
}
}
private XPackExtension loadExtension(Class<? extends XPackExtension> extClass, Settings settings) {
try {
try {
return extClass.getConstructor(Settings.class).newInstance(settings);
} catch (NoSuchMethodException e) {
try {
return extClass.getConstructor().newInstance();
} catch (NoSuchMethodException e1) {
throw new ElasticsearchException("No constructor for [" + extClass + "]. An extension class must " +
"have either an empty default constructor or a single argument constructor accepting a " +
"Settings instance");
}
}
} catch (Throwable e) {
throw new ElasticsearchException("Failed to load extension class [" + extClass.getName() + "]", e);
}
}
}
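
loadExtension above prefers a single-argument Settings constructor and falls back to a no-arg one. A sketch of an extension taking the Settings path (names are illustrative):

public class SettingsAwareExtension extends XPackExtension {
    private final Settings settings;

    // matched by loadExtension's getConstructor(Settings.class) attempt
    public SettingsAwareExtension(Settings settings) {
        this.settings = settings;
    }

    @Override
    public String name() {
        return "settings-aware";
    }

    @Override
    public String description() {
        return "reads node settings at construction time";
    }
}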


@ -0,0 +1,185 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.DirectoryStream;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.FileVisitResult;
import java.nio.file.NoSuchFileException;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@LuceneTestCase.SuppressFileSystems("*")
public class InstallXPackExtensionCommandTests extends ESTestCase {
/**
* Creates a test environment with plugins and xpack extensions directories.
*/
static Environment createEnv() throws IOException {
Path home = createTempDir();
Files.createDirectories(home.resolve("org/elasticsearch/xpack/extensions").resolve("xpack").resolve("extensions"));
Settings settings = Settings.builder()
.put("path.home", home)
.build();
return new Environment(settings);
}
/**
* creates a fake jar file with empty class files
*/
static void writeJar(Path jar, String... classes) throws IOException {
try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(jar))) {
for (String clazz : classes) {
stream.putNextEntry(new ZipEntry(clazz + ".class")); // no package names, just support simple classes
}
}
}
static String writeZip(Path structure) throws IOException {
Path zip = createTempDir().resolve(structure.getFileName() + ".zip");
try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
Files.walkFileTree(structure, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
String target = structure.relativize(file).toString();
stream.putNextEntry(new ZipEntry(target));
Files.copy(file, stream);
return FileVisitResult.CONTINUE;
}
});
}
return zip.toUri().toURL().toString();
}
/**
* creates an extension .zip and returns the url for testing
*/
static String createExtension(String name, Path structure) throws IOException {
XPackExtensionTestUtil.writeProperties(structure,
"description", "fake desc",
"name", name,
"version", "1.0",
"xpack.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"),
"classname", "FakeExtension");
writeJar(structure.resolve("extension.jar"), "FakeExtension");
return writeZip(structure);
}
static MockTerminal installExtension(String extensionUrl, Environment env) throws Exception {
MockTerminal terminal = new MockTerminal();
new InstallXPackExtensionCommand(env).execute(terminal, extensionUrl, true);
return terminal;
}
void assertExtension(String name, Path original, Environment env) throws IOException {
Path got = env.pluginsFile().resolve("xpack").resolve("extensions").resolve(name);
assertTrue("dir " + name + " exists", Files.exists(got));
assertTrue("jar was copied", Files.exists(got.resolve("extension.jar")));
assertInstallCleaned(env);
}
void assertInstallCleaned(Environment env) throws IOException {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile().resolve("xpack").resolve("extensions"))) {
for (Path file : stream) {
if (file.getFileName().toString().startsWith(".installing")) {
fail("Installation dir still exists, " + file);
}
}
}
}
public void testSomethingWorks() throws Exception {
Environment env = createEnv();
Path extDir = createTempDir();
String extZip = createExtension("fake", extDir);
installExtension(extZip, env);
assertExtension("fake", extDir, env);
}
public void testSpaceInUrl() throws Exception {
Environment env = createEnv();
Path extDir = createTempDir();
String extZip = createExtension("fake", extDir);
Path extZipWithSpaces = createTempFile("foo bar", ".zip");
try (InputStream in = new URL(extZip).openStream()) {
Files.copy(in, extZipWithSpaces, StandardCopyOption.REPLACE_EXISTING);
}
installExtension(extZipWithSpaces.toUri().toURL().toString(), env);
assertExtension("fake", extDir, env);
}
public void testMalformedUrlNotMaven() throws Exception {
// has two colons, so it appears similar to maven coordinates
MalformedURLException e = expectThrows(MalformedURLException.class, () -> {
installExtension("://host:1234", createEnv());
});
assertTrue(e.getMessage(), e.getMessage().contains("no protocol"));
}
public void testJarHell() throws Exception {
Environment env = createEnv();
Path extDir = createTempDir();
writeJar(extDir.resolve("other.jar"), "FakeExtension");
String extZip = createExtension("fake", extDir); // adds extension.jar with FakeExtension
IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
installExtension(extZip, env);
});
assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
assertInstallCleaned(env);
}
public void testIsolatedExtension() throws Exception {
Environment env = createEnv();
// these both share the same FakeExtension class
Path extDir1 = createTempDir();
String extZip1 = createExtension("fake1", extDir1);
installExtension(extZip1, env);
Path extDir2 = createTempDir();
String extZip2 = createExtension("fake2", extDir2);
installExtension(extZip2, env);
assertExtension("fake1", extDir1, env);
assertExtension("fake2", extDir2, env);
}
public void testExistingExtension() throws Exception {
Environment env = createEnv();
String extZip = createExtension("fake", createTempDir());
installExtension(extZip, env);
UserError e = expectThrows(UserError.class, () -> {
installExtension(extZip, env);
});
assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
assertInstallCleaned(env);
}
public void testMissingDescriptor() throws Exception {
Environment env = createEnv();
Path extDir = createTempDir();
Files.createFile(extDir.resolve("fake.yml"));
String extZip = writeZip(extDir);
NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> {
installExtension(extZip, env);
});
assertTrue(e.getMessage(), e.getMessage().contains("xpack-extension-descriptor.properties"));
assertInstallCleaned(env);
}
}


@ -0,0 +1,78 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@LuceneTestCase.SuppressFileSystems("*")
public class ListXPackExtensionCommandTests extends ESTestCase {
Environment createEnv() throws IOException {
Path home = createTempDir();
Settings settings = Settings.builder()
.put("path.home", home)
.build();
return new Environment(settings);
}
Path createExtensionDir(Environment env) throws IOException {
Path path = env.pluginsFile().resolve("xpack").resolve("extensions");
return Files.createDirectories(path);
}
static MockTerminal listExtensions(Environment env) throws Exception {
MockTerminal terminal = new MockTerminal();
String[] args = {};
int status = new ListXPackExtensionCommand(env).main(args, terminal);
assertEquals(ExitCodes.OK, status);
return terminal;
}
public void testExtensionsDirMissing() throws Exception {
Environment env = createEnv();
Path extDir = createExtensionDir(env);
Files.delete(extDir);
IOException e = expectThrows(IOException.class, () -> {
listExtensions(env);
});
assertTrue(e.getMessage(), e.getMessage().contains("Extensions directory missing"));
}
public void testNoExtensions() throws Exception {
Environment env = createEnv();
createExtensionDir(env);
MockTerminal terminal = listExtensions(env);
assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty());
}
public void testOneExtension() throws Exception {
Environment env = createEnv();
Path extDir = createExtensionDir(env);
Files.createDirectory(extDir.resolve("fake"));
MockTerminal terminal = listExtensions(env);
assertTrue(terminal.getOutput(), terminal.getOutput().contains("fake"));
}
public void testTwoExtensions() throws Exception {
Environment env = createEnv();
Path extDir = createExtensionDir(env);
Files.createDirectory(extDir.resolve("fake1"));
Files.createDirectory(extDir.resolve("fake2"));
MockTerminal terminal = listExtensions(env);
String output = terminal.getOutput();
assertTrue(output, output.contains("fake1"));
assertTrue(output, output.contains("fake2"));
}
}


@ -0,0 +1,75 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
@LuceneTestCase.SuppressFileSystems("*")
public class RemoveXPackExtensionCommandTests extends ESTestCase {
/** Creates a test environment with a temporary home directory. */
static Environment createEnv() throws IOException {
Path home = createTempDir();
Settings settings = Settings.builder()
.put("path.home", home)
.build();
return new Environment(settings);
}
Path createExtensionDir(Environment env) throws IOException {
Path path = env.pluginsFile().resolve("xpack").resolve("extensions");
return Files.createDirectories(path);
}
static MockTerminal removeExtension(String name, Environment env) throws Exception {
MockTerminal terminal = new MockTerminal();
new RemoveXPackExtensionCommand(env).execute(terminal, name);
return terminal;
}
static void assertRemoveCleaned(Path extDir) throws IOException {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(extDir)) {
for (Path file : stream) {
if (file.getFileName().toString().startsWith(".removing")) {
fail("Removal dir still exists, " + file);
}
}
}
}
public void testMissing() throws Exception {
Environment env = createEnv();
Path extDir = createExtensionDir(env);
UserError e = expectThrows(UserError.class, () -> {
removeExtension("dne", env);
});
assertTrue(e.getMessage(), e.getMessage().contains("Extension dne not found"));
assertRemoveCleaned(extDir);
}
public void testBasic() throws Exception {
Environment env = createEnv();
Path extDir = createExtensionDir(env);
Files.createDirectory(extDir.resolve("fake"));
Files.createFile(extDir.resolve("fake").resolve("extension.jar"));
Files.createDirectory(extDir.resolve("fake").resolve("subdir"));
Files.createDirectory(extDir.resolve("other"));
removeExtension("fake", env);
assertFalse(Files.exists(extDir.resolve("fake")));
assertTrue(Files.exists(extDir.resolve("other")));
assertRemoveCleaned(extDir);
}
}


@ -0,0 +1,161 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.elasticsearch.Version;
import org.elasticsearch.test.ESTestCase;
import java.nio.file.Path;
public class XPackExtensionInfoTests extends ESTestCase {
public void testReadFromProperties() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", "my_extension",
"version", "1.0",
"xpack.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"),
"classname", "FakeExtension");
XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extensionDir);
assertEquals("my_extension", info.getName());
assertEquals("fake desc", info.getDescription());
assertEquals("1.0", info.getVersion());
assertEquals("FakeExtension", info.getClassname());
}
public void testReadFromPropertiesNameMissing() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("Property [name] is missing in"));
XPackExtensionTestUtil.writeProperties(extensionDir, "name", "");
IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e1.getMessage().contains("Property [name] is missing in"));
}
public void testReadFromPropertiesDescriptionMissing() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir, "name", "fake-extension");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("[description] is missing"));
}
public void testReadFromPropertiesVersionMissing() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir, "description", "fake desc", "name", "fake-extension");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("[version] is missing"));
}
public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", "my_extension",
"version", "1.0");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("[xpack.version] is missing"));
}
public void testReadFromPropertiesJavaVersionMissing() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", "my_extension",
"xpack.version", Version.CURRENT.toString(),
"version", "1.0");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("[java.version] is missing"));
}
public void testReadFromPropertiesJavaVersionIncompatible() throws Exception {
String extensionName = "fake-extension";
Path extensionDir = createTempDir().resolve(extensionName);
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", extensionName,
"xpack.version", Version.CURRENT.toString(),
"java.version", "1000000.0",
"classname", "FakeExtension",
"version", "1.0");
IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage(), e.getMessage().contains(extensionName + " requires Java"));
}
public void testReadFromPropertiesBadJavaVersionFormat() throws Exception {
String extensionName = "fake-extension";
Path extensionDir = createTempDir().resolve(extensionName);
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", extensionName,
"xpack.version", Version.CURRENT.toString(),
"java.version", "1.7.0_80",
"classname", "FakeExtension",
"version", "1.0");
IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage(),
e.getMessage().equals("version string must be a sequence of nonnegative decimal " +
"integers separated by \".\"'s and may have leading zeros but was 1.7.0_80"));
}
public void testReadFromPropertiesBogusElasticsearchVersion() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"version", "1.0",
"name", "my_extension",
"xpack.version", "bogus");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("version needs to contain major, minor, and revision"));
}
public void testReadFromPropertiesOldElasticsearchVersion() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", "my_extension",
"version", "1.0",
"xpack.version", Version.V_2_0_0.toString());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("Was designed for version [2.0.0]"));
}
public void testReadFromPropertiesJvmMissingClassname() throws Exception {
Path extensionDir = createTempDir().resolve("fake-extension");
XPackExtensionTestUtil.writeProperties(extensionDir,
"description", "fake desc",
"name", "my_extension",
"version", "1.0",
"xpack.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
XPackExtensionInfo.readFromProperties(extensionDir);
});
assertTrue(e.getMessage().contains("Property [classname] is missing"));
}
}


@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;
/** Utility methods for testing extensions */
public class XPackExtensionTestUtil {
/** convenience method to write an extension properties file */
public static void writeProperties(Path pluginDir, String... stringProps) throws IOException {
assert stringProps.length % 2 == 0;
Files.createDirectories(pluginDir);
Path propertiesFile = pluginDir.resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES);
Properties properties = new Properties();
for (int i = 0; i < stringProps.length; i += 2) {
properties.put(stringProps[i], stringProps[i + 1]);
}
try (OutputStream out = Files.newOutputStream(propertiesFile)) {
properties.store(out, "");
}
}
}


@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.extensions;
import org.elasticsearch.test.ESTestCase;
import java.nio.file.Files;
import java.nio.file.Path;
public class XPackExtensionsServiceTests extends ESTestCase {
public void testExistingExtensionMissingDescriptor() throws Exception {
Path extensionsDir = createTempDir();
Files.createDirectory(extensionsDir.resolve("extension-missing-descriptor"));
IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
XPackExtensionsService.getExtensionBundles(extensionsDir);
});
assertTrue(e.getMessage(),
e.getMessage().contains("Could not load extension descriptor for existing extension"));
}
}