Merge branch 'master' into reporting/rich-completion-notifications
Original commit: elastic/x-pack-elasticsearch@c2b4626b7e
This commit is contained in:
commit
099d82cbb9
|
@ -1,9 +1,13 @@
|
|||
apply plugin: 'elasticsearch.build'
|
||||
|
||||
dependencies {
|
||||
compile project(':x-plugins:elasticsearch:license:base')
|
||||
compile project(':x-plugins:elasticsearch:x-pack')
|
||||
compile "org.elasticsearch:elasticsearch:${version}"
|
||||
testCompile "org.elasticsearch.test:framework:${version}"
|
||||
}
|
||||
|
||||
project.forbiddenPatterns {
|
||||
exclude '**/*.key'
|
||||
}
|
||||
|
||||
dependencyLicenses.enabled = false
|
|
@ -7,13 +7,12 @@ package org.elasticsearch.license.licensor;
|
|||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefIterator;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.CryptUtils;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.CryptUtils;
|
||||
import org.elasticsearch.license.License;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
|
@ -20,8 +20,8 @@ import java.security.KeyPair;
|
|||
import java.security.KeyPairGenerator;
|
||||
import java.security.SecureRandom;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPrivateKey;
|
||||
import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPublicKey;
|
||||
import static org.elasticsearch.license.CryptUtils.writeEncryptedPrivateKey;
|
||||
import static org.elasticsearch.license.CryptUtils.writeEncryptedPublicKey;
|
||||
|
||||
public class KeyPairGeneratorTool extends Command {
|
||||
|
|
@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.license.licensor.LicenseSigner;
|
||||
|
||||
public class LicenseGeneratorTool extends Command {
|
|
@ -20,8 +20,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.core.LicenseVerifier;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.license.LicenseVerifier;
|
||||
|
||||
public class LicenseVerificationTool extends Command {
|
||||
|
|
@ -6,9 +6,9 @@
|
|||
package org.elasticsearch.license.licensor;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.DateUtils;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.core.LicenseVerifier;
|
||||
import org.elasticsearch.license.DateUtils;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.license.LicenseVerifier;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
|
@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.DateUtils;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.DateUtils;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
|
@ -13,7 +13,7 @@ import org.elasticsearch.cli.Command;
|
|||
import org.elasticsearch.cli.CommandTestCase;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.UserException;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.license.licensor.TestUtils;
|
||||
import org.junit.Before;
|
||||
|
|
@ -14,7 +14,7 @@ import org.elasticsearch.cli.CommandTestCase;
|
|||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.UserException;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.license.licensor.TestUtils;
|
||||
import org.junit.Before;
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
elasticsearch-license
|
||||
=====================
|
||||
|
||||
Elasticsearch Licensing core, tools and plugin
|
||||
|
||||
## Core
|
||||
|
||||
Contains core data structures, utilities used by **Licensor** and **Plugin**.
|
||||
|
||||
See `core/` and `core-shaded/`
|
||||
|
||||
## Licensor
|
||||
|
||||
Contains a collection of tools to generate key-pairs, licenses and validate licenses.
|
||||
|
||||
See `licensor/`
|
||||
|
||||
see [wiki] (https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
|
||||
[Licensing Tools Usage & Reference] (https://github.com/elasticsearch/elasticsearch-license/wiki/License-Tools-Usage-&-Reference)
|
||||
|
||||
## Plugin
|
||||
|
||||
**NOTE**: The license plugin has to be packaged with the right public key when being deployed to public repositories in maven
|
||||
or uploaded to s3. Use `-Dkeys.path=<PATH_TO_KEY_DIR>` with maven command to package the plugin with a specified key.
|
||||
|
||||
See `plugin/`
|
||||
|
||||
see [Getting Started] (https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/getting-started.asciidoc) to install license plugin.
|
||||
|
||||
see [Licensing REST APIs] (https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/license.asciidoc)
|
||||
to use the license plugin from an elasticsearch deployment.
|
||||
|
||||
see [wiki] (https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
|
||||
- [License Plugin Consumer Interface] (https://github.com/elasticsearch/elasticsearch-license/wiki/License---Consumer-Interface)
|
||||
- [License Plugin Release Process] (https://github.com/elasticsearch/elasticsearch-license/wiki/Plugin-Release-Process)
|
||||
- [License Plugin Design] (https://github.com/elasticsearch/elasticsearch-license/wiki/License-Plugin--Design)
|
|
@ -1,20 +0,0 @@
|
|||
apply plugin: 'elasticsearch.build'
|
||||
|
||||
dependencies {
|
||||
compile "org.elasticsearch:elasticsearch:${version}"
|
||||
testCompile "org.elasticsearch.test:framework:${version}"
|
||||
}
|
||||
|
||||
compactProfile = 'full'
|
||||
|
||||
dependencyLicenses.enabled = false
|
||||
|
||||
jar {
|
||||
baseName = 'license-core'
|
||||
}
|
||||
|
||||
modifyPom {
|
||||
project {
|
||||
artifactId 'license-core'
|
||||
}
|
||||
}
|
|
@ -1,201 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.ESTestCase.randomFrom;
|
||||
import static org.hamcrest.core.IsEqual.equalTo;
|
||||
|
||||
public class TestUtils {
|
||||
|
||||
private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
|
||||
private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
|
||||
private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
|
||||
|
||||
public static String dateMathString(String time, final long now) {
|
||||
return dateTimeFormatter.print(dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
public static long dateMath(String time, final long now) {
|
||||
return dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static LicenseSpec generateRandomLicenseSpec(int version) {
|
||||
boolean datesInMillis = randomBoolean();
|
||||
long now = System.currentTimeMillis();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String feature = "feature__" + randomInt();
|
||||
String issuer = "issuer__" + randomInt();
|
||||
String issuedTo = "issuedTo__" + randomInt();
|
||||
final String type;
|
||||
final String subscriptionType;
|
||||
if (version < License.VERSION_NO_FEATURE_TYPE) {
|
||||
subscriptionType = randomFrom("gold", "silver", "platinum");
|
||||
type = "subscription";//randomFrom("subscription", "internal", "development");
|
||||
} else {
|
||||
subscriptionType = null;
|
||||
type = randomFrom("basic", "dev", "gold", "silver", "platinum");
|
||||
}
|
||||
int maxNodes = randomIntBetween(5, 100);
|
||||
if (datesInMillis) {
|
||||
long issueDateInMillis = dateMath("now", now);
|
||||
long expiryDateInMillis = dateMath("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDateInMillis, expiryDateInMillis, type, subscriptionType, issuedTo, issuer,
|
||||
maxNodes);
|
||||
} else {
|
||||
String issueDate = dateMathString("now", now);
|
||||
String expiryDate = dateMathString("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDate, expiryDate, type, subscriptionType, issuedTo, issuer, maxNodes);
|
||||
}
|
||||
}
|
||||
|
||||
public static String generateLicenseSpecString(LicenseSpec licenseSpec) throws IOException {
|
||||
XContentBuilder licenses = jsonBuilder();
|
||||
licenses.startObject();
|
||||
licenses.startArray("licenses");
|
||||
licenses.startObject()
|
||||
.field("uid", licenseSpec.uid)
|
||||
.field("type", licenseSpec.type)
|
||||
.field("subscription_type", licenseSpec.subscriptionType)
|
||||
.field("issued_to", licenseSpec.issuedTo)
|
||||
.field("issuer", licenseSpec.issuer)
|
||||
.field("feature", licenseSpec.feature)
|
||||
.field("max_nodes", licenseSpec.maxNodes);
|
||||
|
||||
if (licenseSpec.issueDate != null) {
|
||||
licenses.field("issue_date", licenseSpec.issueDate);
|
||||
} else {
|
||||
licenses.field("issue_date_in_millis", licenseSpec.issueDateInMillis);
|
||||
}
|
||||
if (licenseSpec.expiryDate != null) {
|
||||
licenses.field("expiry_date", licenseSpec.expiryDate);
|
||||
} else {
|
||||
licenses.field("expiry_date_in_millis", licenseSpec.expiryDateInMillis);
|
||||
}
|
||||
licenses.field("version", licenseSpec.version);
|
||||
licenses.endObject();
|
||||
licenses.endArray();
|
||||
licenses.endObject();
|
||||
return licenses.string();
|
||||
}
|
||||
|
||||
public static License generateLicenses(LicenseSpec spec) {
|
||||
License.Builder builder = License.builder()
|
||||
.uid(spec.uid)
|
||||
.feature(spec.feature)
|
||||
.type(spec.type)
|
||||
.subscriptionType(spec.subscriptionType)
|
||||
.issuedTo(spec.issuedTo)
|
||||
.issuer(spec.issuer)
|
||||
.maxNodes(spec.maxNodes);
|
||||
|
||||
if (spec.expiryDate != null) {
|
||||
builder.expiryDate(DateUtils.endOfTheDay(spec.expiryDate));
|
||||
} else {
|
||||
builder.expiryDate(spec.expiryDateInMillis);
|
||||
}
|
||||
if (spec.issueDate != null) {
|
||||
builder.issueDate(DateUtils.beginningOfTheDay(spec.issueDate));
|
||||
} else {
|
||||
builder.issueDate(spec.issueDateInMillis);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static void assertLicenseSpec(LicenseSpec spec, License license) {
|
||||
MatcherAssert.assertThat(license.uid(), equalTo(spec.uid));
|
||||
MatcherAssert.assertThat(license.issuedTo(), equalTo(spec.issuedTo));
|
||||
MatcherAssert.assertThat(license.issuer(), equalTo(spec.issuer));
|
||||
MatcherAssert.assertThat(license.type(), equalTo(spec.type));
|
||||
MatcherAssert.assertThat(license.maxNodes(), equalTo(spec.maxNodes));
|
||||
if (spec.issueDate != null) {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(DateUtils.beginningOfTheDay(spec.issueDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(spec.issueDateInMillis));
|
||||
}
|
||||
if (spec.expiryDate != null) {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(DateUtils.endOfTheDay(spec.expiryDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(spec.expiryDateInMillis));
|
||||
}
|
||||
}
|
||||
|
||||
public static class LicenseSpec {
|
||||
public final int version;
|
||||
public final String feature;
|
||||
public final String issueDate;
|
||||
public final long issueDateInMillis;
|
||||
public final String expiryDate;
|
||||
public final long expiryDateInMillis;
|
||||
public final String uid;
|
||||
public final String type;
|
||||
public final String subscriptionType;
|
||||
public final String issuedTo;
|
||||
public final String issuer;
|
||||
public final int maxNodes;
|
||||
|
||||
public LicenseSpec(String issueDate, String expiryDate) {
|
||||
this(License.VERSION_CURRENT, UUID.randomUUID().toString(), "feature", issueDate, expiryDate, "trial", "none", "customer",
|
||||
"elasticsearch", 5);
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, long issueDateInMillis, long expiryDateInMillis, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDateInMillis = issueDateInMillis;
|
||||
this.issueDate = null;
|
||||
this.expiryDateInMillis = expiryDateInMillis;
|
||||
this.expiryDate = null;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, String issueDate, String expiryDate, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDate = issueDate;
|
||||
this.issueDateInMillis = -1;
|
||||
this.expiryDate = expiryDate;
|
||||
this.expiryDateInMillis = -1;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
subprojects {
|
||||
project.afterEvaluate {
|
||||
project.forbiddenPatterns {
|
||||
exclude '**/*.key'
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
es.logger.level=INFO
|
||||
log4j.rootLogger=${es.logger.level}, out
|
||||
|
||||
log4j.logger.org.apache.http=INFO, out
|
||||
log4j.additivity.org.apache.http=false
|
||||
|
||||
log4j.logger.org.elasticsearch.license=TRACE
|
||||
|
||||
log4j.appender.out=org.apache.log4j.ConsoleAppender
|
||||
log4j.appender.out.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
|
Binary file not shown.
|
@ -13,6 +13,7 @@ import org.elasticsearch.action.search.SearchResponse;
|
|||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
@ -113,6 +114,7 @@ public class IndexAuditIT extends ESIntegTestCase {
|
|||
protected Settings externalClusterClientSettings() {
|
||||
return Settings.builder()
|
||||
.put(Security.USER_SETTING.getKey(), USER + ":" + PASS)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
|
||||
/*
|
||||
* Messy tests that depend on mustache directly. Fix these!
|
||||
*/
|
||||
|
||||
apply plugin: 'elasticsearch.messy-test'
|
||||
|
||||
dependencies {
|
||||
testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'testArtifacts')
|
||||
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
|
||||
}
|
|
@ -1,393 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.mustache.MustachePlugin;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.common.text.TextTemplate;
|
||||
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
|
||||
import org.elasticsearch.xpack.watcher.actions.ExecutableActions;
|
||||
import org.elasticsearch.xpack.watcher.condition.always.ExecutableAlwaysCondition;
|
||||
import org.elasticsearch.xpack.watcher.execution.TriggeredExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.input.Input;
|
||||
import org.elasticsearch.xpack.watcher.input.search.ExecutableSearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInputFactory;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.ExecutableSimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.support.WatcherScript;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
|
||||
import org.elasticsearch.xpack.watcher.watch.Payload;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.chrono.ISOChronology;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
|
||||
import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.getRandomSupportedSearchType;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, supportsDedicatedMasters = false,
|
||||
numDataNodes = 1)
|
||||
public class SearchInputIT extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
Collection<Class<? extends Plugin>> types = new ArrayList<>();
|
||||
types.addAll(super.nodePlugins());
|
||||
types.add(MustachePlugin.class);
|
||||
types.add(CustomScriptContextPlugin.class);
|
||||
return types;
|
||||
}
|
||||
|
||||
private static final String TEMPLATE_QUERY = "{\"query\":{\"bool\":{\"must\":{\"match\":{\"event_type\":{\"query\":\"a\"," +
|
||||
"\"type\":\"boolean\"}}},\"filter\":{\"range\":{\"_timestamp\":" +
|
||||
"{\"from\":\"{{ctx.trigger.scheduled_time}}||-{{seconds_param}}\",\"to\":\"{{ctx.trigger.scheduled_time}}\"," +
|
||||
"\"include_lower\":true,\"include_upper\":true}}}}}}";
|
||||
|
||||
@Override
|
||||
public Settings nodeSettings(int nodeOrdinal) {
|
||||
final Path tempDir = createTempDir();
|
||||
final Path configPath = tempDir.resolve("config");
|
||||
final Path scriptPath = configPath.resolve("scripts");
|
||||
try {
|
||||
Files.createDirectories(scriptPath);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("failed to create config dir");
|
||||
|
||||
}
|
||||
try (InputStream stream = SearchInputIT.class.getResourceAsStream("/org/elasticsearch/xpack/watcher/input/search/config/scripts" +
|
||||
"/test_disk_template.mustache");
|
||||
OutputStream out = Files.newOutputStream(scriptPath.resolve("test_disk_template.mustache"))) {
|
||||
Streams.copy(stream, out);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("failed to copy mustache template");
|
||||
}
|
||||
|
||||
|
||||
//Set path so ScriptService will pick up the test scripts
|
||||
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
|
||||
.put("path.conf", configPath).build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings transportClientSettings() {
|
||||
return Settings.builder()
|
||||
.put(super.transportClientSettings())
|
||||
.build();
|
||||
}
|
||||
|
||||
public void testExecute() throws Exception {
|
||||
SearchSourceBuilder searchSourceBuilder = searchSource().query(
|
||||
boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}")));
|
||||
SearchRequest searchRequest = client()
|
||||
.prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.request()
|
||||
.source(searchSourceBuilder);
|
||||
|
||||
WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
|
||||
WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
|
||||
WatchExecutionContext ctx = new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(0, UTC), emptyMap())),
|
||||
new DateTime(0, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
|
||||
timeValueSeconds(5));
|
||||
SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());
|
||||
|
||||
assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
|
||||
assertNotNull(result.executedRequest());
|
||||
assertThat(result.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertEquals(result.executedRequest().searchType(), request.getRequest().searchType());
|
||||
assertArrayEquals(result.executedRequest().indices(), request.getRequest().indices());
|
||||
assertEquals(result.executedRequest().indicesOptions(), request.getRequest().indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(result);
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
|
||||
}
|
||||
|
||||
public void testSearchInlineTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
Map<String, Object> triggerParams = new HashMap<String, Object>();
|
||||
triggerParams.put("triggered_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
triggerParams.put("scheduled_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
Map<String, Object> ctxParams = new HashMap<String, Object>();
|
||||
ctxParams.put("id", ctx.id().value());
|
||||
ctxParams.put("metadata", null);
|
||||
ctxParams.put("vars", new HashMap<String, Object>());
|
||||
ctxParams.put("watch_id", "test-watch");
|
||||
ctxParams.put("trigger", triggerParams);
|
||||
ctxParams.put("payload", new Payload.Simple().data());
|
||||
ctxParams.put("execution_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
Map<String, Object> expectedParams = new HashMap<String, Object>();
|
||||
expectedParams.put("seconds_param", "30s");
|
||||
expectedParams.put("ctx", ctxParams);
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.inline(TEMPLATE_QUERY).lang("mustache").params(params).build();
|
||||
|
||||
SearchRequest request = client().prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
if (getNumShards("test-search-index").numPrimaries > 1) {
|
||||
assertEquals(executedResult.executedRequest().searchType(), request.searchType());
|
||||
}
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(executedResult);
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
|
||||
}
|
||||
|
||||
public void testSearchIndexedTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
PutStoredScriptRequest indexedScriptRequest = client().admin().cluster().preparePutStoredScript()
|
||||
.setId("test-template")
|
||||
.setScriptLang("mustache")
|
||||
.setSource(new BytesArray(TEMPLATE_QUERY))
|
||||
.request();
|
||||
assertThat(client().admin().cluster().putStoredScript(indexedScriptRequest).actionGet().isAcknowledged(), is(true));
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.indexed("test-template").lang("mustache").params(params).build();
|
||||
|
||||
jsonBuilder().value(TextTemplate.indexed("test-template").params(params).build()).bytes();
|
||||
SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
if (getNumShards("test-search-index").numPrimaries > 1) {
|
||||
assertEquals(executedResult.executedRequest().searchType(), request.searchType());
|
||||
}
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(executedResult);
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
|
||||
|
||||
}
|
||||
|
||||
public void testSearchOnDiskTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.file("test_disk_template").lang("mustache").params(params).build();
|
||||
SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
}
|
||||
|
||||
public void testDifferentSearchType() throws Exception {
|
||||
SearchSourceBuilder searchSourceBuilder = searchSource().query(
|
||||
boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))
|
||||
);
|
||||
SearchType searchType = getRandomSupportedSearchType();
|
||||
|
||||
SearchRequest searchRequest = client()
|
||||
.prepareSearch()
|
||||
.setSearchType(searchType)
|
||||
.request()
|
||||
.source(searchSourceBuilder);
|
||||
|
||||
WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);
|
||||
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
|
||||
WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
|
||||
WatchExecutionContext ctx = new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(0, UTC), emptyMap())),
|
||||
new DateTime(0, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
|
||||
timeValueSeconds(5));
|
||||
SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());
|
||||
|
||||
assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
|
||||
assertNotNull(result.executedRequest());
|
||||
assertThat(result.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertEquals(result.executedRequest().searchType(), searchType);
|
||||
assertArrayEquals(result.executedRequest().indices(), searchRequest.indices());
|
||||
assertEquals(result.executedRequest().indicesOptions(), searchRequest.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(result);
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
|
||||
}
|
||||
|
||||
public void testParserValid() throws Exception {
|
||||
SearchRequest searchRequest = client().prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.request()
|
||||
.source(searchSource()
|
||||
.query(boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))));
|
||||
|
||||
TimeValue timeout = randomBoolean() ? TimeValue.timeValueSeconds(randomInt(10)) : null;
|
||||
XContentBuilder builder = jsonBuilder().value(
|
||||
new SearchInput(new WatcherSearchTemplateRequest(searchRequest), null, timeout, null));
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
IndicesQueriesRegistry indicesQueryRegistry = internalCluster().getInstance(IndicesQueriesRegistry.class);
|
||||
SearchInputFactory factory = new SearchInputFactory(Settings.EMPTY, WatcherClientProxy.of(client()), indicesQueryRegistry,
|
||||
null, null, scriptService());
|
||||
|
||||
SearchInput searchInput = factory.parseInput("_id", parser);
|
||||
assertEquals(SearchInput.TYPE, searchInput.type());
|
||||
assertThat(searchInput.getTimeout(), equalTo(timeout));
|
||||
}
|
||||
|
||||
private WatchExecutionContext createContext() {
|
||||
return new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(50000, UTC), emptyMap())),
|
||||
new DateTime(60000, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(60000, UTC), new DateTime(60000, UTC)),
|
||||
timeValueSeconds(5));
|
||||
}
|
||||
|
||||
private SearchInput.Result executeSearchInput(SearchRequest request, WatcherScript template,
|
||||
WatchExecutionContext ctx) throws IOException {
|
||||
createIndex("test-search-index");
|
||||
ensureGreen("test-search-index");
|
||||
SearchInput.Builder siBuilder = SearchInput.builder(new WatcherSearchTemplateRequest(request, template));
|
||||
|
||||
SearchInput si = siBuilder.build();
|
||||
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(si, logger, WatcherClientProxy.of(client()),
|
||||
watcherSearchTemplateService(), null);
|
||||
return searchInput.execute(ctx, new Payload.Simple());
|
||||
}
|
||||
|
||||
protected WatcherSearchTemplateService watcherSearchTemplateService() {
|
||||
String master = internalCluster().getMasterName();
|
||||
return new WatcherSearchTemplateService(internalCluster().clusterService(master).getSettings(),
|
||||
internalCluster().getInstance(ScriptService.class, master),
|
||||
internalCluster().getInstance(IndicesQueriesRegistry.class, master),
|
||||
internalCluster().getInstance(AggregatorParsers.class, master),
|
||||
internalCluster().getInstance(Suggesters.class, master)
|
||||
);
|
||||
}
|
||||
|
||||
protected ScriptService scriptService() {
|
||||
return internalCluster().getInstance(ScriptService.class);
|
||||
}
|
||||
|
||||
private XContentSource toXContentSource(SearchInput.Result result) throws IOException {
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
result.executedRequest().source().toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
return new XContentSource(builder);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom plugin that registers XPack script context.
|
||||
*/
|
||||
public static class CustomScriptContextPlugin extends Plugin implements ScriptPlugin {
|
||||
|
||||
@Override
|
||||
public ScriptContext.Plugin getCustomScriptContexts() {
|
||||
return WatcherScript.CTX_PLUGIN;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This package contains tests that use mustache to test what looks
|
||||
* to be unrelated functionality, or functionality that should be
|
||||
* tested with a mock instead. Instead of doing an epic battle
|
||||
* with these tests, they are temporarily moved here to the mustache
|
||||
* module's tests, but that is likely not where they belong. Please
|
||||
* help by cleaning them up and we can remove this package!
|
||||
*
|
||||
* <ul>
|
||||
* <li>If the test is testing templating integration with another core subsystem,
|
||||
* fix it to use a mock instead, so it can be in the core tests again</li>
|
||||
* <li>If the test is just being lazy, and does not really need templating to test
|
||||
* something, clean it up!</li>
|
||||
* </ul>
|
||||
*/
|
||||
|
||||
package org.elasticsearch.messy.tests;
|
|
@ -1,26 +0,0 @@
|
|||
{
|
||||
"query": {
|
||||
"bool": {
|
||||
"must" : [
|
||||
{
|
||||
"match": {
|
||||
"event_type": {
|
||||
"query": "a",
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"range": {
|
||||
"_timestamp": {
|
||||
"from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
|
||||
"to": "{{ctx.trigger.scheduled_time}}",
|
||||
"include_lower": true,
|
||||
"include_upper": true
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
{
|
||||
"query": {
|
||||
"bool": {
|
||||
"must" : [
|
||||
{
|
||||
"match": {
|
||||
"event_type": {
|
||||
"query": "a",
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"range": {
|
||||
"_timestamp": {
|
||||
"from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
|
||||
"to": "{{ctx.trigger.scheduled_time}}",
|
||||
"include_lower": true,
|
||||
"include_upper": true
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
|
@ -8,6 +8,8 @@ integTest {
|
|||
cluster {
|
||||
setting 'script.inline', 'true'
|
||||
plugin ':x-plugins:elasticsearch:x-pack'
|
||||
// Whitelist reindexing from the local node so we can test it.
|
||||
setting 'reindex.remote.whitelist', 'myself'
|
||||
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
|
||||
[
|
||||
test_admin: 'superuser',
|
||||
|
|
|
@ -10,6 +10,8 @@ admin:
|
|||
# Search and write on both source and destination indices. It should work if you could just search on the source and
|
||||
# write to the destination but that isn't how security works.
|
||||
minimal:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
@ -26,18 +28,24 @@ minimal:
|
|||
|
||||
# Read only operations on indices
|
||||
readonly:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: '*'
|
||||
privileges: [ read ]
|
||||
|
||||
# Write operations on destination index, none on source index
|
||||
dest_only:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: dest
|
||||
privileges: [ write ]
|
||||
|
||||
# Search and write on both source and destination indices with document level security filtering out some docs.
|
||||
can_not_see_hidden_docs:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
@ -59,6 +67,8 @@ can_not_see_hidden_docs:
|
|||
|
||||
# Search and write on both source and destination indices with field level security.
|
||||
can_not_see_hidden_fields:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
|
|
@ -147,10 +147,9 @@
|
|||
indices.refresh: {}
|
||||
|
||||
- do:
|
||||
headers: {es-security-runas-user: dest_only_user}
|
||||
headers: {es-security-runas-user: minimal_user}
|
||||
catch: forbidden
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
index: source
|
||||
|
|
|
@ -0,0 +1,418 @@
|
|||
---
|
||||
"Reindex from remote as superuser works":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
---
|
||||
"Reindex from remote searching as user with minimal privileges works":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: minimal_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
---
|
||||
"Reindex from remote reading as readonly user works when the indexing user is allowed to index":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: readonly_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
---
|
||||
"Reindex from remote as user that can't read from the source is forbidden":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: forbidden
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: dest_only_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Using a script to write to an index to which you don't have access is forbidden even if you read as a superuser":
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: tweet
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: tweet
|
||||
id: 2
|
||||
body: { "user": "another" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
headers: {es-security-runas-user: minimal_user}
|
||||
catch: forbidden
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
script:
|
||||
inline: if (ctx._source.user == "kimchy") {ctx._index = 'other_dest'}
|
||||
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# The index to which the user tried the unauthorized write didn't even get created
|
||||
- do:
|
||||
catch: missing
|
||||
search:
|
||||
index: other_dest
|
||||
|
||||
# Even the authorized index won't have made it because it was in the same batch as the unauthorized one.
|
||||
# If there had been lots of documents being copied then some might have made it into the authorized index.
|
||||
- do:
|
||||
catch: missing
|
||||
search:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Reindex from remote misses hidden docs":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 2
|
||||
body: { "text": "test", "hidden": true }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: can_not_see_hidden_docs_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
# We copied just one doc, presumably the one without the hidden field
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
# We didn't copy the doc with the hidden field
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
hidden: true
|
||||
- match: { hits.total: 0 }
|
||||
|
||||
---
|
||||
"Reindex misses hidden fields":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test", "foo": "z", "bar": "z" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: can_not_see_hidden_fields_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
foo: z
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
bar: z
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 0 }
|
||||
|
||||
|
||||
---
|
||||
"Reindex from remote with bad password is unauthorized":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: unauthorized
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: badpass
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Reindex from remote with no username or password is unauthorized":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: unauthorized
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
|
@ -10,10 +10,11 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
|||
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.transport.MockTransportClient;
|
||||
import org.elasticsearch.xpack.XPackTransportClient;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
@ -39,6 +40,7 @@ public class SecurityTransportClientIT extends ESIntegTestCase {
|
|||
protected Settings externalClusterClientSettings() {
|
||||
return Settings.builder()
|
||||
.put(Security.USER_SETTING.getKey(), ADMIN_USER_PW)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
@ -116,6 +118,6 @@ public class SecurityTransportClientIT extends ESIntegTestCase {
|
|||
.put("cluster.name", clusterName)
|
||||
.build();
|
||||
|
||||
return new MockTransportClient(settings, XPackPlugin.class).addTransportAddress(publishAddress);
|
||||
return new XPackTransportClient(settings).addTransportAddress(publishAddress);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,6 +13,7 @@ import org.elasticsearch.client.Response;
|
|||
import org.elasticsearch.client.ResponseException;
|
||||
import org.elasticsearch.client.transport.NoNodeAvailableException;
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
|
@ -22,7 +23,6 @@ import org.elasticsearch.test.ESIntegTestCase;
|
|||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackTransportClient;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -39,6 +39,7 @@ public class CustomRealmIT extends ESIntegTestCase {
|
|||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER)
|
||||
.put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -154,12 +154,12 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each
|
|||
integTest {
|
||||
cluster {
|
||||
setting 'xpack.monitoring.collection.interval', '3s'
|
||||
setting 'xpack.monitoring.collection.exporters._http.type', 'http'
|
||||
setting 'xpack.monitoring.collection.exporters._http.enabled', 'false'
|
||||
setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.path', clientKeyStore.name
|
||||
setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.password', 'keypass'
|
||||
setting 'xpack.monitoring.collection.exporters._http.auth.username', 'monitoring_agent'
|
||||
setting 'xpack.monitoring.collection.exporters._http.auth.password', 'changeme'
|
||||
setting 'xpack.monitoring.exporters._http.type', 'http'
|
||||
setting 'xpack.monitoring.exporters._http.enabled', 'false'
|
||||
setting 'xpack.monitoring.exporters._http.ssl.truststore.path', clientKeyStore.name
|
||||
setting 'xpack.monitoring.exporters._http.ssl.truststore.password', 'keypass'
|
||||
setting 'xpack.monitoring.exporters._http.auth.username', 'monitoring_agent'
|
||||
setting 'xpack.monitoring.exporters._http.auth.password', 'changeme'
|
||||
|
||||
setting 'xpack.security.transport.ssl.enabled', 'true'
|
||||
setting 'xpack.security.http.ssl.enabled', 'true'
|
||||
|
|
|
@ -47,6 +47,15 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
|||
*/
|
||||
public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
||||
|
||||
private boolean useSecurity3;
|
||||
|
||||
@Override
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
useSecurity3 = randomBoolean();
|
||||
}
|
||||
|
||||
private static final String USER = "test_user";
|
||||
private static final String PASS = "changeme";
|
||||
private static final String KEYSTORE_PASS = "keypass";
|
||||
|
@ -59,13 +68,18 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
|
||||
@Override
|
||||
protected Settings externalClusterClientSettings() {
|
||||
return Settings.builder()
|
||||
final Settings.Builder builder =
|
||||
Settings.builder()
|
||||
.put(Security.USER_SETTING.getKey(), USER + ":" + PASS)
|
||||
.put(SecurityNetty3Transport.SSL_SETTING.getKey(), true)
|
||||
.put("xpack.security.ssl.keystore.path", clientKeyStore)
|
||||
.put("xpack.security.ssl.keystore.password", KEYSTORE_PASS)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME)
|
||||
.build();
|
||||
.put("xpack.security.ssl.keystore.password", KEYSTORE_PASS);
|
||||
if (useSecurity3) {
|
||||
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME3);
|
||||
} else {
|
||||
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME4);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Before
|
||||
|
@ -74,8 +88,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
URI uri = new URI("https", null, httpAddress.getHostString(), httpAddress.getPort(), "/", null, null);
|
||||
|
||||
Settings exporterSettings = Settings.builder()
|
||||
.put("xpack.monitoring.collection.exporters._http.enabled", true)
|
||||
.put("xpack.monitoring.collection.exporters._http.host", uri.toString())
|
||||
.put("xpack.monitoring.exporters._http.enabled", true)
|
||||
.put("xpack.monitoring.exporters._http.host", uri.toString())
|
||||
.build();
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
|
||||
}
|
||||
|
@ -83,8 +97,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
@After
|
||||
public void disableExporter() {
|
||||
Settings exporterSettings = Settings.builder()
|
||||
.putNull("xpack.monitoring.collection.exporters._http.enabled")
|
||||
.putNull("xpack.monitoring.collection.exporters._http.host")
|
||||
.putNull("xpack.monitoring.exporters._http.enabled")
|
||||
.putNull("xpack.monitoring.exporters._http.host")
|
||||
.build();
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
|
||||
}
|
||||
|
|
|
@ -11,9 +11,9 @@
|
|||
metric: [ settings ]
|
||||
|
||||
- is_true: nodes
|
||||
- is_true: nodes.$master.settings.xpack.monitoring.collection.exporters._http.type
|
||||
- is_true: nodes.$master.settings.xpack.monitoring.exporters._http.type
|
||||
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.username
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.password
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.path
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.password
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.exporters._http.auth.username
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.exporters._http.auth.password
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.exporters._http.ssl.truststore.path
|
||||
- is_false: nodes.$master.settings.xpack.monitoring.exporters._http.ssl.truststore.password
|
||||
|
|
|
@ -35,7 +35,7 @@ import java.util.Map;
|
|||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class WatcherTemplateTests extends ESTestCase {
|
||||
public class WatcherTemplateIT extends ESTestCase {
|
||||
|
||||
private TextTemplateEngine engine;
|
||||
|
|
@ -0,0 +1,157 @@
|
|||
---
|
||||
setup:
|
||||
- do:
|
||||
cluster.health:
|
||||
wait_for_status: yellow
|
||||
- do: {xpack.watcher.stats:{}}
|
||||
- do:
|
||||
index:
|
||||
index: idx
|
||||
type: type
|
||||
id: 1
|
||||
body: >
|
||||
{
|
||||
"date" : "2015-01-01T00:00:00",
|
||||
"value" : "val_1"
|
||||
}
|
||||
- do:
|
||||
index:
|
||||
index: idx
|
||||
type: type
|
||||
id: 2
|
||||
body: >
|
||||
{
|
||||
"date" : "2015-01-02T00:00:00",
|
||||
"value" : "val_2"
|
||||
}
|
||||
- do:
|
||||
index:
|
||||
index: idx
|
||||
type: type
|
||||
id: 3
|
||||
body: >
|
||||
{
|
||||
"date" : "2015-01-03T00:00:00",
|
||||
"value" : "val_3"
|
||||
}
|
||||
- do:
|
||||
index:
|
||||
index: idx
|
||||
type: type
|
||||
id: 4
|
||||
body: >
|
||||
{
|
||||
"date" : "2015-01-04T00:00:00",
|
||||
"value" : "val_4"
|
||||
}
|
||||
- do:
|
||||
indices.refresh:
|
||||
index: idx
|
||||
|
||||
---
|
||||
"Test input mustache integration":
|
||||
- do:
|
||||
xpack.watcher.execute_watch:
|
||||
body: >
|
||||
{
|
||||
"trigger_data" : {
|
||||
"scheduled_time" : "2015-01-04T00:00:00"
|
||||
},
|
||||
"watch" : {
|
||||
"trigger" : { "schedule" : { "interval" : "10s" } },
|
||||
"actions" : {
|
||||
"dummy" : {
|
||||
"logging" : {
|
||||
"text" : "executed!"
|
||||
}
|
||||
}
|
||||
},
|
||||
"input" : {
|
||||
"search" : {
|
||||
"request" : {
|
||||
"indices" : "idx",
|
||||
"body" : {
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"filter" : [
|
||||
{
|
||||
"range" : {
|
||||
"date" : {
|
||||
"lte" : "{{ctx.trigger.scheduled_time}}",
|
||||
"gte" : "{{ctx.trigger.scheduled_time}}||-3d"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
- match: { "watch_record.result.input.type": "search" }
|
||||
- match: { "watch_record.result.input.status": "success" }
|
||||
- match: { "watch_record.result.input.payload.hits.total": 4 }
|
||||
# makes sure that the mustache template snippets have been resolved correctly:
|
||||
- match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-3d" }
|
||||
- match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }
|
||||
|
||||
---
|
||||
"Test transform mustache integration":
|
||||
- do:
|
||||
xpack.watcher.execute_watch:
|
||||
body: >
|
||||
{
|
||||
"trigger_data" : {
|
||||
"scheduled_time" : "2015-01-04T00:00:00"
|
||||
},
|
||||
"watch" : {
|
||||
"trigger" : { "schedule" : { "interval" : "10s" } },
|
||||
"input" : { "simple" : { "value" : "val_3" } },
|
||||
"actions" : {
|
||||
"dummy" : {
|
||||
"logging" : {
|
||||
"text" : "executed!"
|
||||
}
|
||||
}
|
||||
},
|
||||
"transform" : {
|
||||
"search" : {
|
||||
"request" : {
|
||||
"indices" : "idx",
|
||||
"body" : {
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"filter" : [
|
||||
{
|
||||
"range" : {
|
||||
"date" : {
|
||||
"lte" : "{{ctx.trigger.scheduled_time}}",
|
||||
"gte" : "{{ctx.trigger.scheduled_time}}||-1d"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"term" : {
|
||||
"value" : "{{ctx.payload.value}}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
- match: { "watch_record.result.transform.type": "search" }
|
||||
- match: { "watch_record.result.transform.status": "success" }
|
||||
- match: { "watch_record.result.transform.payload.hits.total": 1 }
|
||||
- match: { "watch_record.result.transform.payload.hits.hits.0._id": "3" }
|
||||
# makes sure that the mustache template snippets have been resolved correctly:
|
||||
- match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-1d" }
|
||||
- match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }
|
||||
- match: { "watch_record.result.transform.search.request.body.query.bool.filter.1.term.value.value": "val_3" }
|
|
@ -1,440 +0,0 @@
|
|||
SOFTWARE END USER LICENSE AGREEMENT
|
||||
|
||||
READ THIS AGREEMENT CAREFULLY, WHICH CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS YOUR USE OF
|
||||
ELASTICSEARCH’S PROPRIETARY SOFTWARE. BY INSTALLING AND/OR USING SUCH SOFTWARE, YOU ARE INDICATING THAT YOU AGREE TO THE
|
||||
TERMS AND CONDITIONS SET FORTH IN THIS AGREEMENT. IF YOU DO NOT AGREE WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT
|
||||
INSTALL OR USE ANY OF THE SOFTWARE.
|
||||
|
||||
This END USER LICENSE AGREEMENT (this “Agreement”) is entered into by and between the applicable Elasticsearch
|
||||
entity referenced in Attachment 1 hereto (“Elasticsearch”) and the person or entity (“You”) that has downloaded any of
|
||||
Elasticsearch’s proprietary software to which this Agreement is attached or in connection with which this Agreement is
|
||||
presented to You (collectively, the “Software”). This Agreement is effective upon the earlier of the date on the
|
||||
commencement of any License granted pursuant to Section 1.1. below (as applicable, the “Effective Date”).
|
||||
|
||||
1. SOFTWARE LICENSE AND RESTRICTIONS
|
||||
1.1 License Grants.
|
||||
(a) Trial Version License. Subject to the terms and conditions of this Agreement, Elasticsearch agrees to
|
||||
grant, and does hereby grant to You, for a period of thirty (30) days from the date on which You first install the
|
||||
Software (the “Trial Term”), a License to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Trial Version of the Software. You understand and agree that upon the expiration of a Trial Term,
|
||||
You will no longer be able to use the Software, unless you either (i) purchase a Subscription, in which case You will
|
||||
receive a License under Section 1.1(b) below to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Subscription that You purchase, (ii) undertake the Registration of Your use of the Software with
|
||||
Elasticsearch, in which case You will receive a License under Section 1.1(c) below to the Basic Version of the Software
|
||||
or (iii) obtain from Elasticsearch written consent (e-mail sufficient) to extend the Trial Term, which may be granted by
|
||||
Elasticsearch in its sole and absolute discretion.
|
||||
(b) Subscription License. Subject to the terms and conditions of this Agreement and complete payment of any and
|
||||
all applicable Subscription fees, Elasticsearch agrees to grant, and does hereby grant to You during the Subscription
|
||||
Term, and for the restricted scope of this Agreement, a License (i) to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Subscription level that You have purchased, (ii) for the number of Nodes (as defined
|
||||
in the Elasticsearch Support Services Policy) and (iii) for the specific project for which you have purchased a
|
||||
Subscription. The level of Subscription, the number of Nodes and specific project for which you have purchased such
|
||||
Subscription, are set forth on the applicable ordering document entered into by Elasticsearch and You for the purchase
|
||||
of the applicable Subscription (“Order Form”).
|
||||
(c) Basic Version License. Subject to the terms and conditions of this Agreement, and in consideration of the
|
||||
Registration of Your use of the Software, Elasticsearch agrees to grant, and does hereby grant to You, for a period of one
|
||||
(1) year from the date of Registration (“Basic Term”), a License to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Basic Version of the Software.
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication, estoppel or otherwise. You agree not to: (i)
|
||||
reverse engineer or decompile, decrypt, disassemble or otherwise reduce any Software or any portion thereof to
|
||||
human-readable form, except and only to the extent any such restriction is prohibited by applicable law, (ii) deploy the
|
||||
Software on more Nodes (as defined in Elasticsearch’s Support Services Policy) than are permitted under the applicable
|
||||
License grant in Section 1.1 above (iii) where You have purchased a Subscription, use the Software in connection with
|
||||
any project other than the project for which you have purchased such Subscription, as identified on the applicable Order
|
||||
Form, (iv) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted
|
||||
in this Agreement; (v) except as expressly permitted in Section 1.1 above, transfer, sell, rent, lease, distribute,
|
||||
sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (vi) except as may be
|
||||
expressly permitted on an applicable Order Form, use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (vii) circumvent the limitations on use of the Software that are imposed or preserved by any License
|
||||
Key, (viii) alter or remove any proprietary notices in the Software; or (ix) make available to any third party any
|
||||
analysis of the results of operation of the Software, including benchmarking results, without the prior written consent
|
||||
of Elasticsearch. The Software may contain or be provided with open source libraries, components, utilities and other
|
||||
open source software (collectively, “Open Source Software”), which Open Source Software may have applicable license
|
||||
terms as identified on a website designated by Elasticsearch or otherwise provided with the Software or Documentation.
|
||||
Notwithstanding anything to the contrary herein, use of the Open Source Software shall be subject to the license terms
|
||||
and conditions applicable to such Open Source Software, to the extent required by the applicable licensor (which terms
|
||||
shall not restrict the license rights granted to You hereunder, but may contain additional rights).
|
||||
1.3 Audit Rights. You agree that, unless such right is waived in writing by Elasticsearch, Elasticsearch shall have the
|
||||
right, upon fifteen (15) days’ notice to You, to audit Your use of the Software for compliance with any quantitative
|
||||
limitations on Your use of the Software that are set forth in the applicable Order Form. You agree to provide
|
||||
Elasticsearch with the necessary access to the Software to conduct such an audit either (i) remotely, or (ii) if remote
|
||||
performance is not possible, at Your facilities, during normal business hours and no more than one (1) time in any
|
||||
twelve (12) month period. In the event any such audit reveals that You have used the Software in excess of the
|
||||
applicable quantitative limitations, You agree to
|
||||
|
||||
promptly pay to Elasticsearch an amount equal to the difference between the fees actually paid and the fees that You
|
||||
should have paid to remain in compliance with such quantitative limitations. This Section 1.3 shall survive for a
|
||||
period of two (2) years from the termination or expiration of this Agreement.
|
||||
1.4 Cluster Metadata. You understand and agree that once deployed, and on a daily basis, the Software may provide
|
||||
metadata to Elasticsearch about Your cluster statistics and associates that metadata with Your IP address. However, no
|
||||
other information is provided to Elasticsearch by the Software, including any information about the data You process or
|
||||
store in connection with your use of the Software. Instructions for disabling this feature are contained in the
|
||||
Software, however leaving this feature active enables Elasticsearch to gather cluster statistics and provide an improved
|
||||
level of support to You.
|
||||
|
||||
2. TERM AND TERMINATION
|
||||
2.1 Term. Unless earlier terminated under Section 2.2 below, this Agreement shall commence on the Effective Date, and
|
||||
shall continue in force for the term of the last to expire applicable license set forth in Section 1.1 above.
|
||||
2.2 Termination. Either party may, upon written notice to the other party, terminate this Agreement for material breach
|
||||
by the other party automatically and without any other formality, if such party has failed to cure such material breach
|
||||
within thirty (30) days of receiving written notice of such material breach from the non-breaching party.
|
||||
Notwithstanding the foregoing, this Agreement shall automatically terminate in the event that You intentionally breach
|
||||
the scope of the license granted in Section 1.1 of this Agreement, provided that Elasticsearch reserves the right to
|
||||
retroactively waive such automatic termination upon written notice to You.
|
||||
2.3 Post Termination or Expiration. Upon termination or expiration of this Agreement, for any reason, You shall
|
||||
promptly cease the use of the Software and Documentation and destroy (and certify to Elasticsearch in writing the fact
|
||||
of such destruction), or return to Elasticsearch, all copies of the Software and Documentation then in Your possession
|
||||
or under Your control.
|
||||
2.4 Survival. Sections 2.3, 2.4, 3, 4 and 5 (as any such Sections may be modified by Attachment 1, if applicable) shall
|
||||
survive any termination or expiration of this Agreement.
|
||||
3. LIMITED WARRANTY AND DISCLAIMER OF WARRANTIES
|
||||
3.1 Limited Performance Warranty. Subject to You purchasing a Subscription, Elasticsearch warrants that during the
|
||||
applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation.
|
||||
In the event of a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be
|
||||
for Elasticsearch to (i) correct any failure(s) of the Software to perform in all material respects in accordance with
|
||||
the Documentation or (ii) if Elasticsearch is unable to provide such a correction within thirty (30) days of receipt of
|
||||
notice of the applicable non-conformity, promptly refund to Customer any pre-paid, unused fees paid by You to
|
||||
Elasticsearch for the applicable Subscription. The warranty set forth in this Section 3.1 does not apply if the
|
||||
applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elasticsearch; (b) has not been
|
||||
used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has
|
||||
been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment,
|
||||
products, or systems not meeting specifications identified by Elasticsearch in the Documentation. Additionally, the
|
||||
warranties set forth herein only apply when notice of a warranty claim is provided to Elasticsearch within the
|
||||
applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to
|
||||
software or hardware not supplied by Elasticsearch.
|
||||
3.2 Malicious Code. Elasticsearch represents and warrants that prior to making it available for delivery to You,
|
||||
Elasticsearch will use standard industry practices including, without limitation, the use of an updated commercial
|
||||
anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of
|
||||
a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be for
|
||||
Elasticsearch to replace the Software with Software that does not contain any Malicious Code.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT
|
||||
WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY
|
||||
REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4. LIMITATION OF LIABILITY
|
||||
The provisions of this Section 4 apply if You have not purchased a Subscription. If you have purchased a Subscription,
|
||||
then the limitations of liability set forth in the applicable Subscription Agreement will apply in lieu of those set
|
||||
forth in this Section 4.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT,
|
||||
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO
|
||||
USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR
|
||||
TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE
|
||||
OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT
|
||||
CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
4.2 Damages Cap. IN NO EVENT SHALL ELASTICSEARCH’S OR ITS LICENSORS’ AGGREGATE, CUMULATIVE LIABILITY UNDER THIS
|
||||
AGREEMENT EXCEED ONE THOUSAND DOLLARS ($1,000).
|
||||
4.3 YOU AGREE THAT THE FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS ARE A REASONABLE ALLOCATION OF THE RISK BETWEEN
|
||||
THE PARTIES AND WILL APPLY TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL
|
||||
PURPOSE.
|
||||
5. DEFINITIONS
|
||||
The following terms have the meanings ascribed:
|
||||
5.1 “License” means a limited, non-exclusive, non-transferable, fully paid up, right and license (without the right to
|
||||
grant or authorize sublicenses) solely for Your internal business operations to (i) install and use, in object code
|
||||
format, the applicable Eligible Features and Functions of the Software, (ii) use, and distribute internally a reasonable
|
||||
number of copies of the Documentation, provided that You must include on such copies all Marks and Notices; (iii) permit
|
||||
Contractors to use the Software and Documentation as set forth in (i) and (ii) above, provided that such use must be
|
||||
solely for Your benefit, and You shall be responsible for all acts and omissions of such Contractors in connection with
|
||||
their use of the Software that are contrary to the terms and conditions of this Agreement.
|
||||
5.2 “License Key” means an alphanumeric code that enables the Eligible Features and Functions of the Software.
|
||||
5.3 “Basic Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription, but which does require Registration.
|
||||
5.4 “Contractor” means third party contractors performing services on Your behalf.
|
||||
5.5 “Documentation” means the published end user documentation provided by Elasticsearch with the Software.
|
||||
5.6 “Eligible Features and Functions” means those features and functions of the Software that are eligible for use with
|
||||
respect to a particular version of the Software or level of the Subscription. A list of the Eligible Features and
|
||||
Functions that correspond to each version of the Software and Subscription levels may be found at
|
||||
https://www.elastic.co/subscriptions.
|
||||
5.7 “Malicious Code” means any code that is designed to harm, or otherwise disrupt in any unauthorized manner, the
|
||||
operation of a recipient’s computer programs or computer systems or destroy or damage recipient’s data. For clarity,
|
||||
Malicious Code shall not include any software bugs or errors handled through Support Services, or any standard features
|
||||
of functions of the Software and/or any License Key that are intended to enforce the temporal and other limitations on
|
||||
the scope of the use of the Software to the scope of the license purchased by You.
|
||||
5.8 “Marks and Notices” means all Elasticsearch trademarks, trade names, logos and notices present on the Documentation
|
||||
as originally provided by Elasticsearch.
|
||||
5.9 “Registration” means Elasticsearch’s then-current process under which You may register Your use of the Software with
|
||||
Elasticsearch by providing certain information to Elasticsearch regarding your use of the Software.
|
||||
5.10 “Subscription” means the right to receive Support Services and a License to the Software.
|
||||
5.11 “Subscription Term” means the period of time for which You have purchased a Subscription.
|
||||
5.12 “Trial Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription and without Registration.
|
||||
6. MISCELLANEOUS
|
||||
This Agreement, including Attachment 1 hereto, which is hereby incorporated herein by this reference, completely and
|
||||
exclusively states the entire agreement of the parties regarding the subject matter herein, and it supersedes, and its
|
||||
terms govern, all prior proposals, agreements, or other communications between the parties, oral or written, regarding
|
||||
such subject matter. For the avoidance of doubt, the parties hereby expressly acknowledge and agree that if You issue
|
||||
any purchase order or similar document in connection with its purchase of a license to the Software, You will do so only
|
||||
for Your internal, administrative purposes and not with the intent to provide any contractual terms. This Agreement may
|
||||
not be modified except by a subsequently dated, written amendment that expressly amends this Agreement and which is
|
||||
signed on behalf of Elasticsearch and You, by duly authorized representatives. If any provision hereof is held
|
||||
unenforceable, this Agreement will continue without said provision and be interpreted to reflect the original intent of
|
||||
the parties.
|
||||
|
||||
|
||||
ATTACHMENT 1
|
||||
ADDITIONAL TERMS AND CONDITIONS
|
||||
|
||||
A. The following additional terms and conditions apply to all Customers with principal offices in the United States
|
||||
of America:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch, Inc., a Delaware corporation.
|
||||
|
||||
(2) Government Rights. The Software product is "Commercial Computer Software," as that term is defined in 48 C.F.R.
|
||||
2.101, and as the term is used in 48 C.F.R. Part 12, and is a Commercial Item comprised of "commercial computer
|
||||
software" and "commercial computer software documentation". If acquired by or on behalf of a civilian agency, the U.S.
|
||||
Government acquires this commercial computer software and/or commercial computer software documentation subject to the
|
||||
terms of this Agreement, as specified in 48 C.F.R. 12.212 (Computer Software) and 12.211 (Technical Data) of the Federal
|
||||
Acquisition Regulation ("FAR") and its successors. If acquired by or on behalf of any agency within the Department of
|
||||
Defense ("DOD"), the U.S. Government acquires this commercial computer software and/or commercial computer software
|
||||
documentation subject to the terms of the Elasticsearch Software License Agreement as specified in 48 C.F.R. 227.7202-3
|
||||
and 48 C.F.R. 227.7202-4 of the DOD FAR Supplement ("DFARS") and its successors, and consistent with 48 C.F.R. 227.7202.
|
||||
This U.S. Government Rights clause, consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202 is in lieu of, and
|
||||
supersedes, any other FAR, DFARS, or other clause or provision that addresses Government rights in computer software,
|
||||
computer software documentation or technical data related to the Software under this Agreement and in any Subcontract
|
||||
under which this commercial computer software and commercial computer software documentation is acquired or licensed.
|
||||
|
||||
(3) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to U.S. export control laws and regulations, including but not limited to the International Traffic In Arms Regulations
|
||||
(“ITAR”) (22 C.F.R. Parts 120-130 (2010)); the Export Administration Regulations ("EAR") (15 C.F.R. Parts 730-774
|
||||
(2010)); the U.S. antiboycott regulations in the EAR and U.S. Department of the Treasury regulations; the economic
|
||||
sanctions regulations and guidelines of the U.S. Department of the Treasury, Office of Foreign Assets Control, and the
|
||||
USA Patriot Act (Title III of Pub. L. 107-56, signed into law October 26, 2001), as amended. You are now and will
|
||||
remain in the future compliant with all such export control laws and regulations, and will not export, re-export,
|
||||
otherwise transfer any Elasticsearch goods, software or technology or disclose any Elasticsearch software or technology
|
||||
to any person contrary to such laws or regulations. You acknowledge that remote access to the Software may in certain
|
||||
circumstances be considered a re-export of Software, and accordingly, may not be granted in contravention of U.S. export
|
||||
control laws and regulations.
|
||||
(4) Governing Law, Jurisdiction and Venue.
|
||||
(a) Customers in California. If Customer is located in California (as determined by the Customer address on the
|
||||
applicable Order Form, or for a trial license under 1.1(a), the location of person who installed the Software), this
|
||||
Agreement will be governed by the laws of the State of California, without regard to its conflict of laws principles,
|
||||
and all suits hereunder will be brought solely in Federal Court for the Northern District of California, or if that
|
||||
court lacks subject matter jurisdiction, in any California State Court located in Santa Clara County.
|
||||
(b) Customers Outside of California. If Customer is located anywhere other than California (as determined by the
|
||||
Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of person who installed
|
||||
the Software), this Agreement will be governed by the laws of the State of Delaware, without regard to its conflict of
|
||||
laws principles, and all suits hereunder will be brought solely in Federal Court for the District of Delaware, or if
|
||||
that court lacks subject matter jurisdiction, in any Delaware State Court located in Wilmington, Delaware.
|
||||
(c) All Customers. This Agreement shall not be governed by the 1980 UN Convention on Contracts for the International
|
||||
Sale of Goods. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any
|
||||
action or proceeding in any of the applicable courts set forth in (a) or (b) above, based upon any alleged lack of
|
||||
personal jurisdiction, improper venue, forum non conveniens, or any similar claim or defense.
|
||||
(d) Equitable Relief. A breach or threatened breach, by either party of Section 4 may cause irreparable harm for
|
||||
which the non-breaching party shall be entitled to seek injunctive relief without being required to post a bond.
|
||||
|
||||
B. The following additional terms and conditions apply to all Customers with principal offices in Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch B.C. Ltd., a corporation
|
||||
incorporated under laws of the Province of British Columbia.
|
||||
|
||||
(2) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to the restrictions and controls set out in Section A(3) above as well as those imposed by the Export and Import Permits
|
||||
Act (Canada) and the regulations thereunder and that you will comply with all applicable laws and regulations. Without
|
||||
limitation, You acknowledge that the Marvel Software, or any portion thereof, will not be exported: (a) to any country
|
||||
on Canada's Area Control List; (b) to any country subject to UN Security Council embargo or action; or (c) contrary to
|
||||
Canada's Export Control List Item 5505. You are now and will remain in the future compliant with all such export control
|
||||
laws and regulations, and will not export, re-export, otherwise transfer any Elasticsearch goods, software or technology
|
||||
or disclose any Elasticsearch software or technology to any person contrary to such laws or regulations. You will not
|
||||
export or re-export the Marvel Software, or any portion thereof, directly or indirectly, in violation of the Canadian
|
||||
export administration laws and regulations to any country or end user, or to any end user who you know or have reason to
|
||||
know will utilize them in the design, development or production of nuclear, chemical or biological weapons. You further
|
||||
acknowledge that the Marvel Software product may include technical data subject to such Canadian export regulations.
|
||||
Elasticsearch does not represent that the Marvel Software is appropriate or available for use in all countries.
|
||||
Elasticsearch prohibits accessing materials from countries or states where contents are illegal. You are using the
|
||||
Marvel Software on your own initiative and you are responsible for compliance with all applicable laws. You hereby agree
|
||||
to indemnify Elasticsearch and its affiliates from any claims, actions, liability or expenses (including reasonable
|
||||
lawyers' fees) resulting from Your failure to act in accordance with the acknowledgements, agreements, and
|
||||
representations in this Section B(2).
|
||||
(3) Governing Law and Dispute Resolution. This Agreement shall be governed by the Province of Ontario and the
|
||||
federal laws of Canada applicable therein without regard to conflict of laws provisions. The parties hereby irrevocably
|
||||
waive any and all claims and defenses either might otherwise have in any such action or proceeding in any of such courts
|
||||
based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens or any similar claim or
|
||||
defense. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach,
|
||||
termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability
|
||||
of this agreement to arbitrate, (each, a “Dispute”), which the parties are unable to resolve after good faith
|
||||
negotiations, shall be submitted first to the upper management level of the parties. The parties, through their upper
|
||||
management level representatives shall meet within thirty (30) days of the Dispute being referred to them and if the
|
||||
parties are unable to resolve such Dispute within thirty (30) days of meeting, the parties agree to seek to resolve the
|
||||
Dispute through mediation with ADR Chambers in the City of Toronto, Ontario, Canada before pursuing any other
|
||||
proceedings. The costs of the mediator shall be shared equally by the parties. If the Dispute has not been resolved
|
||||
within thirty (30) days of the notice to desire to mediate, any party may terminate the mediation and proceed to
|
||||
arbitration and the matter shall be referred to and finally resolved by arbitration at ADR Chambers pursuant to the
|
||||
general ADR Chambers Rules for Arbitration in the City of Toronto, Ontario, Canada. The arbitration shall proceed in
|
||||
accordance with the provisions of the Arbitration Act (Ontario). The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two (2) party-appointed arbitrators are unable to agree on the chairman, the
|
||||
chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be
|
||||
independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to
|
||||
allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees
|
||||
related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any
|
||||
arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith.
|
||||
Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such
|
||||
court for judicial acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the
|
||||
foregoing, Elasticsearch shall have the right to institute an action in a court of proper jurisdiction for preliminary
|
||||
injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall
|
||||
only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
(4) Language. Any translation of this Agreement is done for local requirements and in the event of a dispute
|
||||
between the English and any non-English version, the English version of this Agreement shall govern. At the request of
|
||||
the parties, the official language of this Agreement and all communications and documents relating hereto is the English
|
||||
language, and the English-language version shall govern all interpretation of the Agreement. À la demande des parties,
|
||||
la langue officielle de la présente convention ainsi que toutes communications et tous documents s'y rapportant est la
|
||||
langue anglaise, et la version anglaise est celle qui régit toute interprétation de la présente convention.
|
||||
(5) Warranty Disclaimer. For Customers with principal offices in the Province of Québec, the following new sentence
|
||||
is to be added to the end of Section 3.3: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF
|
||||
DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL
|
||||
NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND
|
||||
DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
(6) Limitation of Liability. For Customers with principal offices in the Province of Québec, the following new
|
||||
sentence is to be added to the end of Section 4.1: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN
|
||||
TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS
|
||||
AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES
|
||||
BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
|
||||
C. The following additional terms and conditions apply to all Customers with principal offices outside of the United
|
||||
States of America and Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license in Germany is Elasticsearch Gmbh; in France is
|
||||
Elasticsearch SARL, in the United Kingdom is Elasticsearch Ltd, in Australia is Elasticsearch Pty Ltd., in Japan is
|
||||
Elasticsearch KK, in Sweden is Elasticsearch AB, in Norway is Elasticsearch AS and in all other countries is
|
||||
Elasticsearch BV.
|
||||
|
||||
(2) Choice of Law. This Agreement shall be governed by and construed in accordance with the laws of the State of New
|
||||
York, without reference to or application of choice of law rules or principles. Notwithstanding any choice of law
|
||||
provision or otherwise, the Uniform Computer Information Transactions Act (UCITA) and the United Nations Convention on
|
||||
the International Sale of Goods shall not apply.
|
||||
|
||||
(3) Arbitration. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence,
|
||||
breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or
|
||||
applicability of this agreement to arbitrate, (each, a “Dispute”) shall be referred to and finally resolved by
|
||||
arbitration under the rules and at the location identified below. The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two party-appointed arbitrators are unable to agree on the chairman, the chairman
|
||||
shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent
|
||||
of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between
|
||||
the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the
|
||||
arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall
|
||||
be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the
|
||||
award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial
|
||||
acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the foregoing, Elasticsearch
|
||||
shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending
|
||||
a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the
|
||||
arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
|
||||
In addition, the following terms only apply to Customers with principal offices within Europe, the Middle East or Africa
|
||||
(EMEA):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the London
|
||||
Court of International Arbitration (“LCIA”) Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be London, England.
|
||||
|
||||
(b) In addition, the following terms only apply to Customers with principal offices within Asia Pacific, Australia &
|
||||
New Zealand:
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the Rules of
|
||||
Conciliation and Arbitration of the International Chamber of Commerce (“ICC”) in force on the date when the notice of
|
||||
arbitration is submitted in accordance with such Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be Singapore.
|
||||
|
||||
(c) In addition, the following terms only apply to Customers with principal offices within the Americas (excluding
|
||||
North America):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under
|
||||
International Dispute Resolution Procedures of the American Arbitration Association (“AAA”) in force on the date when
|
||||
the notice of arbitration is submitted in accordance with such Procedures (which Procedures are deemed to be
|
||||
incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York,
|
||||
USA. The seat, or legal place, of arbitration shall be New York, New York, USA.
|
||||
|
||||
(4) In addition, for Customers with principal offices within the UK, the following new sentence is added to the end
|
||||
of Section 4.1:
|
||||
|
||||
Nothing in this Agreement shall have effect so as to limit or exclude a party’s liability for death or personal injury
|
||||
caused by negligence or for fraud including fraudulent misrepresentation and this Section 4.1 shall take effect subject
|
||||
to this provision.
|
||||
|
||||
(5) In addition, for Customers with principal offices within France, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. Elasticsearch owns all right title and interest in and to the Software and
|
||||
any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software
|
||||
is granted to You by implication, or otherwise. You agree not to prepare derivative works from, modify, copy or use the
|
||||
Software in any manner except as expressly permitted in this Agreement; provided that You may copy the Software for
|
||||
archival purposes, only where such software is provided on a non-durable medium; and You may decompile the Software,
|
||||
where necessary for interoperability purposes and where necessary for the correction of errors making the software unfit
|
||||
for its intended purpose, if such right is not reserved by Elasticsearch as editor of the Software. Pursuant to article
|
||||
L122-6-1 of the French intellectual property code, Elasticsearch reserves the right to correct any bugs as necessary for
|
||||
the Software to serve its intended purpose. You agree not to: (i) transfer, sell, rent, lease, distribute, sublicense,
|
||||
loan or otherwise transfer the Software in whole or in part to any third party; (ii) use the Software for providing
|
||||
time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application
|
||||
services provider or other service offering; (iii) alter or remove any proprietary notices in the Software; or (iv) make
|
||||
available to any third party any analysis of the results of operation of the Software, including benchmarking results,
|
||||
without the prior written consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE WITH
|
||||
RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTICSEARCH DOES
|
||||
NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE
|
||||
UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT OR
|
||||
UNFORESEEABLE DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE
|
||||
PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT,
|
||||
INCLUDING NEGLIGENCE. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH, THROUGH
|
||||
GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU, OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1, OR IN CASE OF
|
||||
DEATH OR PERSONAL INJURY.
|
||||
(6) In addition, for Customers with principal offices within Australia, Sections 4.1, 4.2 and 4.3 of the Agreement
|
||||
are deleted and replaced with the following new Sections 4.1, 4.2 and 4.3:
|
||||
4.1 Disclaimer of Certain Damages. Subject to clause 4.3, a party is not liable for Consequential Loss however
|
||||
caused (including by the negligence of that party) suffered or incurred by the other party in connection with this
|
||||
agreement. “Consequential Loss” means loss of revenues, loss of reputation, indirect loss, loss of profits,
|
||||
consequential loss, loss of actual or anticipated savings, indirect loss, lost opportunities, including opportunities to
|
||||
enter into arrangements with third parties, loss or damage in connection with claims against by third parties, or loss
|
||||
or corruption or data.
|
||||
4.2 Damages Cap. SUBJECT TO CLAUSES 4.1 AND 4.3, ANY LIABILITY OF ELASTICSEARCH FOR ANY LOSS OR DAMAGE, HOWEVER
|
||||
CAUSED (INCLUDING BY THE NEGLIGENCE OF ELASTICSEARCH), SUFFERED BY YOU IN CONNECTION WITH THIS AGREEMENT IS LIMITED TO
|
||||
THE AMOUNT YOU PAID, IN THE TWELVE (12) MONTHS IMMEDIATELY PRIOR TO THE EVENT GIVING RISE TO LIABILITY, UNDER THE
|
||||
ELASTICSEARCH SUPPORT SERVICES AGREEMENT IN CONNECTION WITH WHICH YOU OBTAINED THE LICENSE TO USE THE SOFTWARE. THE
|
||||
LIMITATION SET OUT IN THIS SECTION 4.2 IS AN AGGREGATE LIMIT FOR ALL CLAIMS, WHENEVER MADE.
|
||||
4.3 Limitation and Disclaimer Exceptions. If the Competition and Consumer Act 2010 (Cth) or any other legislation or
|
||||
any other legislation states that there is a guarantee in relation to any good or service supplied by Elasticsearch in
|
||||
connection with this agreement, and Elasticsearch’s liability for failing to comply with that guarantee cannot be
|
||||
excluded but may be limited, Sections 4.1 and 4.2 do not apply to that liability and instead Elasticsearch’s liability
|
||||
for such failure is limited (at Elasticsearch’s election) to, in the case of a supply of goods, the Elasticsearch
|
||||
replacing the goods or supplying equivalent goods or repairing the goods, or in the case of a supply of services,
|
||||
Elasticsearch supplying the services again or paying the cost of having the services supplied again.
|
||||
(7) In addition, for Customers with principal offices within Japan, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication or otherwise. You agree not to: (i) prepare
|
||||
derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement or
|
||||
applicable law; (ii) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in
|
||||
whole or in part to any third party; (iii) use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (iv) alter or remove any proprietary notices in the Software; or (v) make available to any third party
|
||||
any analysis of the results of operation of the Software, including benchmarking results, without the prior written
|
||||
consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY
|
||||
SPECIALINDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE
|
||||
OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A
|
||||
BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH
|
||||
THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY
|
||||
OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import org.elasticsearch.gradle.MavenFilteringHack
|
||||
import org.elasticsearch.gradle.test.NodeInfo
|
||||
|
||||
import java.nio.charset.StandardCharsets
|
||||
|
||||
group 'org.elasticsearch.plugin'
|
||||
|
@ -27,12 +26,9 @@ licenseHeaders {
|
|||
}
|
||||
|
||||
dependencies {
|
||||
// license deps
|
||||
compile project(':x-plugins:elasticsearch:license:base')
|
||||
testCompile project(':x-plugins:elasticsearch:license:licensor')
|
||||
|
||||
// security deps
|
||||
compile project(path: ':modules:transport-netty3', configuration: 'runtime')
|
||||
compile project(path: ':modules:transport-netty4', configuration: 'runtime')
|
||||
compile 'dk.brics.automaton:automaton:1.11-8'
|
||||
compile 'com.unboundid:unboundid-ldapsdk:3.1.1'
|
||||
compile 'org.bouncycastle:bcprov-jdk15on:1.54'
|
||||
|
@ -78,6 +74,11 @@ for (String module : ['', 'license-plugin/', 'security/', 'watcher/', 'monitorin
|
|||
}
|
||||
}
|
||||
|
||||
// make LicenseSigner available for testing signed licenses
|
||||
sourceSets.test.java {
|
||||
srcDir '../license-tools/src/main/java'
|
||||
}
|
||||
|
||||
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
|
||||
|
@ -117,8 +118,10 @@ forbiddenPatterns {
|
|||
|
||||
// TODO: standardize packaging config for plugins
|
||||
bundlePlugin {
|
||||
from(projectDir) {
|
||||
from(project(':x-plugins').projectDir) {
|
||||
include 'LICENSE.txt'
|
||||
}
|
||||
from(projectDir) {
|
||||
include 'NOTICE.txt'
|
||||
}
|
||||
from('bin/x-pack') {
|
||||
|
@ -236,29 +239,3 @@ thirdPartyAudit.excludes = [
|
|||
'javax.activation.URLDataSource',
|
||||
'javax.activation.UnsupportedDataTypeException'
|
||||
]
|
||||
|
||||
modifyPom { MavenPom pom ->
|
||||
pom.withXml { XmlProvider xml ->
|
||||
// first find if we have dependencies at all, and grab the node
|
||||
NodeList depsNodes = xml.asNode().get('dependencies')
|
||||
if (depsNodes.isEmpty()) {
|
||||
return
|
||||
}
|
||||
|
||||
// find the 'base' dependency and replace it with the correct name because the project name is
|
||||
// always used even when the pom of the other project is correct
|
||||
Iterator<Node> childNodeIter = depsNodes.get(0).children().iterator()
|
||||
while (childNodeIter.hasNext()) {
|
||||
Node depNode = childNodeIter.next()
|
||||
String groupId = depNode.get('groupId').get(0).text()
|
||||
Node artifactIdNode = depNode.get('artifactId').get(0)
|
||||
String artifactId = artifactIdNode.text()
|
||||
String scope = depNode.get("scope").get(0).text()
|
||||
if (groupId.equals('org.elasticsearch') && artifactId.equals('base')) {
|
||||
artifactIdNode.replaceNode(new Node(null, 'artifactId', 'license-core'))
|
||||
} else if ('test'.equals(scope)) {
|
||||
childNodeIter.remove()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,16 +5,15 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.graph;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.xpack.XPackFeatureSet;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -24,10 +23,9 @@ public class GraphFeatureSet implements XPackFeatureSet {
|
|||
private final XPackLicenseState licenseState;
|
||||
|
||||
@Inject
|
||||
public GraphFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState, NamedWriteableRegistry namedWriteableRegistry) {
|
||||
public GraphFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) {
|
||||
this.enabled = Graph.enabled(settings);
|
||||
this.licenseState = licenseState;
|
||||
namedWriteableRegistry.register(Usage.class, Usage.writeableName(Graph.NAME), Usage::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -55,7 +53,7 @@ public class GraphFeatureSet implements XPackFeatureSet {
|
|||
return new Usage(available(), enabled());
|
||||
}
|
||||
|
||||
static class Usage extends XPackFeatureSet.Usage {
|
||||
public static class Usage extends XPackFeatureSet.Usage {
|
||||
|
||||
public Usage(StreamInput input) throws IOException {
|
||||
super(input);
|
||||
|
|
|
@ -16,7 +16,6 @@ import java.util.Map;
|
|||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -27,23 +26,24 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.support.RestActions;
|
||||
import org.elasticsearch.rest.action.support.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.XPackClient;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreResponse;
|
||||
import org.elasticsearch.xpack.graph.action.Hop;
|
||||
import org.elasticsearch.xpack.graph.action.VertexRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest.TermBoost;
|
||||
import org.elasticsearch.xpack.rest.XPackRestHandler;
|
||||
|
||||
|
||||
/**
|
||||
* @see GraphExploreRequest
|
||||
*/
|
||||
public class RestGraphAction extends BaseRestHandler {
|
||||
public class RestGraphAction extends XPackRestHandler {
|
||||
|
||||
private IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
public static final ParseField TIMEOUT_FIELD = new ParseField("timeout");
|
||||
|
@ -68,21 +68,23 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
@Inject
|
||||
public RestGraphAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) {
|
||||
super(settings);
|
||||
// @deprecated TODO need to add deprecation support as per https://github.com/elastic/x-plugins/issues/1760#issuecomment-217507517
|
||||
controller.registerHandler(GET, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_graph/explore", this);
|
||||
// new REST endpoint
|
||||
controller.registerHandler(GET, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
|
||||
// @deprecated Remove in 6.0
|
||||
// NOTE: Old versions did not end with "/_explore"; they were just "/explore"
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) throws IOException {
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final XPackClient client) throws IOException {
|
||||
GraphExploreRequest graphRequest = new GraphExploreRequest(Strings.splitStringByCommaToArray(request.param("index")));
|
||||
graphRequest.indicesOptions(IndicesOptions.fromRequest(request, graphRequest.indicesOptions()));
|
||||
graphRequest.routing(request.param("routing"));
|
||||
|
@ -109,7 +111,7 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
}
|
||||
|
||||
graphRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
|
||||
client.execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
client.es().execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
}
|
||||
|
||||
private void parseHop(XContentParser parser, QueryParseContext context, Hop currentHop,
|
||||
|
|
|
@ -5,17 +5,13 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.graph;
|
||||
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Matchers.eq;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
|
@ -24,21 +20,14 @@ import static org.mockito.Mockito.when;
|
|||
public class GraphFeatureSetTests extends ESTestCase {
|
||||
|
||||
private XPackLicenseState licenseState;
|
||||
private NamedWriteableRegistry namedWriteableRegistry;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
licenseState = mock(XPackLicenseState.class);
|
||||
namedWriteableRegistry = mock(NamedWriteableRegistry.class);
|
||||
}
|
||||
|
||||
public void testWritableRegistration() throws Exception {
|
||||
new GraphFeatureSet(Settings.EMPTY, licenseState, namedWriteableRegistry);
|
||||
verify(namedWriteableRegistry).register(eq(GraphFeatureSet.Usage.class), eq("xpack.usage.graph"), anyObject());
|
||||
}
|
||||
|
||||
public void testAvailable() throws Exception {
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(Settings.EMPTY, licenseState, namedWriteableRegistry);
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(Settings.EMPTY, licenseState);
|
||||
boolean available = randomBoolean();
|
||||
when(licenseState.isGraphAllowed()).thenReturn(available);
|
||||
assertThat(featureSet.available(), is(available));
|
||||
|
@ -54,7 +43,7 @@ public class GraphFeatureSetTests extends ESTestCase {
|
|||
} else {
|
||||
settings.put("xpack.graph.enabled", enabled);
|
||||
}
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(settings.build(), licenseState, namedWriteableRegistry);
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(settings.build(), licenseState);
|
||||
assertThat(featureSet.enabled(), is(enabled));
|
||||
}
|
||||
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
"documentation": "https://www.elastic.co/guide/en/graph/current/explore.html",
|
||||
"methods": ["GET", "POST"],
|
||||
"url": {
|
||||
"path": "/{index}/_xpack/graph/_explore",
|
||||
"paths": ["/{index}/_xpack/graph/_explore", "/{index}/{type}/_xpack/graph/_explore"],
|
||||
"path": "/{index}/_xpack/_graph/_explore",
|
||||
"paths": ["/{index}/_xpack/_graph/_explore", "/{index}/{type}/_xpack/_graph/_explore"],
|
||||
"parts" : {
|
||||
"index": {
|
||||
"type" : "list",
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
||||
import java.util.UUID;
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
|
@ -25,9 +25,6 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.gateway.GatewayService;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.core.LicenseVerifier;
|
||||
import org.elasticsearch.license.core.OperationModeFileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefIterator;
|
|
@ -5,29 +5,15 @@
|
|||
*/
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.AbstractDiffable;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.decrypt;
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
|
||||
/**
|
||||
* Contains metadata about registered licenses
|
||||
|
@ -101,34 +87,13 @@ class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implements Meta
|
|||
|
||||
@Override
|
||||
public LicensesMetaData fromXContent(XContentParser parser) throws IOException {
|
||||
List<License> pre20Licenses = new ArrayList<>(1);
|
||||
License license = LICENSE_TOMBSTONE;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
String fieldName = parser.currentName();
|
||||
if (fieldName != null) {
|
||||
// for back compat with 1.x license metadata
|
||||
if (fieldName.equals(Fields.TRIAL_LICENSES) || fieldName.equals(Fields.SIGNED_LICENCES)) {
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
if (parser.currentToken().isValue()) {
|
||||
// trial license
|
||||
byte[] data = decrypt(Base64.getDecoder().decode(parser.text()));
|
||||
try (XContentParser trialLicenseParser =
|
||||
XContentFactory.xContent(XContentType.JSON).createParser(data)) {
|
||||
trialLicenseParser.nextToken();
|
||||
License pre20TrialLicense = License.fromXContent(trialLicenseParser);
|
||||
pre20Licenses.add(TrialLicense.create(License.builder().fromPre20LicenseSpec(pre20TrialLicense)));
|
||||
}
|
||||
} else {
|
||||
// signed license
|
||||
pre20Licenses.add(License.fromXContent(parser));
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (fieldName.equals(Fields.LICENSE)) {
|
||||
if (fieldName.equals(Fields.LICENSE)) {
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
license = License.fromXContent(parser);
|
||||
|
@ -139,23 +104,6 @@ class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implements Meta
|
|||
}
|
||||
}
|
||||
}
|
||||
// when we see old license metadata,
|
||||
// we try to choose the license that has the latest issue date that is not expired
|
||||
if (!pre20Licenses.isEmpty()) {
|
||||
// take the best unexpired license
|
||||
CollectionUtil.timSort(pre20Licenses, License.LATEST_ISSUE_DATE_FIRST);
|
||||
long now = System.currentTimeMillis();
|
||||
for (License oldLicense : pre20Licenses) {
|
||||
if (oldLicense.expiryDate() > now) {
|
||||
license = oldLicense;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// take the best expired license
|
||||
if (license == LICENSE_TOMBSTONE && !pre20Licenses.isEmpty()) {
|
||||
license = pre20Licenses.get(0);
|
||||
}
|
||||
}
|
||||
return new LicensesMetaData(license);
|
||||
}
|
||||
|
||||
|
@ -173,75 +121,24 @@ class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implements Meta
|
|||
|
||||
@Override
|
||||
public void writeTo(StreamOutput streamOutput) throws IOException {
|
||||
if (streamOutput.getVersion().before(Version.V_2_0_0)) {
|
||||
if (license == LICENSE_TOMBSTONE) {
|
||||
streamOutput.writeVInt(0); // no signed license
|
||||
streamOutput.writeVInt(0); // no trial license
|
||||
} else if (!License.isAutoGeneratedLicense(license.signature())) {
|
||||
streamOutput.writeVInt(1); // one signed license
|
||||
license.writeTo(streamOutput);
|
||||
streamOutput.writeVInt(0); // no trial license
|
||||
} else {
|
||||
streamOutput.writeVInt(0); // no signed license
|
||||
streamOutput.writeVInt(1); // one trial license
|
||||
XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
license.toXContent(contentBuilder,
|
||||
new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
|
||||
streamOutput.writeString(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
|
||||
}
|
||||
if (license == LICENSE_TOMBSTONE) {
|
||||
streamOutput.writeBoolean(false); // no license
|
||||
} else {
|
||||
if (license == LICENSE_TOMBSTONE) {
|
||||
streamOutput.writeBoolean(false); // no license
|
||||
} else {
|
||||
streamOutput.writeBoolean(true); // has a license
|
||||
license.writeTo(streamOutput);
|
||||
}
|
||||
streamOutput.writeBoolean(true); // has a license
|
||||
license.writeTo(streamOutput);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public LicensesMetaData readFrom(StreamInput streamInput) throws IOException {
|
||||
License license = LICENSE_TOMBSTONE;
|
||||
if (streamInput.getVersion().before(Version.V_2_0_0)) {
|
||||
int size = streamInput.readVInt();
|
||||
List<License> licenses = new ArrayList<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
licenses.add(License.readLicense(streamInput));
|
||||
}
|
||||
int numTrialLicenses = streamInput.readVInt();
|
||||
for (int i = 0; i < numTrialLicenses; i++) {
|
||||
byte[] data = decrypt(Base64.getDecoder().decode(streamInput.readString()));
|
||||
try (XContentParser trialLicenseParser = XContentFactory.xContent(XContentType.JSON).createParser(data)) {
|
||||
trialLicenseParser.nextToken();
|
||||
License pre20TrialLicense = License.fromXContent(trialLicenseParser);
|
||||
licenses.add(TrialLicense.create(License.builder().fromPre20LicenseSpec(pre20TrialLicense)));
|
||||
}
|
||||
}
|
||||
// when we see read licenses from old pre v2.0,
|
||||
// we try to choose the license that has the latest issue date that is not expired
|
||||
CollectionUtil.timSort(licenses, License.LATEST_ISSUE_DATE_FIRST);
|
||||
long now = System.currentTimeMillis();
|
||||
for (License oldLicense : licenses) {
|
||||
if (oldLicense.expiryDate() > now) {
|
||||
license = oldLicense;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// take the best expired license
|
||||
if (license == LICENSE_TOMBSTONE && !licenses.isEmpty()) {
|
||||
license = licenses.get(0);
|
||||
}
|
||||
} else {
|
||||
if (streamInput.readBoolean()) {
|
||||
license = License.readLicense(streamInput);
|
||||
}
|
||||
if (streamInput.readBoolean()) {
|
||||
license = License.readLicense(streamInput);
|
||||
}
|
||||
return new LicensesMetaData(license);
|
||||
}
|
||||
|
||||
private static final class Fields {
|
||||
private static final String SIGNED_LICENCES = "signed_licenses";
|
||||
private static final String TRIAL_LICENSES = "trial_licenses";
|
||||
private static final String LICENSE = "license";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,10 +45,6 @@ public class Licensing implements ActionPlugin {
|
|||
isTribeNode = isTribeNode(settings);
|
||||
}
|
||||
|
||||
public Collection<Module> nodeModules() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
|
||||
if (isTribeNode) {
|
||||
|
@ -69,14 +65,6 @@ public class Licensing implements ActionPlugin {
|
|||
RestDeleteLicenseAction.class);
|
||||
}
|
||||
|
||||
public Collection<Object> createComponents(ClusterService clusterService, Clock clock, Environment environment,
|
||||
ResourceWatcherService resourceWatcherService,
|
||||
XPackLicenseState licenseState) {
|
||||
LicenseService licenseService = new LicenseService(settings, clusterService, clock,
|
||||
environment, resourceWatcherService, licenseState);
|
||||
return Arrays.asList(licenseService, licenseState);
|
||||
}
|
||||
|
||||
public List<Setting<?>> getSettings() {
|
||||
// TODO convert this wildcard to a real setting
|
||||
return Collections.singletonList(Setting.groupSetting("license.", Setting.Property.NodeScope));
|
||||
|
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
/**
|
||||
*
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.license.core.License.OperationMode;
|
||||
import org.elasticsearch.license.License.OperationMode;
|
||||
import org.elasticsearch.watcher.FileChangesListener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
|
@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
* In case of failure to read a valid operation mode from <code>licenseModePath</code>,
|
||||
* the operation mode will default to PLATINUM
|
||||
*/
|
||||
public final class OperationModeFileWatcher extends FileChangesListener {
|
||||
public final class OperationModeFileWatcher implements FileChangesListener {
|
||||
private final ResourceWatcherService resourceWatcherService;
|
||||
private final Path licenseModePath;
|
||||
private final AtomicBoolean initialized = new AtomicBoolean();
|
|
@ -10,7 +10,6 @@ import org.elasticsearch.action.ValidateActions;
|
|||
import org.elasticsearch.action.support.master.AcknowledgedRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -29,7 +28,7 @@ public class PutLicenseRequest extends AcknowledgedRequest<PutLicenseRequest> {
|
|||
}
|
||||
|
||||
/**
|
||||
* Parses license from json format to an instance of {@link org.elasticsearch.license.core.License}
|
||||
* Parses license from json format to an instance of {@link License}
|
||||
*
|
||||
* @param licenseDefinition licenses definition
|
||||
*/
|
||||
|
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
/**
|
||||
* Register license request builder
|
||||
|
|
|
@ -9,7 +9,6 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
|
|
@ -11,15 +11,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.decrypt;
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.decrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.encrypt;
|
||||
|
||||
class TrialLicense {
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
/**
|
||||
* Licensing for xpack.
|
||||
*
|
||||
* A {@link org.elasticsearch.license.core.License} is a signed set of json properties that determine what features
|
||||
* A {@link org.elasticsearch.license.License} is a signed set of json properties that determine what features
|
||||
* are available in a running cluster. Licenses are registered through a
|
||||
* {@link org.elasticsearch.license.PutLicenseRequest}. This action is handled by the master node, which places
|
||||
* the signed license into the cluster state. Each node listens for cluster state updates via the
|
||||
|
|
|
@ -17,7 +17,6 @@ import org.elasticsearch.common.component.Lifecycle;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.LocalTransportAddress;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.support.clock.ClockMock;
|
||||
|
|
|
@ -11,8 +11,6 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
|||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.LicensesMetaData;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
|
||||
|
|
|
@ -15,7 +15,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.transport.LocalTransportAddress;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
|
|
|
@ -3,13 +3,13 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.elasticsearch.license.core.License.OperationMode;
|
||||
import static org.elasticsearch.license.License.OperationMode;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
|
@ -12,7 +12,6 @@ import org.junit.Before;
|
|||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.elasticsearch.license.core.OperationModeFileWatcherTests.writeMode;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.eq;
|
||||
|
@ -47,7 +46,7 @@ public class LicenseOperationModeUpdateTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.resolve(type)));
|
||||
writeMode("gold", licenseModeFile);
|
||||
OperationModeFileWatcherTests.writeMode("gold", licenseModeFile);
|
||||
license.setOperationModeFileWatcher(operationModeFileWatcher);
|
||||
verifyZeroInteractions(resourceWatcherService);
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.resolve(type)));
|
||||
|
@ -65,7 +64,7 @@ public class LicenseOperationModeUpdateTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.PLATINUM));
|
||||
writeMode("gold", licenseModeFile);
|
||||
OperationModeFileWatcherTests.writeMode("gold", licenseModeFile);
|
||||
license.setOperationModeFileWatcher(operationModeFileWatcher);
|
||||
verify(resourceWatcherService, times(1)).add(any(FileWatcher.class), eq(ResourceWatcherService.Frequency.HIGH));
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.GOLD));
|
|
@ -6,7 +6,6 @@
|
|||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
import org.junit.Before;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
|
@ -9,10 +9,10 @@ import org.elasticsearch.common.network.NetworkModule;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.MockNetty3Plugin;
|
||||
import org.elasticsearch.xpack.MockNetty4Plugin;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
@ -49,7 +49,7 @@ public class LicenseServiceClusterTests extends AbstractLicensesIntegrationTestC
|
|||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Arrays.asList(XPackPlugin.class, MockNetty3Plugin.class);
|
||||
return Arrays.asList(XPackPlugin.class, MockNetty3Plugin.class, MockNetty4Plugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import static org.elasticsearch.license.TestUtils.generateSignedLicense;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
|
|
@ -15,7 +15,6 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
|||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
|
|
|
@ -5,13 +5,10 @@
|
|||
*/
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
|
||||
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
@ -20,14 +17,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
@ -90,120 +84,6 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
|
|||
assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(trialLicense));
|
||||
}
|
||||
|
||||
public void test1xLicensesMetaDataFromXContent() throws Exception {
|
||||
License signedLicense = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2));
|
||||
long issueDate = signedLicense.issueDate() - TimeValue.timeValueMillis(200).getMillis();
|
||||
License.Builder specBuilder = License.builder()
|
||||
.uid(UUID.randomUUID().toString())
|
||||
.issuedTo("customer")
|
||||
.maxNodes(5)
|
||||
.issueDate(issueDate)
|
||||
.expiryDate(issueDate + TimeValue.timeValueHours(2).getMillis());
|
||||
final License trialLicense = TrialLicense.create(specBuilder);
|
||||
// trial license
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
builder.startObject("licenses");
|
||||
builder.startArray("trial_licenses");
|
||||
XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
|
||||
builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
|
||||
builder.endArray();
|
||||
builder.startArray("signed_licenses");
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
|
||||
assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(trialLicense));
|
||||
|
||||
// signed license
|
||||
builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
builder.startObject("licenses");
|
||||
builder.startArray("trial_licenses");
|
||||
builder.endArray();
|
||||
builder.startArray("signed_licenses");
|
||||
signedLicense.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
|
||||
assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicense));
|
||||
|
||||
// trial and signed license
|
||||
builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
builder.startObject("licenses");
|
||||
builder.startArray("trial_licenses");
|
||||
contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
|
||||
builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
|
||||
builder.endArray();
|
||||
builder.startArray("signed_licenses");
|
||||
signedLicense.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
|
||||
assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicense));
|
||||
|
||||
// license with later issue date is selected
|
||||
long laterIssueDate = trialLicense.issueDate() + TimeValue.timeValueHours(2).getMillis();
|
||||
License signedLicenseIssuedLater = TestUtils.generateSignedLicense(laterIssueDate, TimeValue.timeValueHours(2));
|
||||
builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
builder.startObject("licenses");
|
||||
builder.startArray("trial_licenses");
|
||||
contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
|
||||
builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
|
||||
builder.endArray();
|
||||
builder.startArray("signed_licenses");
|
||||
signedLicense.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
signedLicenseIssuedLater.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
|
||||
assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicenseIssuedLater));
|
||||
|
||||
}
|
||||
|
||||
public void test1xLicensesMetaDataFromStream() throws Exception {
|
||||
long issueDate = System.currentTimeMillis();
|
||||
License.Builder specBuilder = License.builder()
|
||||
.uid(UUID.randomUUID().toString())
|
||||
.issuedTo("customer")
|
||||
.maxNodes(5)
|
||||
.issueDate(issueDate)
|
||||
.expiryDate(issueDate + TimeValue.timeValueHours(1).getMillis());
|
||||
final License trialLicense = TrialLicense.create(specBuilder);
|
||||
// trial license
|
||||
BytesStreamOutput output = new BytesStreamOutput();
|
||||
output.writeVInt(0);
|
||||
output.writeVInt(1);
|
||||
XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
|
||||
output.writeString(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
|
||||
try (StreamInput input = output.bytes().streamInput()) {
|
||||
input.setVersion(Version.V_2_0_0_beta1);
|
||||
LicensesMetaData licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
|
||||
assertThat(licensesMetaData.getLicense(), equalTo(trialLicense));
|
||||
}
|
||||
|
||||
// signed license
|
||||
License signedLicense = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2));
|
||||
output = new BytesStreamOutput();
|
||||
output.writeVInt(1);
|
||||
signedLicense.writeTo(output);
|
||||
output.writeVInt(0);
|
||||
try (StreamInput input = output.bytes().streamInput()) {
|
||||
input.setVersion(Version.V_2_0_0_beta1);
|
||||
LicensesMetaData licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
|
||||
assertThat(licensesMetaData.getLicense(), equalTo(signedLicense));
|
||||
}
|
||||
}
|
||||
|
||||
public void testLicenseTombstoneFromXContext() throws Exception {
|
||||
final XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject("licenses");
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.RandomizedTest;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
|
@ -15,10 +16,12 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.licensor.LicenseSigner;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.junit.Assert;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -27,6 +30,9 @@ import java.util.concurrent.Callable;
|
|||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.ESTestCase.assertNotNull;
|
||||
import static org.elasticsearch.test.ESTestCase.awaitBusy;
|
||||
import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength;
|
||||
|
@ -39,6 +45,16 @@ public class TestUtils {
|
|||
|
||||
private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
|
||||
private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
|
||||
private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
|
||||
|
||||
public static String dateMathString(String time, final long now) {
|
||||
return dateTimeFormatter.print(dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
public static long dateMath(String time, final long now) {
|
||||
return dateMathParser.parse(time, new Callable<Long>() {
|
||||
|
@ -48,6 +64,159 @@ public class TestUtils {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static LicenseSpec generateRandomLicenseSpec(int version) {
|
||||
boolean datesInMillis = randomBoolean();
|
||||
long now = System.currentTimeMillis();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String feature = "feature__" + randomInt();
|
||||
String issuer = "issuer__" + randomInt();
|
||||
String issuedTo = "issuedTo__" + randomInt();
|
||||
final String type;
|
||||
final String subscriptionType;
|
||||
if (version < License.VERSION_NO_FEATURE_TYPE) {
|
||||
subscriptionType = randomFrom("gold", "silver", "platinum");
|
||||
type = "subscription";//randomFrom("subscription", "internal", "development");
|
||||
} else {
|
||||
subscriptionType = null;
|
||||
type = randomFrom("basic", "dev", "gold", "silver", "platinum");
|
||||
}
|
||||
int maxNodes = RandomizedTest.randomIntBetween(5, 100);
|
||||
if (datesInMillis) {
|
||||
long issueDateInMillis = dateMath("now", now);
|
||||
long expiryDateInMillis = dateMath("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDateInMillis, expiryDateInMillis, type, subscriptionType, issuedTo, issuer,
|
||||
maxNodes);
|
||||
} else {
|
||||
String issueDate = dateMathString("now", now);
|
||||
String expiryDate = dateMathString("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDate, expiryDate, type, subscriptionType, issuedTo, issuer, maxNodes);
|
||||
}
|
||||
}
|
||||
|
||||
public static String generateLicenseSpecString(LicenseSpec licenseSpec) throws IOException {
|
||||
XContentBuilder licenses = jsonBuilder();
|
||||
licenses.startObject();
|
||||
licenses.startArray("licenses");
|
||||
licenses.startObject()
|
||||
.field("uid", licenseSpec.uid)
|
||||
.field("type", licenseSpec.type)
|
||||
.field("subscription_type", licenseSpec.subscriptionType)
|
||||
.field("issued_to", licenseSpec.issuedTo)
|
||||
.field("issuer", licenseSpec.issuer)
|
||||
.field("feature", licenseSpec.feature)
|
||||
.field("max_nodes", licenseSpec.maxNodes);
|
||||
|
||||
if (licenseSpec.issueDate != null) {
|
||||
licenses.field("issue_date", licenseSpec.issueDate);
|
||||
} else {
|
||||
licenses.field("issue_date_in_millis", licenseSpec.issueDateInMillis);
|
||||
}
|
||||
if (licenseSpec.expiryDate != null) {
|
||||
licenses.field("expiry_date", licenseSpec.expiryDate);
|
||||
} else {
|
||||
licenses.field("expiry_date_in_millis", licenseSpec.expiryDateInMillis);
|
||||
}
|
||||
licenses.field("version", licenseSpec.version);
|
||||
licenses.endObject();
|
||||
licenses.endArray();
|
||||
licenses.endObject();
|
||||
return licenses.string();
|
||||
}
|
||||
|
||||
public static License generateLicenses(LicenseSpec spec) {
|
||||
License.Builder builder = License.builder()
|
||||
.uid(spec.uid)
|
||||
.feature(spec.feature)
|
||||
.type(spec.type)
|
||||
.subscriptionType(spec.subscriptionType)
|
||||
.issuedTo(spec.issuedTo)
|
||||
.issuer(spec.issuer)
|
||||
.maxNodes(spec.maxNodes);
|
||||
|
||||
if (spec.expiryDate != null) {
|
||||
builder.expiryDate(DateUtils.endOfTheDay(spec.expiryDate));
|
||||
} else {
|
||||
builder.expiryDate(spec.expiryDateInMillis);
|
||||
}
|
||||
if (spec.issueDate != null) {
|
||||
builder.issueDate(DateUtils.beginningOfTheDay(spec.issueDate));
|
||||
} else {
|
||||
builder.issueDate(spec.issueDateInMillis);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static void assertLicenseSpec(LicenseSpec spec, License license) {
|
||||
MatcherAssert.assertThat(license.uid(), equalTo(spec.uid));
|
||||
MatcherAssert.assertThat(license.issuedTo(), equalTo(spec.issuedTo));
|
||||
MatcherAssert.assertThat(license.issuer(), equalTo(spec.issuer));
|
||||
MatcherAssert.assertThat(license.type(), equalTo(spec.type));
|
||||
MatcherAssert.assertThat(license.maxNodes(), equalTo(spec.maxNodes));
|
||||
if (spec.issueDate != null) {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(DateUtils.beginningOfTheDay(spec.issueDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(spec.issueDateInMillis));
|
||||
}
|
||||
if (spec.expiryDate != null) {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(DateUtils.endOfTheDay(spec.expiryDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(spec.expiryDateInMillis));
|
||||
}
|
||||
}
|
||||
|
||||
public static class LicenseSpec {
|
||||
public final int version;
|
||||
public final String feature;
|
||||
public final String issueDate;
|
||||
public final long issueDateInMillis;
|
||||
public final String expiryDate;
|
||||
public final long expiryDateInMillis;
|
||||
public final String uid;
|
||||
public final String type;
|
||||
public final String subscriptionType;
|
||||
public final String issuedTo;
|
||||
public final String issuer;
|
||||
public final int maxNodes;
|
||||
|
||||
public LicenseSpec(String issueDate, String expiryDate) {
|
||||
this(License.VERSION_CURRENT, UUID.randomUUID().toString(), "feature", issueDate, expiryDate, "trial", "none", "customer",
|
||||
"elasticsearch", 5);
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, long issueDateInMillis, long expiryDateInMillis, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDateInMillis = issueDateInMillis;
|
||||
this.issueDate = null;
|
||||
this.expiryDateInMillis = expiryDateInMillis;
|
||||
this.expiryDate = null;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, String issueDate, String expiryDate, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDate = issueDate;
|
||||
this.issueDateInMillis = -1;
|
||||
this.expiryDate = expiryDate;
|
||||
this.expiryDateInMillis = -1;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
}
|
||||
|
||||
public static Path getTestPriKeyPath() throws Exception {
|
||||
return getResourcePath("/private.key");
|
||||
}
|
||||
|
|
|
@ -11,8 +11,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.TrialLicense;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -21,7 +19,7 @@ import java.util.Base64;
|
|||
import java.util.Collections;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.encrypt;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
|
||||
|
|
|
@ -7,28 +7,46 @@ package org.elasticsearch.xpack.monitoring;
|
|||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.license.LicenseService;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction;
|
||||
import org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction;
|
||||
import org.elasticsearch.xpack.monitoring.agent.AgentService;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.CollectorModule;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.ExporterModule;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.Collector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndexRecoveryCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndexStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndicesStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.node.NodeStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.shards.ShardsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.Exporter;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.Exporters;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.http.HttpExporter;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.local.LocalExporter;
|
||||
import org.elasticsearch.xpack.monitoring.cleaner.CleanerService;
|
||||
import org.elasticsearch.xpack.monitoring.client.MonitoringClientModule;
|
||||
import org.elasticsearch.xpack.monitoring.rest.action.RestMonitoringBulkAction;
|
||||
import org.elasticsearch.plugins.ActionPlugin;
|
||||
import org.elasticsearch.rest.RestHandler;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.singletonList;
|
||||
|
@ -44,12 +62,16 @@ public class Monitoring implements ActionPlugin {
|
|||
public static final String NAME = "monitoring";
|
||||
|
||||
private final Settings settings;
|
||||
private final Environment env;
|
||||
private final XPackLicenseState licenseState;
|
||||
private final boolean enabled;
|
||||
private final boolean transportClientMode;
|
||||
private final boolean tribeNode;
|
||||
|
||||
public Monitoring(Settings settings) {
|
||||
public Monitoring(Settings settings, Environment env, XPackLicenseState licenseState) {
|
||||
this.settings = settings;
|
||||
this.env = env;
|
||||
this.licenseState = licenseState;
|
||||
this.enabled = enabled(settings);
|
||||
this.transportClientMode = XPackPlugin.transportClientMode(settings);
|
||||
this.tribeNode = XPackPlugin.isTribeNode(settings);
|
||||
|
@ -65,20 +87,41 @@ public class Monitoring implements ActionPlugin {
|
|||
|
||||
public Collection<Module> nodeModules() {
|
||||
List<Module> modules = new ArrayList<>();
|
||||
modules.add(new MonitoringModule(enabled, transportClientMode));
|
||||
modules.add(new ExporterModule(settings));
|
||||
if (enabled && transportClientMode == false && tribeNode == false) {
|
||||
modules.add(new CollectorModule());
|
||||
modules.add(new MonitoringClientModule());
|
||||
}
|
||||
modules.add(b -> {
|
||||
XPackPlugin.bindFeatureSet(b, MonitoringFeatureSet.class);
|
||||
if (transportClientMode || enabled == false || tribeNode) {
|
||||
b.bind(Exporters.class).toProvider(Providers.of(null));
|
||||
}
|
||||
});
|
||||
return modules;
|
||||
}
|
||||
|
||||
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
|
||||
if (enabled == false || transportClientMode || tribeNode) {
|
||||
public Collection<Object> createComponents(InternalClient client, ThreadPool threadPool, ClusterService clusterService,
|
||||
LicenseService licenseService) {
|
||||
if (enabled == false || tribeNode) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return Arrays.<Class<? extends LifecycleComponent>>asList(AgentService.class, CleanerService.class);
|
||||
|
||||
final ClusterSettings clusterSettings = clusterService.getClusterSettings();
|
||||
final MonitoringSettings monitoringSettings = new MonitoringSettings(settings, clusterSettings);
|
||||
final CleanerService cleanerService = new CleanerService(settings, clusterSettings, threadPool, licenseState);
|
||||
|
||||
Map<String, Exporter.Factory> exporterFactories = new HashMap<>();
|
||||
exporterFactories.put(HttpExporter.TYPE, config -> new HttpExporter(config, env));
|
||||
exporterFactories.put(LocalExporter.TYPE, config -> new LocalExporter(config, client, clusterService, cleanerService));
|
||||
final Exporters exporters = new Exporters(settings, exporterFactories, clusterService);
|
||||
|
||||
Set<Collector> collectors = new HashSet<>();
|
||||
collectors.add(new IndicesStatsCollector(settings, clusterService, monitoringSettings, licenseState, client));
|
||||
collectors.add(new IndexStatsCollector(settings, clusterService, monitoringSettings, licenseState, client));
|
||||
collectors.add(new ClusterStatsCollector(settings, clusterService, monitoringSettings, licenseState, client, licenseService));
|
||||
collectors.add(new ClusterStateCollector(settings, clusterService, monitoringSettings, licenseState, client));
|
||||
collectors.add(new ShardsCollector(settings, clusterService, monitoringSettings, licenseState));
|
||||
collectors.add(new NodeStatsCollector(settings, clusterService, monitoringSettings, licenseState, client));
|
||||
collectors.add(new IndexRecoveryCollector(settings, clusterService, monitoringSettings, licenseState, client));
|
||||
final AgentService agentService = new AgentService(settings, clusterSettings, collectors, exporters);
|
||||
|
||||
return Arrays.asList(agentService, monitoringSettings, exporters, cleanerService);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -5,9 +5,12 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -17,10 +20,6 @@ import org.elasticsearch.xpack.XPackFeatureSet;
|
|||
import org.elasticsearch.xpack.monitoring.agent.exporter.Exporter;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.Exporters;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -31,12 +30,10 @@ public class MonitoringFeatureSet implements XPackFeatureSet {
|
|||
private final Exporters exporters;
|
||||
|
||||
@Inject
|
||||
public MonitoringFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState, @Nullable Exporters exporters,
|
||||
NamedWriteableRegistry namedWriteableRegistry) {
|
||||
public MonitoringFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState, @Nullable Exporters exporters) {
|
||||
this.enabled = MonitoringSettings.ENABLED.get(settings);
|
||||
this.licenseState = licenseState;
|
||||
this.exporters = exporters;
|
||||
namedWriteableRegistry.register(Usage.class, Usage.writeableName(Monitoring.NAME), Usage::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -71,7 +68,7 @@ public class MonitoringFeatureSet implements XPackFeatureSet {
|
|||
Map<String, Object> usage = new HashMap<>();
|
||||
for (Exporter exporter : exporters) {
|
||||
if (exporter.config().enabled()) {
|
||||
String type = exporter.type();
|
||||
String type = exporter.config().type();
|
||||
int count = (Integer) usage.getOrDefault(type, 0);
|
||||
usage.put(type, count + 1);
|
||||
}
|
||||
|
@ -79,7 +76,7 @@ public class MonitoringFeatureSet implements XPackFeatureSet {
|
|||
return usage;
|
||||
}
|
||||
|
||||
static class Usage extends XPackFeatureSet.Usage {
|
||||
public static class Usage extends XPackFeatureSet.Usage {
|
||||
|
||||
private static final String ENABLED_EXPORTERS_XFIELD = "enabled_exporters";
|
||||
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.monitoring;
|
||||
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.monitoring.agent.AgentService;
|
||||
import org.elasticsearch.xpack.monitoring.cleaner.CleanerService;
|
||||
|
||||
public class MonitoringModule extends AbstractModule {
|
||||
|
||||
private final boolean enabled;
|
||||
private final boolean transportClientMode;
|
||||
|
||||
public MonitoringModule(boolean enabled, boolean transportClientMode) {
|
||||
this.enabled = enabled;
|
||||
this.transportClientMode = transportClientMode;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configure() {
|
||||
XPackPlugin.bindFeatureSet(binder(), MonitoringFeatureSet.class);
|
||||
|
||||
if (enabled && transportClientMode == false) {
|
||||
bind(MonitoringSettings.class).asEagerSingleton();
|
||||
bind(AgentService.class).asEagerSingleton();
|
||||
bind(CleanerService.class).asEagerSingleton();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -34,7 +34,6 @@ public class MonitoringSettings extends AbstractComponent {
|
|||
* The minimum amount of time allowed for the history duration.
|
||||
*/
|
||||
public static final TimeValue HISTORY_DURATION_MINIMUM = TimeValue.timeValueHours(24);
|
||||
public static final TimeValue MAX_LICENSE_GRACE_PERIOD = TimeValue.timeValueHours(7 * 24);
|
||||
|
||||
/**
|
||||
* Determines whether monitoring is enabled/disabled
|
||||
|
@ -123,7 +122,7 @@ public class MonitoringSettings extends AbstractComponent {
|
|||
* Settings/Options per configured exporter
|
||||
*/
|
||||
public static final Setting<Settings> EXPORTERS_SETTINGS =
|
||||
groupSetting(collectionKey("exporters."), Property.Dynamic, Property.NodeScope);
|
||||
groupSetting(key("exporters."), Property.Dynamic, Property.NodeScope);
|
||||
|
||||
public static List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(INDICES,
|
||||
|
@ -141,7 +140,7 @@ public class MonitoringSettings extends AbstractComponent {
|
|||
}
|
||||
|
||||
public static List<String> getSettingsFilter() {
|
||||
return Arrays.asList("xpack.monitoring.collection.exporters.*.auth.*", "xpack.monitoring.collection.exporters.*.ssl.*");
|
||||
return Arrays.asList(key("exporters.*.auth.*"), key("exporters.*.ssl.*"));
|
||||
}
|
||||
|
||||
|
||||
|
@ -153,7 +152,6 @@ public class MonitoringSettings extends AbstractComponent {
|
|||
private volatile boolean recoveryActiveOnly;
|
||||
private volatile String[] indices;
|
||||
|
||||
@Inject
|
||||
public MonitoringSettings(Settings settings, ClusterSettings clusterSettings) {
|
||||
super(settings);
|
||||
|
||||
|
|
|
@ -51,7 +51,6 @@ public class AgentService extends AbstractLifecycleComponent {
|
|||
private final String[] settingsCollectors;
|
||||
private final Exporters exporters;
|
||||
|
||||
@Inject
|
||||
public AgentService(Settings settings, ClusterSettings clusterSettings, Set<Collector> collectors, Exporters exporters) {
|
||||
super(settings);
|
||||
this.samplingIntervalMillis = MonitoringSettings.INTERVAL.get(settings).millis();
|
||||
|
|
|
@ -27,7 +27,6 @@ public abstract class AbstractCollector extends AbstractLifecycleComponent imple
|
|||
protected final MonitoringSettings monitoringSettings;
|
||||
protected final XPackLicenseState licenseState;
|
||||
|
||||
@Inject
|
||||
public AbstractCollector(Settings settings, String name, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState) {
|
||||
super(settings);
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector;
|
||||
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.common.inject.multibindings.Multibinder;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndexRecoveryCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndexStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndicesStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.node.NodeStatsCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.shards.ShardsCollector;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
public class CollectorModule extends AbstractModule {
|
||||
|
||||
private final Set<Class<? extends Collector>> collectors = new HashSet<>();
|
||||
|
||||
public CollectorModule() {
|
||||
// Registers default collectors
|
||||
registerCollector(IndicesStatsCollector.class);
|
||||
registerCollector(IndexStatsCollector.class);
|
||||
registerCollector(ClusterStatsCollector.class);
|
||||
registerCollector(ClusterStateCollector.class);
|
||||
registerCollector(ShardsCollector.class);
|
||||
registerCollector(NodeStatsCollector.class);
|
||||
registerCollector(IndexRecoveryCollector.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configure() {
|
||||
Multibinder<Collector> binder = Multibinder.newSetBinder(binder(), Collector.class);
|
||||
for (Class<? extends Collector> collector : collectors) {
|
||||
bind(collector).asEagerSingleton();
|
||||
binder.addBinding().to(collector);
|
||||
}
|
||||
}
|
||||
|
||||
public void registerCollector(Class<? extends Collector> collector) {
|
||||
collectors.add(collector);
|
||||
}
|
||||
}
|
|
@ -6,7 +6,7 @@
|
|||
package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ClusterInfoMonitoringDoc extends MonitoringDoc {
|
||||
|
|
|
@ -5,13 +5,17 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.xpack.monitoring.MonitoringSettings;
|
||||
|
@ -19,11 +23,6 @@ import org.elasticsearch.xpack.monitoring.agent.collector.AbstractCollector;
|
|||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Collector for cluster state.
|
||||
* <p>
|
||||
|
@ -36,7 +35,6 @@ public class ClusterStateCollector extends AbstractCollector {
|
|||
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public ClusterStateCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState, InternalClient client) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
|
|
|
@ -5,13 +5,17 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.LicenseService;
|
||||
import org.elasticsearch.license.LicenseUtils;
|
||||
|
@ -21,11 +25,6 @@ import org.elasticsearch.xpack.monitoring.agent.collector.AbstractCollector;
|
|||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Collector for cluster stats.
|
||||
* <p>
|
||||
|
@ -43,7 +42,6 @@ public class ClusterStatsCollector extends AbstractCollector {
|
|||
private final LicenseService licenseService;
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public ClusterStatsCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState, InternalClient client,
|
||||
LicenseService licenseService) {
|
||||
|
|
|
@ -5,12 +5,17 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.indices;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
|
@ -20,12 +25,6 @@ import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
|||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Collector for the Recovery API.
|
||||
* <p>
|
||||
|
@ -38,7 +37,6 @@ public class IndexRecoveryCollector extends AbstractCollector {
|
|||
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public IndexRecoveryCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState, InternalClient client) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
|
|
|
@ -5,6 +5,12 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.indices;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.stats.IndexStats;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
|
@ -12,7 +18,6 @@ import org.elasticsearch.client.Client;
|
|||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
|
@ -22,12 +27,6 @@ import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
|||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Collector for indices statistics.
|
||||
* <p>
|
||||
|
@ -40,7 +39,6 @@ public class IndexStatsCollector extends AbstractCollector {
|
|||
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public IndexStatsCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState, InternalClient client) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
|
|
|
@ -5,12 +5,15 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.indices;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
|
@ -20,10 +23,6 @@ import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
|||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
/**
|
||||
* Collector for indices statistics.
|
||||
* <p>
|
||||
|
@ -35,7 +34,6 @@ public class IndicesStatsCollector extends AbstractCollector {
|
|||
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public IndicesStatsCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState, InternalClient client) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
|
|
|
@ -5,6 +5,10 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.node;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
|
@ -12,20 +16,14 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
|
|||
import org.elasticsearch.bootstrap.BootstrapInfo;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.xpack.monitoring.MonitoringSettings;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
import org.elasticsearch.xpack.security.InternalClient;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
/**
|
||||
* Collector for nodes statistics.
|
||||
* <p>
|
||||
|
@ -37,29 +35,11 @@ public class NodeStatsCollector extends AbstractCollector {
|
|||
public static final String NAME = "node-stats-collector";
|
||||
|
||||
private final Client client;
|
||||
private final NodeEnvironment nodeEnvironment;
|
||||
|
||||
private final DiskThresholdDecider diskThresholdDecider;
|
||||
|
||||
@Inject
|
||||
public NodeStatsCollector(Settings settings, ClusterService clusterService, MonitoringSettings monitoringSettings,
|
||||
XPackLicenseState licenseState, InternalClient client,
|
||||
NodeEnvironment nodeEnvironment, DiskThresholdDecider diskThresholdDecider) {
|
||||
XPackLicenseState licenseState, InternalClient client) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
this.client = client;
|
||||
this.nodeEnvironment = nodeEnvironment;
|
||||
this.diskThresholdDecider = diskThresholdDecider;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean shouldCollect() {
|
||||
// In some cases, the collector starts to collect nodes stats but the
|
||||
// NodeEnvironment is not fully initialized (NodePath is null) and can fail.
|
||||
// This why we need to check for nodeEnvironment.hasNodeFile() here, but only
|
||||
// for nodes that can hold data. Client nodes can collect nodes stats because
|
||||
// elasticsearch correctly handles the nodes stats for client nodes.
|
||||
return super.shouldCollect()
|
||||
&& (DiscoveryNode.nodeRequiresLocalStorage(settings) == false || nodeEnvironment.hasNodeFile());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -80,12 +60,6 @@ public class NodeStatsCollector extends AbstractCollector {
|
|||
}
|
||||
|
||||
NodeStats nodeStats = response.getNodes().get(0);
|
||||
|
||||
// Here we are calling directly the DiskThresholdDecider to retrieve the high watermark value
|
||||
// It would be nicer to use a settings API like documented in #6732
|
||||
Double diskThresholdWatermarkHigh = (diskThresholdDecider != null) ? 100.0 - diskThresholdDecider.getFreeDiskThresholdHigh() : -1;
|
||||
boolean diskThresholdDeciderEnabled = (diskThresholdDecider != null) && diskThresholdDecider.isEnabled();
|
||||
|
||||
DiscoveryNode sourceNode = localNode();
|
||||
|
||||
NodeStatsMonitoringDoc nodeStatsDoc = new NodeStatsMonitoringDoc(monitoringId(), monitoringVersion());
|
||||
|
@ -96,8 +70,6 @@ public class NodeStatsCollector extends AbstractCollector {
|
|||
nodeStatsDoc.setNodeMaster(isLocalNodeMaster());
|
||||
nodeStatsDoc.setNodeStats(nodeStats);
|
||||
nodeStatsDoc.setMlockall(BootstrapInfo.isMemoryLocked());
|
||||
nodeStatsDoc.setDiskThresholdWaterMarkHigh(diskThresholdWatermarkHigh);
|
||||
nodeStatsDoc.setDiskThresholdDeciderEnabled(diskThresholdDeciderEnabled);
|
||||
|
||||
return Collections.singletonList(nodeStatsDoc);
|
||||
}
|
||||
|
|
|
@ -13,10 +13,7 @@ public class NodeStatsMonitoringDoc extends MonitoringDoc {
|
|||
private String nodeId;
|
||||
private boolean nodeMaster;
|
||||
private NodeStats nodeStats;
|
||||
|
||||
private boolean mlockall;
|
||||
private Double diskThresholdWaterMarkHigh;
|
||||
private boolean diskThresholdDeciderEnabled;
|
||||
|
||||
public NodeStatsMonitoringDoc(String monitoringId, String monitoringVersion) {
|
||||
super(monitoringId, monitoringVersion);
|
||||
|
@ -38,14 +35,6 @@ public class NodeStatsMonitoringDoc extends MonitoringDoc {
|
|||
this.mlockall = mlockall;
|
||||
}
|
||||
|
||||
public void setDiskThresholdWaterMarkHigh(Double diskThresholdWaterMarkHigh) {
|
||||
this.diskThresholdWaterMarkHigh = diskThresholdWaterMarkHigh;
|
||||
}
|
||||
|
||||
public void setDiskThresholdDeciderEnabled(boolean diskThresholdDeciderEnabled) {
|
||||
this.diskThresholdDeciderEnabled = diskThresholdDeciderEnabled;
|
||||
}
|
||||
|
||||
public String getNodeId() {
|
||||
return nodeId;
|
||||
}
|
||||
|
@ -61,13 +50,5 @@ public class NodeStatsMonitoringDoc extends MonitoringDoc {
|
|||
public boolean isMlockall() {
|
||||
return mlockall;
|
||||
}
|
||||
|
||||
public Double getDiskThresholdWaterMarkHigh() {
|
||||
return diskThresholdWaterMarkHigh;
|
||||
}
|
||||
|
||||
public boolean isDiskThresholdDeciderEnabled() {
|
||||
return diskThresholdDeciderEnabled;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,12 +5,17 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.collector.shards;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.routing.RoutingTable;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
|
@ -18,12 +23,6 @@ import org.elasticsearch.xpack.monitoring.MonitoringSettings;
|
|||
import org.elasticsearch.xpack.monitoring.agent.collector.AbstractCollector;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Collector for shards.
|
||||
* <p>
|
||||
|
@ -34,7 +33,6 @@ public class ShardsCollector extends AbstractCollector {
|
|||
|
||||
public static final String NAME = "shards-collector";
|
||||
|
||||
@Inject
|
||||
public ShardsCollector(Settings settings, ClusterService clusterService,
|
||||
MonitoringSettings monitoringSettings, XPackLicenseState licenseState) {
|
||||
super(settings, NAME, clusterService, monitoringSettings, licenseState);
|
||||
|
|
|
@ -19,7 +19,6 @@ public abstract class Exporter implements AutoCloseable {
|
|||
public static final String INDEX_NAME_TIME_FORMAT_SETTING = "index.name.time_format";
|
||||
public static final String BULK_TIMEOUT_SETTING = "bulk.timeout";
|
||||
|
||||
protected final String type;
|
||||
protected final Config config;
|
||||
protected final ESLogger logger;
|
||||
|
||||
|
@ -27,17 +26,12 @@ public abstract class Exporter implements AutoCloseable {
|
|||
|
||||
private AtomicBoolean closed = new AtomicBoolean(false);
|
||||
|
||||
public Exporter(String type, Config config) {
|
||||
this.type = type;
|
||||
public Exporter(Config config) {
|
||||
this.config = config;
|
||||
this.logger = config.logger(getClass());
|
||||
this.bulkTimeout = config.settings().getAsTime(BULK_TIMEOUT_SETTING, null);
|
||||
}
|
||||
|
||||
public String type() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public String name() {
|
||||
return config.name;
|
||||
}
|
||||
|
@ -50,6 +44,11 @@ public abstract class Exporter implements AutoCloseable {
|
|||
return false;
|
||||
}
|
||||
|
||||
/** Returns true if only one instance of this exporter should be allowed. */
|
||||
public boolean isSingleton() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens up a new export bulk. May return {@code null} indicating this exporter is not ready
|
||||
* yet to export the docs
|
||||
|
@ -76,12 +75,14 @@ public abstract class Exporter implements AutoCloseable {
|
|||
public static class Config {
|
||||
|
||||
private final String name;
|
||||
private final String type;
|
||||
private final boolean enabled;
|
||||
private final Settings globalSettings;
|
||||
private final Settings settings;
|
||||
|
||||
public Config(String name, Settings globalSettings, Settings settings) {
|
||||
public Config(String name, String type, Settings globalSettings, Settings settings) {
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.globalSettings = globalSettings;
|
||||
this.settings = settings;
|
||||
this.enabled = settings.getAsBoolean("enabled", true);
|
||||
|
@ -91,6 +92,10 @@ public abstract class Exporter implements AutoCloseable {
|
|||
return name;
|
||||
}
|
||||
|
||||
public String type() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public boolean enabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
@ -104,24 +109,10 @@ public abstract class Exporter implements AutoCloseable {
|
|||
}
|
||||
}
|
||||
|
||||
public abstract static class Factory<E extends Exporter> {
|
||||
/** A factory for constructing {@link Exporter} instances.*/
|
||||
public interface Factory {
|
||||
|
||||
private final String type;
|
||||
private final boolean singleton;
|
||||
|
||||
public Factory(String type, boolean singleton) {
|
||||
this.type = type;
|
||||
this.singleton = singleton;
|
||||
}
|
||||
|
||||
public String type() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public boolean singleton() {
|
||||
return singleton;
|
||||
}
|
||||
|
||||
public abstract E create(Config config);
|
||||
/** Create an exporter with the given configuration. */
|
||||
Exporter create(Config config);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.monitoring.agent.exporter;
|
||||
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.common.inject.multibindings.MapBinder;
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.http.HttpExporter;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.local.LocalExporter;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class ExporterModule extends AbstractModule {
|
||||
|
||||
private final Settings settings;
|
||||
private final Map<String, Class<? extends Exporter.Factory<? extends Exporter>>> exporterFactories = new HashMap<>();
|
||||
|
||||
public ExporterModule(Settings settings) {
|
||||
this.settings = settings;
|
||||
registerExporter(HttpExporter.TYPE, HttpExporter.Factory.class);
|
||||
registerExporter(LocalExporter.TYPE, LocalExporter.Factory.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configure() {
|
||||
if (Monitoring.enabled(settings) && XPackPlugin.transportClientMode(settings) == false
|
||||
&& XPackPlugin.isTribeNode(settings) == false) {
|
||||
bind(Exporters.class).asEagerSingleton();
|
||||
MapBinder<String, Exporter.Factory> factoryBinder = MapBinder.newMapBinder(binder(), String.class, Exporter.Factory.class);
|
||||
for (Map.Entry<String, Class<? extends Exporter.Factory<? extends Exporter>>> entry : exporterFactories.entrySet()) {
|
||||
bind(entry.getValue()).asEagerSingleton();
|
||||
factoryBinder.addBinding(entry.getKey()).to(entry.getValue());
|
||||
}
|
||||
} else {
|
||||
bind(Exporters.class).toProvider(Providers.of(null));
|
||||
}
|
||||
}
|
||||
|
||||
public void registerExporter(String type, Class<? extends Exporter.Factory<? extends Exporter>> factory) {
|
||||
exporterFactories.put(type, factory);
|
||||
}
|
||||
}
|
|
@ -8,9 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.exporter;
|
|||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.component.Lifecycle;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsException;
|
||||
import org.elasticsearch.node.Node;
|
||||
|
@ -39,16 +37,15 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
|
|||
|
||||
private final AtomicReference<Map<String, Exporter>> exporters;
|
||||
|
||||
@Inject
|
||||
public Exporters(Settings settings, Map<String, Exporter.Factory> factories,
|
||||
ClusterService clusterService,
|
||||
ClusterSettings clusterSettings) {
|
||||
ClusterService clusterService) {
|
||||
|
||||
super(settings);
|
||||
this.factories = factories;
|
||||
this.clusterService = clusterService;
|
||||
this.exporters = new AtomicReference<>(emptyMap());
|
||||
clusterSettings.addSettingsUpdateConsumer(MonitoringSettings.EXPORTERS_SETTINGS, this::setExportersSetting);
|
||||
clusterService.getClusterSettings().addSettingsUpdateConsumer(MonitoringSettings.EXPORTERS_SETTINGS,
|
||||
this::setExportersSetting);
|
||||
}
|
||||
|
||||
private void setExportersSetting(Settings exportersSetting) {
|
||||
|
@ -135,7 +132,7 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
|
|||
if (factory == null) {
|
||||
throw new SettingsException("unknown exporter type [" + type + "] set for exporter [" + name + "]");
|
||||
}
|
||||
Exporter.Config config = new Exporter.Config(name, globalSettings, exporterSettings);
|
||||
Exporter.Config config = new Exporter.Config(name, type, globalSettings, exporterSettings);
|
||||
if (!config.enabled()) {
|
||||
hasDisabled = true;
|
||||
if (logger.isDebugEnabled()) {
|
||||
|
@ -143,8 +140,9 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
|
|||
}
|
||||
continue;
|
||||
}
|
||||
if (factory.singleton()) {
|
||||
// this is a singleton exporter factory, let's make sure we didn't already registered one
|
||||
Exporter exporter = factory.create(config);
|
||||
if (exporter.isSingleton()) {
|
||||
// this is a singleton exporter, let's make sure we didn't already create one
|
||||
// (there can only be one instance of a singleton exporter)
|
||||
if (singletons.contains(type)) {
|
||||
throw new SettingsException("multiple [" + type + "] exporters are configured. there can " +
|
||||
|
@ -152,7 +150,7 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
|
|||
}
|
||||
singletons.add(type);
|
||||
}
|
||||
exporters.put(config.name(), factory.create(config));
|
||||
exporters.put(config.name(), exporter);
|
||||
}
|
||||
|
||||
// no exporters are configured, lets create a default local one.
|
||||
|
@ -161,7 +159,8 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
|
|||
// fallback on the default
|
||||
//
|
||||
if (exporters.isEmpty() && !hasDisabled) {
|
||||
Exporter.Config config = new Exporter.Config("default_" + LocalExporter.TYPE, globalSettings, Settings.EMPTY);
|
||||
Exporter.Config config = new Exporter.Config("default_" + LocalExporter.TYPE, LocalExporter.TYPE,
|
||||
globalSettings, Settings.EMPTY);
|
||||
exporters.put(config.name(), factories.get(LocalExporter.TYPE).create(config));
|
||||
}
|
||||
|
||||
|
|
|
@ -111,9 +111,8 @@ public class HttpExporter extends Exporter {
|
|||
final ConnectionKeepAliveWorker keepAliveWorker;
|
||||
Thread keepAliveThread;
|
||||
|
||||
public HttpExporter(Exporter.Config config, Environment env) {
|
||||
|
||||
super(TYPE, config);
|
||||
public HttpExporter(Config config, Environment env) {
|
||||
super(config);
|
||||
this.env = env;
|
||||
|
||||
hosts = config.settings().getAsArray(HOST_SETTING, Strings.EMPTY_ARRAY);
|
||||
|
@ -537,7 +536,7 @@ public class HttpExporter extends Exporter {
|
|||
try {
|
||||
HttpExporterUtils.parseHostWithPath(host, "");
|
||||
} catch (URISyntaxException | MalformedURLException e) {
|
||||
throw new SettingsException("[xpack.monitoring.collection.exporters] invalid host: [" + host + "]." +
|
||||
throw new SettingsException("[xpack.monitoring.exporters] invalid host: [" + host + "]." +
|
||||
" error: [" + e.getMessage() + "]");
|
||||
}
|
||||
}
|
||||
|
@ -754,20 +753,4 @@ public class HttpExporter extends Exporter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class Factory extends Exporter.Factory<HttpExporter> {
|
||||
|
||||
private final Environment env;
|
||||
|
||||
@Inject
|
||||
public Factory(Environment env) {
|
||||
super(TYPE, false);
|
||||
this.env = env;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HttpExporter create(Config config) {
|
||||
return new HttpExporter(config, env);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue