Merge branch 'master' into security/dynamic-disabling-in-es
Original commit: elastic/x-pack-elasticsearch@a5a3ce4851
This commit is contained in commit d2ac60c08e.

@@ -1,9 +1,13 @@
apply plugin: 'elasticsearch.build'

dependencies {
  compile project(':x-plugins:elasticsearch:license:base')
  compile project(':x-plugins:elasticsearch:x-pack')
  compile "org.elasticsearch:elasticsearch:${version}"
  testCompile "org.elasticsearch.test:framework:${version}"
}

project.forbiddenPatterns {
  exclude '**/*.key'
}

dependencyLicenses.enabled = false
@@ -7,13 +7,12 @@ package org.elasticsearch.license.licensor;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.CryptUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.CryptUtils;
import org.elasticsearch.license.License;

import java.io.IOException;
import java.nio.ByteBuffer;
@@ -20,8 +20,8 @@ import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;

import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPrivateKey;
import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPublicKey;
import static org.elasticsearch.license.CryptUtils.writeEncryptedPrivateKey;
import static org.elasticsearch.license.CryptUtils.writeEncryptedPublicKey;

public class KeyPairGeneratorTool extends Command {
@@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.LicenseSigner;

public class LicenseGeneratorTool extends Command {
@@ -20,8 +20,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.core.LicenseVerifier;
import org.elasticsearch.license.License;
import org.elasticsearch.license.LicenseVerifier;

public class LicenseVerificationTool extends Command {
@@ -6,9 +6,9 @@
package org.elasticsearch.license.licensor;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.DateUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.core.LicenseVerifier;
import org.elasticsearch.license.DateUtils;
import org.elasticsearch.license.License;
import org.elasticsearch.license.LicenseVerifier;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.DateUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.DateUtils;
import org.elasticsearch.license.License;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.MatcherAssert;
import org.joda.time.format.DateTimeFormatter;
@@ -13,7 +13,7 @@ import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.TestUtils;
import org.junit.Before;
@@ -14,7 +14,7 @@ import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.TestUtils;
import org.junit.Before;
@@ -1,36 +0,0 @@
elasticsearch-license
=====================

Elasticsearch Licensing core, tools and plugin

## Core

Contains core data structures and utilities used by **Licensor** and **Plugin**.

See `core/` and `core-shaded/`

## Licensor

Contains a collection of tools to generate key-pairs and licenses, and to validate licenses.

See `licensor/`

See the [wiki](https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
[Licensing Tools Usage & Reference](https://github.com/elasticsearch/elasticsearch-license/wiki/License-Tools-Usage-&-Reference)

## Plugin

**NOTE**: The license plugin has to be packaged with the right public key when it is deployed to public Maven repositories
or uploaded to S3. Use `-Dkeys.path=<PATH_TO_KEY_DIR>` with the Maven command to package the plugin with a specified key.

See `plugin/`

See [Getting Started](https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/getting-started.asciidoc) to install the license plugin.

See [Licensing REST APIs](https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/license.asciidoc)
to use the license plugin from an Elasticsearch deployment.

See the [wiki](https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
- [License Plugin Consumer Interface](https://github.com/elasticsearch/elasticsearch-license/wiki/License---Consumer-Interface)
- [License Plugin Release Process](https://github.com/elasticsearch/elasticsearch-license/wiki/Plugin-Release-Process)
- [License Plugin Design](https://github.com/elasticsearch/elasticsearch-license/wiki/License-Plugin--Design)
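For illustration, the Maven packaging step mentioned in the NOTE above might look like the following one-liner. This is only a sketch: the `clean package` goals and the key directory path are assumptions for the example, not taken from this repository; only the `-Dkeys.path` property comes from the README.

    mvn clean package -Dkeys.path=/path/to/license/keys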
@@ -1,20 +0,0 @@
apply plugin: 'elasticsearch.build'

dependencies {
  compile "org.elasticsearch:elasticsearch:${version}"
  testCompile "org.elasticsearch.test:framework:${version}"
}

compactProfile = 'full'

dependencyLicenses.enabled = false

jar {
  baseName = 'license-core'
}

modifyPom {
  project {
    artifactId 'license-core'
  }
}
@ -1,201 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.ESTestCase.randomFrom;
|
||||
import static org.hamcrest.core.IsEqual.equalTo;
|
||||
|
||||
public class TestUtils {
|
||||
|
||||
private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
|
||||
private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
|
||||
private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
|
||||
|
||||
public static String dateMathString(String time, final long now) {
|
||||
return dateTimeFormatter.print(dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
public static long dateMath(String time, final long now) {
|
||||
return dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static LicenseSpec generateRandomLicenseSpec(int version) {
|
||||
boolean datesInMillis = randomBoolean();
|
||||
long now = System.currentTimeMillis();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String feature = "feature__" + randomInt();
|
||||
String issuer = "issuer__" + randomInt();
|
||||
String issuedTo = "issuedTo__" + randomInt();
|
||||
final String type;
|
||||
final String subscriptionType;
|
||||
if (version < License.VERSION_NO_FEATURE_TYPE) {
|
||||
subscriptionType = randomFrom("gold", "silver", "platinum");
|
||||
type = "subscription";//randomFrom("subscription", "internal", "development");
|
||||
} else {
|
||||
subscriptionType = null;
|
||||
type = randomFrom("basic", "dev", "gold", "silver", "platinum");
|
||||
}
|
||||
int maxNodes = randomIntBetween(5, 100);
|
||||
if (datesInMillis) {
|
||||
long issueDateInMillis = dateMath("now", now);
|
||||
long expiryDateInMillis = dateMath("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDateInMillis, expiryDateInMillis, type, subscriptionType, issuedTo, issuer,
|
||||
maxNodes);
|
||||
} else {
|
||||
String issueDate = dateMathString("now", now);
|
||||
String expiryDate = dateMathString("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDate, expiryDate, type, subscriptionType, issuedTo, issuer, maxNodes);
|
||||
}
|
||||
}
|
||||
|
||||
public static String generateLicenseSpecString(LicenseSpec licenseSpec) throws IOException {
|
||||
XContentBuilder licenses = jsonBuilder();
|
||||
licenses.startObject();
|
||||
licenses.startArray("licenses");
|
||||
licenses.startObject()
|
||||
.field("uid", licenseSpec.uid)
|
||||
.field("type", licenseSpec.type)
|
||||
.field("subscription_type", licenseSpec.subscriptionType)
|
||||
.field("issued_to", licenseSpec.issuedTo)
|
||||
.field("issuer", licenseSpec.issuer)
|
||||
.field("feature", licenseSpec.feature)
|
||||
.field("max_nodes", licenseSpec.maxNodes);
|
||||
|
||||
if (licenseSpec.issueDate != null) {
|
||||
licenses.field("issue_date", licenseSpec.issueDate);
|
||||
} else {
|
||||
licenses.field("issue_date_in_millis", licenseSpec.issueDateInMillis);
|
||||
}
|
||||
if (licenseSpec.expiryDate != null) {
|
||||
licenses.field("expiry_date", licenseSpec.expiryDate);
|
||||
} else {
|
||||
licenses.field("expiry_date_in_millis", licenseSpec.expiryDateInMillis);
|
||||
}
|
||||
licenses.field("version", licenseSpec.version);
|
||||
licenses.endObject();
|
||||
licenses.endArray();
|
||||
licenses.endObject();
|
||||
return licenses.string();
|
||||
}
|
||||
|
||||
public static License generateLicenses(LicenseSpec spec) {
|
||||
License.Builder builder = License.builder()
|
||||
.uid(spec.uid)
|
||||
.feature(spec.feature)
|
||||
.type(spec.type)
|
||||
.subscriptionType(spec.subscriptionType)
|
||||
.issuedTo(spec.issuedTo)
|
||||
.issuer(spec.issuer)
|
||||
.maxNodes(spec.maxNodes);
|
||||
|
||||
if (spec.expiryDate != null) {
|
||||
builder.expiryDate(DateUtils.endOfTheDay(spec.expiryDate));
|
||||
} else {
|
||||
builder.expiryDate(spec.expiryDateInMillis);
|
||||
}
|
||||
if (spec.issueDate != null) {
|
||||
builder.issueDate(DateUtils.beginningOfTheDay(spec.issueDate));
|
||||
} else {
|
||||
builder.issueDate(spec.issueDateInMillis);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static void assertLicenseSpec(LicenseSpec spec, License license) {
|
||||
MatcherAssert.assertThat(license.uid(), equalTo(spec.uid));
|
||||
MatcherAssert.assertThat(license.issuedTo(), equalTo(spec.issuedTo));
|
||||
MatcherAssert.assertThat(license.issuer(), equalTo(spec.issuer));
|
||||
MatcherAssert.assertThat(license.type(), equalTo(spec.type));
|
||||
MatcherAssert.assertThat(license.maxNodes(), equalTo(spec.maxNodes));
|
||||
if (spec.issueDate != null) {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(DateUtils.beginningOfTheDay(spec.issueDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(spec.issueDateInMillis));
|
||||
}
|
||||
if (spec.expiryDate != null) {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(DateUtils.endOfTheDay(spec.expiryDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(spec.expiryDateInMillis));
|
||||
}
|
||||
}
|
||||
|
||||
public static class LicenseSpec {
|
||||
public final int version;
|
||||
public final String feature;
|
||||
public final String issueDate;
|
||||
public final long issueDateInMillis;
|
||||
public final String expiryDate;
|
||||
public final long expiryDateInMillis;
|
||||
public final String uid;
|
||||
public final String type;
|
||||
public final String subscriptionType;
|
||||
public final String issuedTo;
|
||||
public final String issuer;
|
||||
public final int maxNodes;
|
||||
|
||||
public LicenseSpec(String issueDate, String expiryDate) {
|
||||
this(License.VERSION_CURRENT, UUID.randomUUID().toString(), "feature", issueDate, expiryDate, "trial", "none", "customer",
|
||||
"elasticsearch", 5);
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, long issueDateInMillis, long expiryDateInMillis, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDateInMillis = issueDateInMillis;
|
||||
this.issueDate = null;
|
||||
this.expiryDateInMillis = expiryDateInMillis;
|
||||
this.expiryDate = null;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, String issueDate, String expiryDate, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDate = issueDate;
|
||||
this.issueDateInMillis = -1;
|
||||
this.expiryDate = expiryDate;
|
||||
this.expiryDateInMillis = -1;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,7 +0,0 @@
subprojects {
  project.afterEvaluate {
    project.forbiddenPatterns {
      exclude '**/*.key'
    }
  }
}
@@ -1,11 +0,0 @@
es.logger.level=INFO
log4j.rootLogger=${es.logger.level}, out

log4j.logger.org.apache.http=INFO, out
log4j.additivity.org.apache.http=false

log4j.logger.org.elasticsearch.license=TRACE

log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
Binary file not shown.
@@ -1,11 +0,0 @@

/*
 * Messy tests that depend on mustache directly. Fix these!
 */

apply plugin: 'elasticsearch.messy-test'

dependencies {
  testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'testArtifacts')
  testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
}
|
@ -1,393 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.mustache.MustachePlugin;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.common.text.TextTemplate;
|
||||
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
|
||||
import org.elasticsearch.xpack.watcher.actions.ExecutableActions;
|
||||
import org.elasticsearch.xpack.watcher.condition.always.ExecutableAlwaysCondition;
|
||||
import org.elasticsearch.xpack.watcher.execution.TriggeredExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.input.Input;
|
||||
import org.elasticsearch.xpack.watcher.input.search.ExecutableSearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInputFactory;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.ExecutableSimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.support.WatcherScript;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
|
||||
import org.elasticsearch.xpack.watcher.watch.Payload;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.chrono.ISOChronology;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
|
||||
import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.getRandomSupportedSearchType;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, supportsDedicatedMasters = false,
|
||||
numDataNodes = 1)
|
||||
public class SearchInputIT extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
Collection<Class<? extends Plugin>> types = new ArrayList<>();
|
||||
types.addAll(super.nodePlugins());
|
||||
types.add(MustachePlugin.class);
|
||||
types.add(CustomScriptContextPlugin.class);
|
||||
return types;
|
||||
}
|
||||
|
||||
private static final String TEMPLATE_QUERY = "{\"query\":{\"bool\":{\"must\":{\"match\":{\"event_type\":{\"query\":\"a\"," +
|
||||
"\"type\":\"boolean\"}}},\"filter\":{\"range\":{\"_timestamp\":" +
|
||||
"{\"from\":\"{{ctx.trigger.scheduled_time}}||-{{seconds_param}}\",\"to\":\"{{ctx.trigger.scheduled_time}}\"," +
|
||||
"\"include_lower\":true,\"include_upper\":true}}}}}}";
|
||||
|
||||
@Override
|
||||
public Settings nodeSettings(int nodeOrdinal) {
|
||||
final Path tempDir = createTempDir();
|
||||
final Path configPath = tempDir.resolve("config");
|
||||
final Path scriptPath = configPath.resolve("scripts");
|
||||
try {
|
||||
Files.createDirectories(scriptPath);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("failed to create config dir");
|
||||
|
||||
}
|
||||
try (InputStream stream = SearchInputIT.class.getResourceAsStream("/org/elasticsearch/xpack/watcher/input/search/config/scripts" +
|
||||
"/test_disk_template.mustache");
|
||||
OutputStream out = Files.newOutputStream(scriptPath.resolve("test_disk_template.mustache"))) {
|
||||
Streams.copy(stream, out);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("failed to copy mustache template");
|
||||
}
|
||||
|
||||
|
||||
//Set path so ScriptService will pick up the test scripts
|
||||
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
|
||||
.put("path.conf", configPath).build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings transportClientSettings() {
|
||||
return Settings.builder()
|
||||
.put(super.transportClientSettings())
|
||||
.build();
|
||||
}
|
||||
|
||||
public void testExecute() throws Exception {
|
||||
SearchSourceBuilder searchSourceBuilder = searchSource().query(
|
||||
boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}")));
|
||||
SearchRequest searchRequest = client()
|
||||
.prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.request()
|
||||
.source(searchSourceBuilder);
|
||||
|
||||
WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
|
||||
WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
|
||||
WatchExecutionContext ctx = new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(0, UTC), emptyMap())),
|
||||
new DateTime(0, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
|
||||
timeValueSeconds(5));
|
||||
SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());
|
||||
|
||||
assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
|
||||
assertNotNull(result.executedRequest());
|
||||
assertThat(result.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertEquals(result.executedRequest().searchType(), request.getRequest().searchType());
|
||||
assertArrayEquals(result.executedRequest().indices(), request.getRequest().indices());
|
||||
assertEquals(result.executedRequest().indicesOptions(), request.getRequest().indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(result);
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
|
||||
}
|
||||
|
||||
public void testSearchInlineTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
Map<String, Object> triggerParams = new HashMap<String, Object>();
|
||||
triggerParams.put("triggered_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
triggerParams.put("scheduled_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
Map<String, Object> ctxParams = new HashMap<String, Object>();
|
||||
ctxParams.put("id", ctx.id().value());
|
||||
ctxParams.put("metadata", null);
|
||||
ctxParams.put("vars", new HashMap<String, Object>());
|
||||
ctxParams.put("watch_id", "test-watch");
|
||||
ctxParams.put("trigger", triggerParams);
|
||||
ctxParams.put("payload", new Payload.Simple().data());
|
||||
ctxParams.put("execution_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
|
||||
Map<String, Object> expectedParams = new HashMap<String, Object>();
|
||||
expectedParams.put("seconds_param", "30s");
|
||||
expectedParams.put("ctx", ctxParams);
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.inline(TEMPLATE_QUERY).lang("mustache").params(params).build();
|
||||
|
||||
SearchRequest request = client().prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
if (getNumShards("test-search-index").numPrimaries > 1) {
|
||||
assertEquals(executedResult.executedRequest().searchType(), request.searchType());
|
||||
}
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(executedResult);
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
|
||||
}
|
||||
|
||||
public void testSearchIndexedTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
PutStoredScriptRequest indexedScriptRequest = client().admin().cluster().preparePutStoredScript()
|
||||
.setId("test-template")
|
||||
.setScriptLang("mustache")
|
||||
.setSource(new BytesArray(TEMPLATE_QUERY))
|
||||
.request();
|
||||
assertThat(client().admin().cluster().putStoredScript(indexedScriptRequest).actionGet().isAcknowledged(), is(true));
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.indexed("test-template").lang("mustache").params(params).build();
|
||||
|
||||
jsonBuilder().value(TextTemplate.indexed("test-template").params(params).build()).bytes();
|
||||
SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
if (getNumShards("test-search-index").numPrimaries > 1) {
|
||||
assertEquals(executedResult.executedRequest().searchType(), request.searchType());
|
||||
}
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(executedResult);
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
|
||||
|
||||
}
|
||||
|
||||
public void testSearchOnDiskTemplate() throws Exception {
|
||||
WatchExecutionContext ctx = createContext();
|
||||
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("seconds_param", "30s");
|
||||
|
||||
WatcherScript template = WatcherScript.file("test_disk_template").lang("mustache").params(params).build();
|
||||
SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.setIndices("test-search-index").request();
|
||||
|
||||
SearchInput.Result executedResult = executeSearchInput(request, template, ctx);
|
||||
|
||||
assertNotNull(executedResult.executedRequest());
|
||||
assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
|
||||
assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
|
||||
}
|
||||
|
||||
public void testDifferentSearchType() throws Exception {
|
||||
SearchSourceBuilder searchSourceBuilder = searchSource().query(
|
||||
boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))
|
||||
);
|
||||
SearchType searchType = getRandomSupportedSearchType();
|
||||
|
||||
SearchRequest searchRequest = client()
|
||||
.prepareSearch()
|
||||
.setSearchType(searchType)
|
||||
.request()
|
||||
.source(searchSourceBuilder);
|
||||
|
||||
WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);
|
||||
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
|
||||
WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
|
||||
WatchExecutionContext ctx = new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(0, UTC), emptyMap())),
|
||||
new DateTime(0, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
|
||||
timeValueSeconds(5));
|
||||
SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());
|
||||
|
||||
assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
|
||||
assertNotNull(result.executedRequest());
|
||||
assertThat(result.status(), is(Input.Result.Status.SUCCESS));
|
||||
assertEquals(result.executedRequest().searchType(), searchType);
|
||||
assertArrayEquals(result.executedRequest().indices(), searchRequest.indices());
|
||||
assertEquals(result.executedRequest().indicesOptions(), searchRequest.indicesOptions());
|
||||
|
||||
XContentSource source = toXContentSource(result);
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
|
||||
assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
|
||||
}
|
||||
|
||||
public void testParserValid() throws Exception {
|
||||
SearchRequest searchRequest = client().prepareSearch()
|
||||
.setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
|
||||
.request()
|
||||
.source(searchSource()
|
||||
.query(boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
|
||||
.from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))));
|
||||
|
||||
TimeValue timeout = randomBoolean() ? TimeValue.timeValueSeconds(randomInt(10)) : null;
|
||||
XContentBuilder builder = jsonBuilder().value(
|
||||
new SearchInput(new WatcherSearchTemplateRequest(searchRequest), null, timeout, null));
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
IndicesQueriesRegistry indicesQueryRegistry = internalCluster().getInstance(IndicesQueriesRegistry.class);
|
||||
SearchInputFactory factory = new SearchInputFactory(Settings.EMPTY, WatcherClientProxy.of(client()), indicesQueryRegistry,
|
||||
null, null, scriptService());
|
||||
|
||||
SearchInput searchInput = factory.parseInput("_id", parser);
|
||||
assertEquals(SearchInput.TYPE, searchInput.type());
|
||||
assertThat(searchInput.getTimeout(), equalTo(timeout));
|
||||
}
|
||||
|
||||
private WatchExecutionContext createContext() {
|
||||
return new TriggeredExecutionContext(
|
||||
new Watch("test-watch",
|
||||
new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
|
||||
new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
|
||||
new ExecutableAlwaysCondition(logger),
|
||||
null,
|
||||
null,
|
||||
new ExecutableActions(new ArrayList<ActionWrapper>()),
|
||||
null,
|
||||
new WatchStatus(new DateTime(50000, UTC), emptyMap())),
|
||||
new DateTime(60000, UTC),
|
||||
new ScheduleTriggerEvent("test-watch", new DateTime(60000, UTC), new DateTime(60000, UTC)),
|
||||
timeValueSeconds(5));
|
||||
}
|
||||
|
||||
private SearchInput.Result executeSearchInput(SearchRequest request, WatcherScript template,
|
||||
WatchExecutionContext ctx) throws IOException {
|
||||
createIndex("test-search-index");
|
||||
ensureGreen("test-search-index");
|
||||
SearchInput.Builder siBuilder = SearchInput.builder(new WatcherSearchTemplateRequest(request, template));
|
||||
|
||||
SearchInput si = siBuilder.build();
|
||||
|
||||
ExecutableSearchInput searchInput = new ExecutableSearchInput(si, logger, WatcherClientProxy.of(client()),
|
||||
watcherSearchTemplateService(), null);
|
||||
return searchInput.execute(ctx, new Payload.Simple());
|
||||
}
|
||||
|
||||
protected WatcherSearchTemplateService watcherSearchTemplateService() {
|
||||
String master = internalCluster().getMasterName();
|
||||
return new WatcherSearchTemplateService(internalCluster().clusterService(master).getSettings(),
|
||||
internalCluster().getInstance(ScriptService.class, master),
|
||||
internalCluster().getInstance(IndicesQueriesRegistry.class, master),
|
||||
internalCluster().getInstance(AggregatorParsers.class, master),
|
||||
internalCluster().getInstance(Suggesters.class, master)
|
||||
);
|
||||
}
|
||||
|
||||
protected ScriptService scriptService() {
|
||||
return internalCluster().getInstance(ScriptService.class);
|
||||
}
|
||||
|
||||
private XContentSource toXContentSource(SearchInput.Result result) throws IOException {
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
result.executedRequest().source().toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
return new XContentSource(builder);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom plugin that registers XPack script context.
|
||||
*/
|
||||
public static class CustomScriptContextPlugin extends Plugin implements ScriptPlugin {
|
||||
|
||||
@Override
|
||||
public ScriptContext.Plugin getCustomScriptContexts() {
|
||||
return WatcherScript.CTX_PLUGIN;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

/**
 * This package contains tests that use mustache to test what looks
 * to be unrelated functionality, or functionality that should be
 * tested with a mock instead. Instead of doing an epic battle
 * with these tests, they are temporarily moved here to the mustache
 * module's tests, but that is likely not where they belong. Please
 * help by cleaning them up and we can remove this package!
 *
 * <ul>
 * <li>If the test is testing templating integration with another core subsystem,
 * fix it to use a mock instead, so it can be in the core tests again</li>
 * <li>If the test is just being lazy, and does not really need templating to test
 * something, clean it up!</li>
 * </ul>
 */

package org.elasticsearch.messy.tests;
@@ -1,26 +0,0 @@
{
  "query": {
    "bool": {
      "must" : [
        {
          "match": {
            "event_type": {
              "query": "a",
              "type": "boolean"
            }
          }
        },
        {
          "range": {
            "_timestamp": {
              "from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
              "to": "{{ctx.trigger.scheduled_time}}",
              "include_lower": true,
              "include_upper": true
            }
          }
        }
      ]
    }
  }
}
@@ -1,26 +0,0 @@
{
  "query": {
    "bool": {
      "must" : [
        {
          "match": {
            "event_type": {
              "query": "a",
              "type": "boolean"
            }
          }
        },
        {
          "range": {
            "_timestamp": {
              "from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
              "to": "{{ctx.trigger.scheduled_time}}",
              "include_lower": true,
              "include_upper": true
            }
          }
        }
      ]
    }
  }
}
@@ -0,0 +1,157 @@
---
setup:
  - do:
      cluster.health:
        wait_for_status: yellow
  - do: {xpack.watcher.stats:{}}
  - do:
      index:
        index: idx
        type: type
        id: 1
        body: >
          {
            "date" : "2015-01-01T00:00:00",
            "value" : "val_1"
          }
  - do:
      index:
        index: idx
        type: type
        id: 2
        body: >
          {
            "date" : "2015-01-02T00:00:00",
            "value" : "val_2"
          }
  - do:
      index:
        index: idx
        type: type
        id: 3
        body: >
          {
            "date" : "2015-01-03T00:00:00",
            "value" : "val_3"
          }
  - do:
      index:
        index: idx
        type: type
        id: 4
        body: >
          {
            "date" : "2015-01-04T00:00:00",
            "value" : "val_4"
          }
  - do:
      indices.refresh:
        index: idx

---
"Test input mustache integration":
  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-01-04T00:00:00"
            },
            "watch" : {
              "trigger" : { "schedule" : { "interval" : "10s" } },
              "actions" : {
                "dummy" : {
                  "logging" : {
                    "text" : "executed!"
                  }
                }
              },
              "input" : {
                "search" : {
                  "request" : {
                    "indices" : "idx",
                    "body" : {
                      "query" : {
                        "bool" : {
                          "filter" : [
                            {
                              "range" : {
                                "date" : {
                                  "lte" : "{{ctx.trigger.scheduled_time}}",
                                  "gte" : "{{ctx.trigger.scheduled_time}}||-3d"
                                }
                              }
                            }
                          ]
                        }
                      }
                    }
                  }
                }
              }
            }
          }
  - match: { "watch_record.result.input.type": "search" }
  - match: { "watch_record.result.input.status": "success" }
  - match: { "watch_record.result.input.payload.hits.total": 4 }
  # makes sure that the mustache template snippets have been resolved correctly:
  - match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-3d" }
  - match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }

---
"Test transform mustache integration":
  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-01-04T00:00:00"
            },
            "watch" : {
              "trigger" : { "schedule" : { "interval" : "10s" } },
              "input" : { "simple" : { "value" : "val_3" } },
              "actions" : {
                "dummy" : {
                  "logging" : {
                    "text" : "executed!"
                  }
                }
              },
              "transform" : {
                "search" : {
                  "request" : {
                    "indices" : "idx",
                    "body" : {
                      "query" : {
                        "bool" : {
                          "filter" : [
                            {
                              "range" : {
                                "date" : {
                                  "lte" : "{{ctx.trigger.scheduled_time}}",
                                  "gte" : "{{ctx.trigger.scheduled_time}}||-1d"
                                }
                              }
                            },
                            {
                              "term" : {
                                "value" : "{{ctx.payload.value}}"
                              }
                            }
                          ]
                        }
                      }
                    }
                  }
                }
              }
            }
          }
  - match: { "watch_record.result.transform.type": "search" }
  - match: { "watch_record.result.transform.status": "success" }
  - match: { "watch_record.result.transform.payload.hits.total": 1 }
  - match: { "watch_record.result.transform.payload.hits.hits.0._id": "3" }
  # makes sure that the mustache template snippets have been resolved correctly:
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-1d" }
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.1.term.value.value": "val_3" }
@ -1,440 +0,0 @@
|
|||
SOFTWARE END USER LICENSE AGREEMENT
|
||||
|
||||
READ THIS AGREEMENT CAREFULLY, WHICH CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS YOUR USE OF
|
||||
ELASTICSEARCH’S PROPRIETARY SOFTWARE. BY INSTALLING AND/OR USING SUCH SOFTWARE, YOU ARE INDICATING THAT YOU AGREE TO THE
|
||||
TERMS AND CONDITIONS SET FORTH IN THIS AGREEMENT. IF YOU DO NOT AGREE WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT
|
||||
INSTALL OR USE ANY OF THE SOFTWARE.
|
||||
|
||||
This END USER LICENSE AGREEMENT (this “Agreement") is entered into by and between the applicable Elasticsearch
|
||||
entity referenced in Attachment 1 hereto (“Elasticsearch”) and the person or entity (“You”) that has downloaded any of
|
||||
Elasticsearch’s proprietary software to which this Agreement is attached or in connection with which this Agreement is
|
||||
presented to You (collectively, the “Software”). This Agreement is effective upon the earlier of the date on the
|
||||
commencement of any License granted pursuant to Section 1.1. below (as applicable, the “Effective Date”).
|
||||
|
||||
1. SOFTWARE LICENSE AND RESTRICTIONS
|
||||
1.1 License Grants.
|
||||
(a) Trial Version License. Subject to the terms and conditions of this Agreement, Elasticsearch agrees to
|
||||
grant, and does hereby grant to You, for a period of thirty (30) days from the date on which You first install the
|
||||
Software (the “Trial Term”), a License to the to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Trial Version of the Software. You understand and agree that upon the expiration of a Trial Term,
|
||||
You will no longer be able to use the Software, unless you either (i) purchase a Subscription, in which case You will
|
||||
receive a License under Section 1.1(b) below to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Subscription that You purchase, (ii) undertake the Registration of Your use of the Software with
|
||||
Elasticsearch, in which case You will receive a License under Section 1.1(c) below to the Basic Version of the Software
|
||||
or (iii) obtain from Elasticsearch written consent (e-mail sufficient) to extend the Trial Term, which may be granted by
|
||||
Elasticsearch in its sole and absolute discretion.
|
||||
(b) Subscription License. Subject to the terms and conditions of this Agreement and complete payment of any and
|
||||
all applicable Subscription fees, Elasticsearch agrees to grant, and does hereby grant to You during the Subscription
|
||||
Term, and for the restricted scope of this Agreement, a License (i) to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Subscription level that You have purchased, (ii) for the number of Nodes (as defined
|
||||
in the Elasticsearch Support Services Policy) and (iii) for the specific project for which you have purchased a
|
||||
Subscription. The level of Subscription, the number of Nodes and specific project for which you have purchased such
|
||||
Subscription, are set forth on the applicable ordering document entered into by Elasticsearch and You for the purchase
|
||||
of the applicable Subscription (“Order Form”).
|
||||
(c) Basic Version License. Subject to the terms and conditions of this Agreement, and in consideration of the
|
||||
Registration of Your use the Software, Elasticsearch agrees to grant, and does hereby grant to You, for a period of one
|
||||
(1) year from the date of Registration (“Basic Term”), a License to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Basic Version of the Software.
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication, estoppel or otherwise. You agree not to: (i)
|
||||
reverse engineer or decompile, decrypt, disassemble or otherwise reduce any Software or any portion thereof to
|
||||
human-readable form, except and only to the extent any such restriction is prohibited by applicable law, (ii) deploy the
|
||||
Software on more Nodes (as defined in Elasticsearch’s Support Services Policy) than are permitted under the applicable
|
||||
License grant in Section 1.1 above (iii) where You have purchased a Subscription, use the Software in connection with
|
||||
any project other than the project for which you have purchased such Subscription, as identified on the applicable Order
|
||||
Form, (iv) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted
|
||||
in this Agreement; (v) except as expressly permitted in Section 1.1 above, transfer, sell, rent, lease, distribute,
|
||||
sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (vi) except as may be
|
||||
expressly permitted on an applicable Order Form, use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (vii) circumvent the limitations on use of the Software that are imposed or preserved by any License
|
||||
Key, (viii) alter or remove any proprietary notices in the Software; or (ix) make available to any third party any
|
||||
analysis of the results of operation of the Software, including benchmarking results, without the prior written consent
|
||||
of Elasticsearch. The Software may contain or be provided with open source libraries, components, utilities and other
|
||||
open source software (collectively, “Open Source Software”), which Open Source Software may have applicable license
|
||||
terms as identified on a website designated by Elasticsearch or otherwise provided with the Software or Documentation.
|
||||
Notwithstanding anything to the contrary herein, use of the Open Source Software shall be subject to the license terms
|
||||
and conditions applicable to such Open Source Software, to the extent required by the applicable licensor (which terms
|
||||
shall not restrict the license rights granted to You hereunder, but may contain additional rights).
|
||||
1.3 Audit Rights. You agree that, unless such right is waived in writing by Elasticsearch, Elasticsearch shall have the
|
||||
right, upon fifteen (15) days’ notice to You, to audit Your use of the Software for compliance with any quantitative
|
||||
limitations on Your use of the Software that are set forth in the applicable Order Form. You agree to provide
|
||||
Elasticsearch with the necessary access to the Software to conduct such an audit either (i) remotely, or (ii) if remote
|
||||
performance is not possible, at Your facilities, during normal business hours and no more than one (1) time in any
|
||||
twelve (12) month period. In the event any such audit reveals that You have used the Software in excess of the
|
||||
applicable quantitative limitations, You agree to solely for Your internal business operations, a limited,
|
||||
non-exclusive, non-transferable, fully paid up, right and license (without the right to grant or authorize sublicenses)
|
||||
promptly pay to Elasticsearch an amount equal to the difference between the fees actually paid and the fees that You
|
||||
should have paid to remain in compliance with such quantitative limitations. This Section 1.3 shall survive for a
|
||||
period of two (2) years from the termination or expiration of this Agreement.
|
||||
1.4 Cluster Metadata. You understand and agree that once deployed, and on a daily basis, the Software may provide
|
||||
metadata to Elasticsearch about Your cluster statistics and associates that metadata with Your IP address. However, no
|
||||
other information is provided to Elasticsearch by the Software, including any information about the data You process or
|
||||
store in connection with your use of the Software. Instructions for disabling this feature are contained in the
|
||||
Software, however leaving this feature active enables Elasticsearch to gather cluster statistics and provide an improved
|
||||
level of support to You.
|
||||
|
||||
2. TERM AND TERMINATION
|
||||
2.1 Term. Unless earlier terminated under Section 2.2 below, this Agreement shall commence on the Effective Date, and
|
||||
shall continue in force for the term of the last to expire applicable license set forth in Section 1.1 above.
|
||||
2.2 Termination. Either party may, upon written notice to the other party, terminate this Agreement for material breach
|
||||
by the other party automatically and without any other formality, if such party has failed to cure such material breach
|
||||
within thirty (30) days of receiving written notice of such material breach from the non-breaching party.
|
||||
Notwithstanding the foregoing, this Agreement shall automatically terminate in the event that You intentionally breach
|
||||
the scope of the license granted in Section 1.1 of this Agreement, provided that Elasticsearch reserves the right to
|
||||
retroactively waive such automatic termination upon written notice to You.
|
||||
2.3 Post Termination or Expiration. Upon termination or expiration of this Agreement, for any reason, You shall
|
||||
promptly cease the use of the Software and Documentation and destroy (and certify to Elasticsearch in writing the fact
|
||||
of such destruction), or return to Elasticsearch, all copies of the Software and Documentation then in Your possession
|
||||
or under Your control.
|
||||
2.4 Survival. Sections 2.3, 2.4, 3, 4 and 5 (as any such Sections may be modified by Attachment 1, if applicable) shall
|
||||
survive any termination or expiration of this Agreement.
|
||||
3. LIMITED WARRANTY AND DISCLAIMER OF WARRANTIES
|
||||
3.1 Limited Performance Warranty. Subject to You purchasing a Subscription, Elasticsearch warrants that during the
|
||||
applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation.
|
||||
In the event of a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be
|
||||
for Elasticsearch to (i) correct any failure(s) of the Software to perform in all material respects in accordance with
|
||||
the Documentation or (ii) if Elasticsearch is unable to provide such a correction within thirty (30) days of receipt of
|
||||
notice of the applicable non-conformity, promptly refund to Customer any pre-paid, unused fees paid by You to
|
||||
Elasticsearch for the applicable Subscription. The warranty set forth in this Section 3.1 does not apply if the
|
||||
applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elasticsearch; (b) has not been
|
||||
used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has
|
||||
been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment,
|
||||
products, or systems not meeting specifications identified by Elasticsearch in the Documentation. Additionally, the
|
||||
warranties set forth herein only apply when notice of a warranty claim is provided to Elasticsearch within the
|
||||
applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to
|
||||
software or hardware not supplied by Elasticsearch.
|
||||
3.2 Malicious Code. Elasticsearch represents and warrants that prior to making it available for delivery to You,
|
||||
Elasticsearch will use standard industry practices including, without limitation, the use of an updated commercial
|
||||
anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of
|
||||
a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be for
|
||||
Elasticsearch to replace the Software with Software that does not contain any Malicious Code.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT
|
||||
WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY
|
||||
REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4. LIMITATION OF LIABILITY
|
||||
The provisions of this Section 4 apply if You have not purchased a Subscription. If You have purchased a Subscription,
|
||||
then the limitations of liability set forth in the applicable Subscription Agreement will apply in lieu of those set
|
||||
forth in this Section 4.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT,
|
||||
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO
|
||||
USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR
|
||||
TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE
|
||||
OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT
|
||||
CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
4.2 Damages Cap. IN NO EVENT SHALL ELASTICSEARCH’S OR ITS LICENSORS’ AGGREGATE, CUMULATIVE LIABILITY UNDER THIS
|
||||
AGREEMENT EXCEED ONE THOUSAND DOLLARS ($1,000).
|
||||
4.3 YOU AGREE THAT THE FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS ARE A REASONABLE ALLOCATION OF THE RISK BETWEEN
|
||||
THE PARTIES AND WILL APPLY TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL
|
||||
PURPOSE.
|
||||
5. DEFINITIONS
|
||||
The following terms have the meanings ascribed to them below:
|
||||
5.1 “License” means a limited, non-exclusive, non-transferable, fully paid up, right and license (without the right to
|
||||
grant or authorize sublicenses) solely for Your internal business operations to (i) install and use, in object code
|
||||
format, the applicable Eligible Features and Functions of the Software, (ii) use, and distribute internally a reasonable
|
||||
number of copies of the Documentation, provided that You must include on such copies all Marks and Notices; (iii) permit
|
||||
Contractors to use the Software and Documentation as set forth in (i) and (ii) above, provided that such use must be
|
||||
solely for Your benefit, and You shall be responsible for all acts and omissions of such Contractors in connection with
|
||||
their use of the Software that are contrary to the terms and conditions of this Agreement.
|
||||
5.2 “License Key” means an alphanumeric code that enables the Eligible Features and Functions of the Software.
|
||||
5.3 “Basic Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription, but which does require Registration.
|
||||
5.4 “Contractor” means third party contractors performing services on Your behalf.
|
||||
5.5 “Documentation” means the published end user documentation provided by Elasticsearch with the Software.
|
||||
5.6 “Eligible Features and Functions” means those features and functions of the Software that are eligible for use with
|
||||
respect to a particular version of the Software or level of the Subscription. A list of the Eligible Features and
|
||||
Functions that correspond to each version of the Software and Subscription levels may be found at
|
||||
https://www.elastic.co/subscriptions.
|
||||
5.7 “Malicious Code” means any code that is designed to harm, or otherwise disrupt in any unauthorized manner, the
|
||||
operation of a recipient’s computer programs or computer systems or destroy or damage recipient’s data. For clarity,
|
||||
Malicious Code shall not include any software bugs or errors handled through Support Services, or any standard features
|
||||
or functions of the Software and/or any License Key that are intended to enforce the temporal and other limitations on
|
||||
the scope of the use of the Software to the scope of the license purchased by You.
|
||||
5.8 “Marks and Notices” means all Elasticsearch trademarks, trade names, logos and notices present on the Documentation
|
||||
as originally provided by Elasticsearch.
|
||||
5.9 “Registration” means Elasticsearch’s then-current process under which You may register Your use of the Software with
|
||||
Elasticsearch by providing certain information to Elasticsearch regarding Your use of the Software.
|
||||
5.10 “Subscription” means the right to receive Support Services and a License to the Software.
|
||||
5.11 “Subscription Term” means the period of time for which You have purchased a Subscription.
|
||||
5.12 “Trial Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription and without Registration.
|
||||
6. MISCELLANEOUS
|
||||
This Agreement, including Attachment 1 hereto, which is hereby incorporated herein by this reference, completely and
|
||||
exclusively states the entire agreement of the parties regarding the subject matter herein, and it supersedes, and its
|
||||
terms govern, all prior proposals, agreements, or other communications between the parties, oral or written, regarding
|
||||
such subject matter. For the avoidance of doubt, the parties hereby expressly acknowledge and agree that if You issue
|
||||
any purchase order or similar document in connection with Your purchase of a license to the Software, You will do so only
|
||||
for Your internal, administrative purposes and not with the intent to provide any contractual terms. This Agreement may
|
||||
not be modified except by a subsequently dated, written amendment that expressly amends this Agreement and which is
|
||||
signed on behalf of Elasticsearch and You, by duly authorized representatives. If any provision hereof is held
|
||||
unenforceable, this Agreement will continue without said provision and be interpreted to reflect the original intent of
|
||||
the parties.
|
||||
|
||||
|
||||
ATTACHMENT 1
|
||||
ADDITIONAL TERMS AND CONDITIONS
|
||||
|
||||
A. The following additional terms and conditions apply to all Customers with principal offices in the United States
|
||||
of America:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch, Inc., a Delaware corporation.
|
||||
|
||||
(2) Government Rights. The Software product is "Commercial Computer Software," as that term is defined in 48 C.F.R.
|
||||
2.101, and as the term is used in 48 C.F.R. Part 12, and is a Commercial Item comprised of "commercial computer
|
||||
software" and "commercial computer software documentation". If acquired by or on behalf of a civilian agency, the U.S.
|
||||
Government acquires this commercial computer software and/or commercial computer software documentation subject to the
|
||||
terms of this Agreement, as specified in 48 C.F.R. 12.212 (Computer Software) and 12.211 (Technical Data) of the Federal
|
||||
Acquisition Regulation ("FAR") and its successors. If acquired by or on behalf of any agency within the Department of
|
||||
Defense ("DOD"), the U.S. Government acquires this commercial computer software and/or commercial computer software
|
||||
documentation subject to the terms of the Elasticsearch Software License Agreement as specified in 48 C.F.R. 227.7202-3
|
||||
and 48 C.F.R. 227.7202-4 of the DOD FAR Supplement ("DFARS") and its successors, and consistent with 48 C.F.R. 227.7202.
|
||||
This U.S. Government Rights clause, consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202 is in lieu of, and
|
||||
supersedes, any other FAR, DFARS, or other clause or provision that addresses Government rights in computer software,
|
||||
computer software documentation or technical data related to the Software under this Agreement and in any Subcontract
|
||||
under which this commercial computer software and commercial computer software documentation is acquired or licensed.
|
||||
|
||||
(3) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to U.S. export control laws and regulations, including but not limited to the International Traffic In Arms Regulations
|
||||
(“ITAR”) (22 C.F.R. Parts 120-130 (2010)); the Export Administration Regulations ("EAR") (15 C.F.R. Parts 730-774
|
||||
(2010)); the U.S. antiboycott regulations in the EAR and U.S. Department of the Treasury regulations; the economic
|
||||
sanctions regulations and guidelines of the U.S. Department of the Treasury, Office of Foreign Assets Control, and the
|
||||
USA Patriot Act (Title III of Pub. L. 107-56, signed into law October 26, 2001), as amended. You are now and will
|
||||
remain in the future compliant with all such export control laws and regulations, and will not export, re-export,
|
||||
or otherwise transfer any Elasticsearch goods, software or technology or disclose any Elasticsearch software or technology
|
||||
to any person contrary to such laws or regulations. You acknowledge that remote access to the Software may in certain
|
||||
circumstances be considered a re-export of Software, and accordingly, may not be granted in contravention of U.S. export
|
||||
control laws and regulations.
|
||||
(4) Governing Law, Jurisdiction and Venue.
|
||||
(a) Customers in California. If Customer is located in California (as determined by the Customer address on the
|
||||
applicable Order Form, or for a trial license under 1.1(a), the location of the person who installed the Software), this
|
||||
Agreement will be governed by the laws of the State of California, without regard to its conflict of laws principles,
|
||||
and all suits hereunder will be brought solely in Federal Court for the Northern District of California, or if that
|
||||
court lacks subject matter jurisdiction, in any California State Court located in Santa Clara County.
|
||||
(b) Customers Outside of California. If Customer is located anywhere other than California (as determined by the
|
||||
Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of the person who installed
|
||||
the Software), this Agreement will be governed by the laws of the State of Delaware, without regard to its conflict of
|
||||
laws principles, and all suits hereunder will be brought solely in Federal Court for the District of Delaware, or if
|
||||
that court lacks subject matter jurisdiction, in any Delaware State Court located in Wilmington, Delaware.
|
||||
(c) All Customers. This Agreement shall not be governed by the 1980 UN Convention on Contracts for the International
|
||||
Sale of Goods. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any
|
||||
action or proceeding in any of the applicable courts set forth in (a) or (b) above, based upon any alleged lack of
|
||||
personal jurisdiction, improper venue, forum non conveniens, or any similar claim or defense.
|
||||
(d) Equitable Relief. A breach or threatened breach, by either party of Section 4 may cause irreparable harm for
|
||||
which the non-breaching party shall be entitled to seek injunctive relief without being required to post a bond.
|
||||
|
||||
B. The following additional terms and conditions apply to all Customers with principal offices in Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch B.C. Ltd., a corporation
|
||||
incorporated under laws of the Province of British Columbia.
|
||||
|
||||
(2) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to the restrictions and controls set out in Section A(3) above as well as those imposed by the Export and Import Permits
|
||||
Act (Canada) and the regulations thereunder and that you will comply with all applicable laws and regulations. Without
|
||||
limitation, You acknowledge that the Marvel Software, or any portion thereof, will not be exported: (a) to any country
|
||||
on Canada's Area Control List; (b) to any country subject to UN Security Council embargo or action; or (c) contrary to
|
||||
Canada's Export Control List Item 5505. You are now and will remain in the future compliant with all such export control
|
||||
laws and regulations, and will not export, re-export, or otherwise transfer any Elasticsearch goods, software or technology
|
||||
or disclose any Elasticsearch software or technology to any person contrary to such laws or regulations. You will not
|
||||
export or re-export the Marvel Software, or any portion thereof, directly or indirectly, in violation of the Canadian
|
||||
export administration laws and regulations to any country or end user, or to any end user who you know or have reason to
|
||||
know will utilize them in the design, development or production of nuclear, chemical or biological weapons. You further
|
||||
acknowledge that the Marvel Software product may include technical data subject to such Canadian export regulations.
|
||||
Elasticsearch does not represent that the Marvel Software is appropriate or available for use in all countries.
|
||||
Elasticsearch prohibits accessing materials from countries or states where contents are illegal. You are using the
|
||||
Marvel Software on your own initiative and you are responsible for compliance with all applicable laws. You hereby agree
|
||||
to indemnify Elasticsearch and its affiliates from any claims, actions, liability or expenses (including reasonable
|
||||
lawyers' fees) resulting from Your failure to act in accordance with the acknowledgements, agreements, and
|
||||
representations in this Section B(2).
|
||||
(3) Governing Law and Dispute Resolution. This Agreement shall be governed by the laws of the Province of Ontario and the
|
||||
federal laws of Canada applicable therein without regard to conflict of laws provisions. The parties hereby irrevocably
|
||||
waive any and all claims and defenses either might otherwise have in any such action or proceeding in any of such courts
|
||||
based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens or any similar claim or
|
||||
defense. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach,
|
||||
termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability
|
||||
of this agreement to arbitrate, (each, a “Dispute”), which the parties are unable to resolve after good faith
|
||||
negotiations, shall be submitted first to the upper management level of the parties. The parties, through their upper
|
||||
management level representatives shall meet within thirty (30) days of the Dispute being referred to them and if the
|
||||
parties are unable to resolve such Dispute within thirty (30) days of meeting, the parties agree to seek to resolve the
|
||||
Dispute through mediation with ADR Chambers in the City of Toronto, Ontario, Canada before pursuing any other
|
||||
proceedings. The costs of the mediator shall be shared equally by the parties. If the Dispute has not been resolved
|
||||
within thirty (30) days of the notice to desire to mediate, any party may terminate the mediation and proceed to
|
||||
arbitration and the matter shall be referred to and finally resolved by arbitration at ADR Chambers pursuant to the
|
||||
general ADR Chambers Rules for Arbitration in the City of Toronto, Ontario, Canada. The arbitration shall proceed in
|
||||
accordance with the provisions of the Arbitration Act (Ontario). The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two (2) party-appointed arbitrators are unable to agree on the chairman, the
|
||||
chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be
|
||||
independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to
|
||||
allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees
|
||||
related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any
|
||||
arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith.
|
||||
Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such
|
||||
court for judicial acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the
|
||||
foregoing, Elasticsearch shall have the right to institute an action in a court of proper jurisdiction for preliminary
|
||||
injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall
|
||||
only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
(4) Language. Any translation of this Agreement is done for local requirements and in the event of a dispute
|
||||
between the English and any non-English version, the English version of this Agreement shall govern. At the request of
|
||||
the parties, the official language of this Agreement and all communications and documents relating hereto is the English
|
||||
language, and the English-language version shall govern all interpretation of the Agreement. À la demande des parties,
|
||||
la langue officielle de la présente convention ainsi que toutes communications et tous documents s'y rapportant est la
|
||||
langue anglaise, et la version anglaise est celle qui régit toute interprétation de la présente convention.
|
||||
(5) Warranty Disclaimer. For Customers with principal offices in the Province of Québec, the following new sentence
|
||||
is to be added to the end of Section 3.3: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF
|
||||
DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL
|
||||
NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND
|
||||
DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
(6) Limitation of Liability. For Customers with principal offices in the Province of Québec, the following new
|
||||
sentence is to be added to the end of Section 4.1: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN
|
||||
TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS
|
||||
AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES
|
||||
BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
|
||||
C. The following additional terms and conditions apply to all Customers with principal offices outside of the United
|
||||
States of America and Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license in Germany is Elasticsearch Gmbh; in France is
|
||||
Elasticsearch SARL, in the United Kingdom is Elasticsearch Ltd, in Australia is Elasticsearch Pty Ltd., in Japan is
|
||||
Elasticsearch KK, in Sweden is Elasticsearch AB, in Norway is Elasticsearch AS and in all other countries is
|
||||
Elasticsearch BV.
|
||||
|
||||
(2) Choice of Law. This Agreement shall be governed by and construed in accordance with the laws of the State of New
|
||||
York, without reference to or application of choice of law rules or principles. Notwithstanding any choice of law
|
||||
provision or otherwise, the Uniform Computer Information Transactions Act (UCITA) and the United Nations Convention on
|
||||
the International Sale of Goods shall not apply.
|
||||
|
||||
(3) Arbitration. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence,
|
||||
breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or
|
||||
applicability of this agreement to arbitrate, (each, a “Dispute”) shall be referred to and finally resolved by
|
||||
arbitration under the rules and at the location identified below. The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two party-appointed arbitrators are unable to agree on the chairman, the chairman
|
||||
shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent
|
||||
of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between
|
||||
the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the
|
||||
arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall
|
||||
be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the
|
||||
award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial
|
||||
acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the foregoing, Elasticsearch
|
||||
shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending
|
||||
a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the
|
||||
arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
|
||||
(a) In addition, the following terms only apply to Customers with principal offices within Europe, the Middle East or Africa
|
||||
(EMEA):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the London
|
||||
Court of International Arbitration (“LCIA”) Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be London, England.
|
||||
|
||||
(b) In addition, the following terms only apply to Customers with principal offices within Asia Pacific, Australia &
|
||||
New Zealand:
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the Rules of
|
||||
Conciliation and Arbitration of the International Chamber of Commerce (“ICC”) in force on the date when the notice of
|
||||
arbitration is submitted in accordance with such Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be Singapore.
|
||||
|
||||
(c) In addition, the following terms only apply to Customers with principal offices within the Americas (excluding
|
||||
North America):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under
|
||||
International Dispute Resolution Procedures of the American Arbitration Association (“AAA”) in force on the date when
|
||||
the notice of arbitration is submitted in accordance with such Procedures (which Procedures are deemed to be
|
||||
incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York,
|
||||
USA. The seat, or legal place, of arbitration shall be New York, New York, USA.
|
||||
|
||||
(4) In addition, for Customers with principal offices within the UK, the following new sentence is added to the end
|
||||
of Section 4.1:
|
||||
|
||||
Nothing in this Agreement shall have effect so as to limit or exclude a party’s liability for death or personal injury
|
||||
caused by negligence or for fraud including fraudulent misrepresentation and this Section 4.1 shall take effect subject
|
||||
to this provision.
|
||||
|
||||
(5) In addition, for Customers with principal offices within France, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. Elasticsearch owns all right title and interest in and to the Software and
|
||||
any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software
|
||||
is granted to You by implication, or otherwise. You agree not to prepare derivative works from, modify, copy or use the
|
||||
Software in any manner except as expressly permitted in this Agreement; provided that You may copy the Software for
|
||||
archival purposes, only where such software is provided on a non-durable medium; and You may decompile the Software,
|
||||
where necessary for interoperability purposes and where necessary for the correction of errors making the software unfit
|
||||
for its intended purpose, if such right is not reserved by Elasticsearch as editor of the Software. Pursuant to article
|
||||
L122-6-1 of the French intellectual property code, Elasticsearch reserves the right to correct any bugs as necessary for
|
||||
the Software to serve its intended purpose. You agree not to: (i) transfer, sell, rent, lease, distribute, sublicense,
|
||||
loan or otherwise transfer the Software in whole or in part to any third party; (ii) use the Software for providing
|
||||
time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application
|
||||
services provider or other service offering; (iii) alter or remove any proprietary notices in the Software; or (iv) make
|
||||
available to any third party any analysis of the results of operation of the Software, including benchmarking results,
|
||||
without the prior written consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE WITH
|
||||
RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTICSEARCH DOES
|
||||
NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE
|
||||
UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT OR
|
||||
UNFORESEEABLE DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE
|
||||
PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT,
|
||||
INCLUDING NEGLIGENCE. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH, THROUGH
|
||||
GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU, OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1, OR IN CASE OF
|
||||
DEATH OR PERSONAL INJURY.
|
||||
(6) In addition, for Customers with principal offices within Australia, Sections 4.1, 4.2 and 4.3 of the Agreement
|
||||
are deleted and replaced with the following new Sections 4.1, 4.2 and 4.3:
|
||||
4.1 Disclaimer of Certain Damages. Subject to clause 4.3, a party is not liable for Consequential Loss however
|
||||
caused (including by the negligence of that party) suffered or incurred by the other party in connection with this
|
||||
agreement. “Consequential Loss” means loss of revenues, loss of reputation, indirect loss, loss of profits,
|
||||
consequential loss, loss of actual or anticipated savings, lost opportunities, including opportunities to
|
||||
enter into arrangements with third parties, loss or damage in connection with claims against it by third parties, or loss
|
||||
or corruption of data.
|
||||
4.2 Damages Cap. SUBJECT TO CLAUSES 4.1 AND 4.3, ANY LIABILITY OF ELASTICSEARCH FOR ANY LOSS OR DAMAGE, HOWEVER
|
||||
CAUSED (INCLUDING BY THE NEGLIGENCE OF ELASTICSEARCH), SUFFERED BY YOU IN CONNECTION WITH THIS AGREEMENT IS LIMITED TO
|
||||
THE AMOUNT YOU PAID, IN THE TWELVE (12) MONTHS IMMEDIATELY PRIOR TO THE EVENT GIVING RISE TO LIABILITY, UNDER THE
|
||||
ELASTICSEARCH SUPPORT SERVICES AGREEMENT IN CONNECTION WITH WHICH YOU OBTAINED THE LICENSE TO USE THE SOFTWARE. THE
|
||||
LIMITATION SET OUT IN THIS SECTION 4.2 IS AN AGGREGATE LIMIT FOR ALL CLAIMS, WHENEVER MADE.
|
||||
4.3 Limitation and Disclaimer Exceptions. If the Competition and Consumer Act 2010 (Cth) or
|
||||
any other legislation states that there is a guarantee in relation to any good or service supplied by Elasticsearch in
|
||||
connection with this agreement, and Elasticsearch’s liability for failing to comply with that guarantee cannot be
|
||||
excluded but may be limited, Sections 4.1 and 4.2 do not apply to that liability and instead Elasticsearch’s liability
|
||||
for such failure is limited (at Elasticsearch’s election) to, in the case of a supply of goods, Elasticsearch
|
||||
replacing the goods or supplying equivalent goods or repairing the goods, or in the case of a supply of services,
|
||||
Elasticsearch supplying the services again or paying the cost of having the services supplied again.
|
||||
(7) In addition, for Customers with principal offices within Japan, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication or otherwise. You agree not to: (i) prepare
|
||||
derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement or
|
||||
applicable law; (ii) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in
|
||||
whole or in part to any third party; (iii) use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (iv) alter or remove any proprietary notices in the Software; or (v) make available to any third party
|
||||
any analysis of the results of operation of the Software, including benchmarking results, without the prior written
|
||||
consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY
|
||||
INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE
|
||||
OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A
|
||||
BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH
|
||||
THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY
|
||||
OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import org.elasticsearch.gradle.MavenFilteringHack
|
||||
import org.elasticsearch.gradle.test.NodeInfo
|
||||
|
||||
import java.nio.charset.StandardCharsets
|
||||
|
||||
group 'org.elasticsearch.plugin'
|
||||
|
@ -27,10 +26,6 @@ licenseHeaders {
|
|||
}
|
||||
|
||||
dependencies {
|
||||
// license deps
|
||||
compile project(':x-plugins:elasticsearch:license:base')
|
||||
testCompile project(':x-plugins:elasticsearch:license:licensor')
|
||||
|
||||
// security deps
|
||||
compile project(path: ':modules:transport-netty3', configuration: 'runtime')
|
||||
compile 'dk.brics.automaton:automaton:1.11-8'
|
||||
|
@ -78,6 +73,11 @@ for (String module : ['', 'license-plugin/', 'security/', 'watcher/', 'monitorin
|
|||
}
|
||||
}
|
||||
|
||||
// make LicenseSigner available for testing signed licenses
|
||||
sourceSets.test.java {
|
||||
srcDir '../license-tools/src/main/java'
|
||||
}
|
||||
|
||||
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
|
||||
|
@ -117,8 +117,10 @@ forbiddenPatterns {
|
|||
|
||||
// TODO: standardize packaging config for plugins
|
||||
bundlePlugin {
|
||||
from(projectDir) {
|
||||
from(project(':x-plugins').projectDir) {
|
||||
include 'LICENSE.txt'
|
||||
}
|
||||
from(projectDir) {
|
||||
include 'NOTICE.txt'
|
||||
}
|
||||
from('bin/x-pack') {
|
||||
|
@ -236,29 +238,3 @@ thirdPartyAudit.excludes = [
|
|||
'javax.activation.URLDataSource',
|
||||
'javax.activation.UnsupportedDataTypeException'
|
||||
]
|
||||
|
||||
modifyPom { MavenPom pom ->
|
||||
pom.withXml { XmlProvider xml ->
|
||||
// first find if we have dependencies at all, and grab the node
|
||||
NodeList depsNodes = xml.asNode().get('dependencies')
|
||||
if (depsNodes.isEmpty()) {
|
||||
return
|
||||
}
|
||||
|
||||
// find the 'base' dependency and replace it with the correct name because the project name is
|
||||
// always used even when the pom of the other project is correct
|
||||
Iterator<Node> childNodeIter = depsNodes.get(0).children().iterator()
|
||||
while (childNodeIter.hasNext()) {
|
||||
Node depNode = childNodeIter.next()
|
||||
String groupId = depNode.get('groupId').get(0).text()
|
||||
Node artifactIdNode = depNode.get('artifactId').get(0)
|
||||
String artifactId = artifactIdNode.text()
|
||||
String scope = depNode.get("scope").get(0).text()
|
||||
if (groupId.equals('org.elasticsearch') && artifactId.equals('base')) {
|
||||
artifactIdNode.replaceNode(new Node(null, 'artifactId', 'license-core'))
|
||||
} else if ('test'.equals(scope)) {
|
||||
childNodeIter.remove()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,7 +16,6 @@ import java.util.Map;
|
|||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -27,23 +26,24 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.support.RestActions;
|
||||
import org.elasticsearch.rest.action.support.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.XPackClient;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreResponse;
|
||||
import org.elasticsearch.xpack.graph.action.Hop;
|
||||
import org.elasticsearch.xpack.graph.action.VertexRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest.TermBoost;
|
||||
import org.elasticsearch.xpack.rest.XPackRestHandler;
|
||||
|
||||
|
||||
/**
|
||||
* @see GraphExploreRequest
|
||||
*/
|
||||
public class RestGraphAction extends BaseRestHandler {
|
||||
public class RestGraphAction extends XPackRestHandler {
|
||||
|
||||
private IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
public static final ParseField TIMEOUT_FIELD = new ParseField("timeout");
|
||||
|
@ -68,21 +68,23 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
@Inject
|
||||
public RestGraphAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) {
|
||||
super(settings);
|
||||
// @deprecated TODO need to add deprecation support as per https://github.com/elastic/x-plugins/issues/1760#issuecomment-217507517
|
||||
controller.registerHandler(GET, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_graph/explore", this);
|
||||
// new REST endpoint
|
||||
controller.registerHandler(GET, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
|
||||
// @deprecated Remove in 6.0
|
||||
// NOTE: Old versions did not end with "/_explore"; they were just "/explore"
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) throws IOException {
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final XPackClient client) throws IOException {
|
||||
GraphExploreRequest graphRequest = new GraphExploreRequest(Strings.splitStringByCommaToArray(request.param("index")));
|
||||
graphRequest.indicesOptions(IndicesOptions.fromRequest(request, graphRequest.indicesOptions()));
|
||||
graphRequest.routing(request.param("routing"));
|
||||
|
@ -109,7 +111,7 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
}
|
||||
|
||||
graphRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
|
||||
client.execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
client.es().execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
}
|
||||
|
||||
private void parseHop(XContentParser parser, QueryParseContext context, Hop currentHop,
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
"documentation": "https://www.elastic.co/guide/en/graph/current/explore.html",
|
||||
"methods": ["GET", "POST"],
|
||||
"url": {
|
||||
"path": "/{index}/_xpack/graph/_explore",
|
||||
"paths": ["/{index}/_xpack/graph/_explore", "/{index}/{type}/_xpack/graph/_explore"],
|
||||
"path": "/{index}/_xpack/_graph/_explore",
|
||||
"paths": ["/{index}/_xpack/_graph/_explore", "/{index}/{type}/_xpack/_graph/_explore"],
|
||||
"parts" : {
|
||||
"index": {
|
||||
"type" : "list",
|
||||
|
@ -30,4 +30,4 @@
|
|||
"description" : "Graph Query DSL"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
||||
import java.util.UUID;
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
|
@ -25,9 +25,6 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.gateway.GatewayService;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.core.LicenseVerifier;
|
||||
import org.elasticsearch.license.core.OperationModeFileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefIterator;
|
|
@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.EnumSet;
|
||||
|
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
/**
|
||||
*
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.license.core.License.OperationMode;
|
||||
import org.elasticsearch.license.License.OperationMode;
|
||||
import org.elasticsearch.watcher.FileChangesListener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
|
@ -10,7 +10,6 @@ import org.elasticsearch.action.ValidateActions;
|
|||
import org.elasticsearch.action.support.master.AcknowledgedRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -29,7 +28,7 @@ public class PutLicenseRequest extends AcknowledgedRequest<PutLicenseRequest> {
|
|||
}
|
||||
|
||||
/**
|
||||
* Parses license from json format to an instance of {@link org.elasticsearch.license.core.License}
|
||||
* Parses license from json format to an instance of {@link License}
|
||||
*
|
||||
* @param licenseDefinition licenses definition
|
||||
*/
|
||||
|
|
|
@ -7,7 +7,6 @@ package org.elasticsearch.license;
|
|||
|
||||
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
/**
|
||||
* Register license request builder
|
||||
|
|
|
@ -9,7 +9,6 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
|
|
@ -11,15 +11,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.decrypt;
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.decrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.encrypt;
|
||||
|
||||
class TrialLicense {
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
/**
|
||||
* Licensing for xpack.
|
||||
*
|
||||
* A {@link org.elasticsearch.license.core.License} is a signed set of json properties that determine what features
|
||||
* A {@link org.elasticsearch.license.License} is a signed set of json properties that determine what features
|
||||
* are available in a running cluster. Licenses are registered through a
|
||||
* {@link org.elasticsearch.license.PutLicenseRequest}. This action is handled by the master node, which places
|
||||
* the signed license into the cluster state. Each node listens for cluster state updates via the
|
||||
|
|
|
@ -17,7 +17,6 @@ import org.elasticsearch.common.component.Lifecycle;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.LocalTransportAddress;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.support.clock.ClockMock;
|
||||
|
|
|
@ -11,8 +11,6 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
|||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.LicensesMetaData;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
|
||||
|
|
|
@ -15,7 +15,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.transport.LocalTransportAddress;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
|
|
|
@ -3,13 +3,13 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.elasticsearch.license.core.License.OperationMode;
|
||||
import static org.elasticsearch.license.License.OperationMode;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
|
@ -12,7 +12,6 @@ import org.junit.Before;
|
|||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.elasticsearch.license.core.OperationModeFileWatcherTests.writeMode;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.eq;
|
||||
|
@ -47,7 +46,7 @@ public class LicenseOperationModeUpdateTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.resolve(type)));
|
||||
writeMode("gold", licenseModeFile);
|
||||
OperationModeFileWatcherTests.writeMode("gold", licenseModeFile);
|
||||
license.setOperationModeFileWatcher(operationModeFileWatcher);
|
||||
verifyZeroInteractions(resourceWatcherService);
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.resolve(type)));
|
||||
|
@ -65,7 +64,7 @@ public class LicenseOperationModeUpdateTests extends ESTestCase {
|
|||
.build();
|
||||
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.PLATINUM));
|
||||
writeMode("gold", licenseModeFile);
|
||||
OperationModeFileWatcherTests.writeMode("gold", licenseModeFile);
|
||||
license.setOperationModeFileWatcher(operationModeFileWatcher);
|
||||
verify(resourceWatcherService, times(1)).add(any(FileWatcher.class), eq(ResourceWatcherService.Frequency.HIGH));
|
||||
assertThat(license.operationMode(), equalTo(License.OperationMode.GOLD));
|
|
@ -6,7 +6,6 @@
|
|||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
import org.junit.Before;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
|
@ -9,7 +9,6 @@ import org.elasticsearch.common.network.NetworkModule;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.MockNetty3Plugin;
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import static org.elasticsearch.license.TestUtils.generateSignedLicense;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
|
|
@ -15,7 +15,6 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
|||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
|
|
|
@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Collections;
|
||||
|
|
|
@ -8,7 +8,6 @@ package org.elasticsearch.license;
|
|||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.RandomizedTest;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
|
@ -15,10 +16,12 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.licensor.LicenseSigner;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.junit.Assert;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -27,6 +30,9 @@ import java.util.concurrent.Callable;
|
|||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.ESTestCase.assertNotNull;
|
||||
import static org.elasticsearch.test.ESTestCase.awaitBusy;
|
||||
import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength;
|
||||
|
@ -39,6 +45,16 @@ public class TestUtils {
|
|||
|
||||
private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
|
||||
private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
|
||||
private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
|
||||
|
||||
public static String dateMathString(String time, final long now) {
|
||||
return dateTimeFormatter.print(dateMathParser.parse(time, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return now;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
public static long dateMath(String time, final long now) {
|
||||
return dateMathParser.parse(time, new Callable<Long>() {
|
||||
|
@ -48,6 +64,159 @@ public class TestUtils {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static LicenseSpec generateRandomLicenseSpec(int version) {
|
||||
boolean datesInMillis = randomBoolean();
|
||||
long now = System.currentTimeMillis();
|
||||
String uid = UUID.randomUUID().toString();
|
||||
String feature = "feature__" + randomInt();
|
||||
String issuer = "issuer__" + randomInt();
|
||||
String issuedTo = "issuedTo__" + randomInt();
|
||||
final String type;
|
||||
final String subscriptionType;
|
||||
if (version < License.VERSION_NO_FEATURE_TYPE) {
|
||||
subscriptionType = randomFrom("gold", "silver", "platinum");
|
||||
type = "subscription";//randomFrom("subscription", "internal", "development");
|
||||
} else {
|
||||
subscriptionType = null;
|
||||
type = randomFrom("basic", "dev", "gold", "silver", "platinum");
|
||||
}
|
||||
int maxNodes = RandomizedTest.randomIntBetween(5, 100);
|
||||
if (datesInMillis) {
|
||||
long issueDateInMillis = dateMath("now", now);
|
||||
long expiryDateInMillis = dateMath("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDateInMillis, expiryDateInMillis, type, subscriptionType, issuedTo, issuer,
|
||||
maxNodes);
|
||||
} else {
|
||||
String issueDate = dateMathString("now", now);
|
||||
String expiryDate = dateMathString("now+10d/d", now);
|
||||
return new LicenseSpec(version, uid, feature, issueDate, expiryDate, type, subscriptionType, issuedTo, issuer, maxNodes);
|
||||
}
|
||||
}
|
||||
|
||||
public static String generateLicenseSpecString(LicenseSpec licenseSpec) throws IOException {
|
||||
XContentBuilder licenses = jsonBuilder();
|
||||
licenses.startObject();
|
||||
licenses.startArray("licenses");
|
||||
licenses.startObject()
|
||||
.field("uid", licenseSpec.uid)
|
||||
.field("type", licenseSpec.type)
|
||||
.field("subscription_type", licenseSpec.subscriptionType)
|
||||
.field("issued_to", licenseSpec.issuedTo)
|
||||
.field("issuer", licenseSpec.issuer)
|
||||
.field("feature", licenseSpec.feature)
|
||||
.field("max_nodes", licenseSpec.maxNodes);
|
||||
|
||||
if (licenseSpec.issueDate != null) {
|
||||
licenses.field("issue_date", licenseSpec.issueDate);
|
||||
} else {
|
||||
licenses.field("issue_date_in_millis", licenseSpec.issueDateInMillis);
|
||||
}
|
||||
if (licenseSpec.expiryDate != null) {
|
||||
licenses.field("expiry_date", licenseSpec.expiryDate);
|
||||
} else {
|
||||
licenses.field("expiry_date_in_millis", licenseSpec.expiryDateInMillis);
|
||||
}
|
||||
licenses.field("version", licenseSpec.version);
|
||||
licenses.endObject();
|
||||
licenses.endArray();
|
||||
licenses.endObject();
|
||||
return licenses.string();
|
||||
}
|
||||
|
||||
public static License generateLicenses(LicenseSpec spec) {
|
||||
License.Builder builder = License.builder()
|
||||
.uid(spec.uid)
|
||||
.feature(spec.feature)
|
||||
.type(spec.type)
|
||||
.subscriptionType(spec.subscriptionType)
|
||||
.issuedTo(spec.issuedTo)
|
||||
.issuer(spec.issuer)
|
||||
.maxNodes(spec.maxNodes);
|
||||
|
||||
if (spec.expiryDate != null) {
|
||||
builder.expiryDate(DateUtils.endOfTheDay(spec.expiryDate));
|
||||
} else {
|
||||
builder.expiryDate(spec.expiryDateInMillis);
|
||||
}
|
||||
if (spec.issueDate != null) {
|
||||
builder.issueDate(DateUtils.beginningOfTheDay(spec.issueDate));
|
||||
} else {
|
||||
builder.issueDate(spec.issueDateInMillis);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static void assertLicenseSpec(LicenseSpec spec, License license) {
|
||||
MatcherAssert.assertThat(license.uid(), equalTo(spec.uid));
|
||||
MatcherAssert.assertThat(license.issuedTo(), equalTo(spec.issuedTo));
|
||||
MatcherAssert.assertThat(license.issuer(), equalTo(spec.issuer));
|
||||
MatcherAssert.assertThat(license.type(), equalTo(spec.type));
|
||||
MatcherAssert.assertThat(license.maxNodes(), equalTo(spec.maxNodes));
|
||||
if (spec.issueDate != null) {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(DateUtils.beginningOfTheDay(spec.issueDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.issueDate(), equalTo(spec.issueDateInMillis));
|
||||
}
|
||||
if (spec.expiryDate != null) {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(DateUtils.endOfTheDay(spec.expiryDate)));
|
||||
} else {
|
||||
MatcherAssert.assertThat(license.expiryDate(), equalTo(spec.expiryDateInMillis));
|
||||
}
|
||||
}
|
||||
|
||||
public static class LicenseSpec {
|
||||
public final int version;
|
||||
public final String feature;
|
||||
public final String issueDate;
|
||||
public final long issueDateInMillis;
|
||||
public final String expiryDate;
|
||||
public final long expiryDateInMillis;
|
||||
public final String uid;
|
||||
public final String type;
|
||||
public final String subscriptionType;
|
||||
public final String issuedTo;
|
||||
public final String issuer;
|
||||
public final int maxNodes;
|
||||
|
||||
public LicenseSpec(String issueDate, String expiryDate) {
|
||||
this(License.VERSION_CURRENT, UUID.randomUUID().toString(), "feature", issueDate, expiryDate, "trial", "none", "customer",
|
||||
"elasticsearch", 5);
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, long issueDateInMillis, long expiryDateInMillis, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDateInMillis = issueDateInMillis;
|
||||
this.issueDate = null;
|
||||
this.expiryDateInMillis = expiryDateInMillis;
|
||||
this.expiryDate = null;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
|
||||
public LicenseSpec(int version, String uid, String feature, String issueDate, String expiryDate, String type,
|
||||
String subscriptionType, String issuedTo, String issuer, int maxNodes) {
|
||||
this.version = version;
|
||||
this.feature = feature;
|
||||
this.issueDate = issueDate;
|
||||
this.issueDateInMillis = -1;
|
||||
this.expiryDate = expiryDate;
|
||||
this.expiryDateInMillis = -1;
|
||||
this.uid = uid;
|
||||
this.type = type;
|
||||
this.subscriptionType = subscriptionType;
|
||||
this.issuedTo = issuedTo;
|
||||
this.issuer = issuer;
|
||||
this.maxNodes = maxNodes;
|
||||
}
|
||||
}
|
||||
|
||||
public static Path getTestPriKeyPath() throws Exception {
|
||||
return getResourcePath("/private.key");
|
||||
}
|
||||
|
|
|
@ -11,8 +11,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.TrialLicense;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -21,7 +19,7 @@ import java.util.Base64;
|
|||
import java.util.Collections;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.elasticsearch.license.core.CryptUtils.encrypt;
|
||||
import static org.elasticsearch.license.CryptUtils.encrypt;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
|
||||
|
||||
public class ClusterInfoMonitoringDoc extends MonitoringDoc {
|
||||
|
|
|
@ -10,7 +10,7 @@ import org.elasticsearch.common.collect.MapBuilder;
|
|||
import org.elasticsearch.common.hash.MessageDigests;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterInfoMonitoringDoc;
|
||||
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolver;
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.transport.LocalTransportAddress;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterInfoMonitoringDoc;
|
||||
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
|
||||
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolverTestCase;
|
||||
|
|
|
@ -10,7 +10,7 @@ import org.elasticsearch.action.get.GetResponse;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.License;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.monitoring.MonitoringSettings;
|
||||
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStatsCollector;
|
||||
|
|
|
@ -111,6 +111,7 @@ import org.elasticsearch.xpack.security.rest.action.user.RestGetUsersAction;
|
|||
import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction;
|
||||
import org.elasticsearch.xpack.security.ssl.ClientSSLService;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfiguration;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfigurationReloader;
|
||||
import org.elasticsearch.xpack.security.ssl.ServerSSLService;
|
||||
import org.elasticsearch.xpack.security.support.OptionalSettings;
|
||||
import org.elasticsearch.xpack.security.transport.SecurityClientTransportService;
|
||||
|
@ -185,8 +186,7 @@ public class Security implements ActionPlugin, IngestPlugin {
|
|||
modules.add(b -> {
|
||||
// for transport client we still must inject these ssl classes with guice
|
||||
b.bind(ServerSSLService.class).toProvider(Providers.<ServerSSLService>of(null));
|
||||
b.bind(ClientSSLService.class).toInstance(
|
||||
new ClientSSLService(settings, null, new SSLConfiguration.Global(settings), null));
|
||||
b.bind(ClientSSLService.class).toInstance(new ClientSSLService(settings, null, new SSLConfiguration.Global(settings)));
|
||||
});
|
||||
|
||||
return modules;
|
||||
|
@ -232,8 +232,12 @@ public class Security implements ActionPlugin, IngestPlugin {
|
|||
components.add(securityContext);
|
||||
|
||||
final SSLConfiguration.Global globalSslConfig = new SSLConfiguration.Global(settings);
|
||||
final ClientSSLService clientSSLService = new ClientSSLService(settings, env, globalSslConfig, resourceWatcherService);
|
||||
final ServerSSLService serverSSLService = new ServerSSLService(settings, env, globalSslConfig, resourceWatcherService);
|
||||
final ClientSSLService clientSSLService = new ClientSSLService(settings, env, globalSslConfig);
|
||||
final ServerSSLService serverSSLService = new ServerSSLService(settings, env, globalSslConfig);
|
||||
// just create the reloader as it will register itself as a listener to the ssl service and nothing else depends on it
|
||||
// IMPORTANT: if the reloader construction is moved to later, then it needs to be updated to ensure any SSLContexts that have been
|
||||
// loaded by the services are also monitored by the reloader!
|
||||
new SSLConfigurationReloader(settings, env, serverSSLService, clientSSLService, resourceWatcherService);
|
||||
components.add(clientSSLService);
|
||||
components.add(serverSSLService);
|
||||
|
||||
|
|
|
@ -10,14 +10,11 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.xpack.security.audit.AuditTrailService;
|
||||
import org.elasticsearch.xpack.security.authc.Realm;
|
||||
import org.elasticsearch.xpack.security.authc.Realms;
|
||||
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
|
||||
import org.elasticsearch.xpack.XPackFeatureSet;
|
||||
import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
|
||||
import org.elasticsearch.xpack.security.authz.store.RolesStore;
|
||||
|
@ -25,20 +22,20 @@ import org.elasticsearch.xpack.security.crypto.CryptoService;
|
|||
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
|
||||
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3HttpServerTransport;
|
||||
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3Transport;
|
||||
import org.elasticsearch.xpack.security.user.AnonymousUser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SecurityFeatureSet implements XPackFeatureSet {
|
||||
|
||||
private static final Map<String, Object> DISABLED_FEATURE_MAP = Collections.singletonMap("enabled", false);
|
||||
|
||||
private final Settings settings;
|
||||
private final boolean enabled;
|
||||
private final XPackLicenseState licenseState;
|
||||
|
@ -91,28 +88,22 @@ public class SecurityFeatureSet implements XPackFeatureSet {
|
|||
|
||||
@Override
|
||||
public XPackFeatureSet.Usage usage() {
|
||||
List<Map<String, Object>> enabledRealms = buildEnabledRealms(realms);
|
||||
Map<String, Object> realmsUsage = buildRealmsUsage(realms);
|
||||
Map<String, Object> rolesStoreUsage = rolesStoreUsage(rolesStore);
|
||||
Map<String, Object> sslUsage = sslUsage(settings);
|
||||
Map<String, Object> auditUsage = auditUsage(auditTrailService);
|
||||
Map<String, Object> ipFilterUsage = ipFilterUsage(ipFilter);
|
||||
boolean hasSystemKey = systemKeyUsage(cryptoService);
|
||||
return new Usage(available(), enabled(), enabledRealms, rolesStoreUsage, sslUsage, auditUsage, ipFilterUsage, hasSystemKey);
|
||||
Map<String, Object> systemKeyUsage = systemKeyUsage(cryptoService);
|
||||
Map<String, Object> anonymousUsage = Collections.singletonMap("enabled", AnonymousUser.enabled());
|
||||
return new Usage(available(), enabled(), realmsUsage, rolesStoreUsage, sslUsage, auditUsage, ipFilterUsage, systemKeyUsage,
|
||||
anonymousUsage);
|
||||
}
|
||||
|
||||
static List<Map<String, Object>> buildEnabledRealms(Realms realms) {
|
||||
static Map<String, Object> buildRealmsUsage(Realms realms) {
|
||||
if (realms == null) {
|
||||
return Collections.emptyList();
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
List<Map<String, Object>> enabledRealms = new ArrayList<>();
|
||||
for (Realm realm : realms) {
|
||||
if (realm instanceof ReservedRealm) {
|
||||
continue; // we don't need usage of this one
|
||||
}
|
||||
Map<String, Object> stats = realm.usageStats();
|
||||
enabledRealms.add(stats);
|
||||
}
|
||||
return enabledRealms;
|
||||
return realms.usageStats();
|
||||
}
|
||||
|
||||
static Map<String, Object> rolesStoreUsage(@Nullable RolesStore rolesStore) {
|
||||
|
@ -131,82 +122,88 @@ public class SecurityFeatureSet implements XPackFeatureSet {
|
|||
|
||||
static Map<String, Object> auditUsage(@Nullable AuditTrailService auditTrailService) {
|
||||
if (auditTrailService == null) {
|
||||
return Collections.emptyMap();
|
||||
return DISABLED_FEATURE_MAP;
|
||||
}
|
||||
return auditTrailService.usageStats();
|
||||
}
|
||||
|
||||
static Map<String, Object> ipFilterUsage(@Nullable IPFilter ipFilter) {
|
||||
if (ipFilter == null) {
|
||||
return Collections.emptyMap();
|
||||
return IPFilter.DISABLED_USAGE_STATS;
|
||||
}
|
||||
return ipFilter.usageStats();
|
||||
}
|
||||
|
||||
static boolean systemKeyUsage(CryptoService cryptoService) {
|
||||
static Map<String, Object> systemKeyUsage(CryptoService cryptoService) {
|
||||
// we can piggy back on the encryption enabled method as it is only enabled if there is a system key
|
||||
return cryptoService != null && cryptoService.isEncryptionEnabled();
|
||||
return Collections.singletonMap("enabled", cryptoService != null && cryptoService.isEncryptionEnabled());
|
||||
}
|
||||
|
||||
static class Usage extends XPackFeatureSet.Usage {
|
||||
|
||||
private static final String ENABLED_REALMS_XFIELD = "enabled_realms";
|
||||
private static final String REALMS_XFIELD = "realms";
|
||||
private static final String ROLES_XFIELD = "roles";
|
||||
private static final String SSL_XFIELD = "ssl";
|
||||
private static final String AUDIT_XFIELD = "audit";
|
||||
private static final String IP_FILTER_XFIELD = "ipfilter";
|
||||
private static final String SYSTEM_KEY_XFIELD = "system_key";
|
||||
private static final String ANONYMOUS_XFIELD = "anonymous";
|
||||
|
||||
private List<Map<String, Object>> enabledRealms;
|
||||
private Map<String, Object> realmsUsage;
|
||||
private Map<String, Object> rolesStoreUsage;
|
||||
private Map<String, Object> sslUsage;
|
||||
private Map<String, Object> auditUsage;
|
||||
private Map<String, Object> ipFilterUsage;
|
||||
private boolean hasSystemKey;
|
||||
private Map<String, Object> systemKeyUsage;
|
||||
private Map<String, Object> anonymousUsage;
|
||||
|
||||
public Usage(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
enabledRealms = in.readList(StreamInput::readMap);
|
||||
realmsUsage = in.readMap();
|
||||
rolesStoreUsage = in.readMap();
|
||||
sslUsage = in.readMap();
|
||||
auditUsage = in.readMap();
|
||||
ipFilterUsage = in.readMap();
|
||||
hasSystemKey = in.readBoolean();
|
||||
systemKeyUsage = in.readMap();
|
||||
anonymousUsage = in.readMap();
|
||||
}
|
||||
|
||||
public Usage(boolean available, boolean enabled, List<Map<String, Object>> enabledRealms, Map<String, Object> rolesStoreUsage,
|
||||
public Usage(boolean available, boolean enabled, Map<String, Object> realmsUsage, Map<String, Object> rolesStoreUsage,
|
||||
Map<String, Object> sslUsage, Map<String, Object> auditUsage, Map<String, Object> ipFilterUsage,
|
||||
boolean hasSystemKey) {
|
||||
Map<String, Object> systemKeyUsage, Map<String, Object> anonymousUsage) {
|
||||
super(Security.NAME, available, enabled);
|
||||
this.enabledRealms = enabledRealms;
|
||||
this.realmsUsage = realmsUsage;
|
||||
this.rolesStoreUsage = rolesStoreUsage;
|
||||
this.sslUsage = sslUsage;
|
||||
this.auditUsage = auditUsage;
|
||||
this.ipFilterUsage = ipFilterUsage;
|
||||
this.hasSystemKey = hasSystemKey;
|
||||
this.systemKeyUsage = systemKeyUsage;
|
||||
this.anonymousUsage = anonymousUsage;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeList(enabledRealms.stream().map((m) -> (Writeable) o -> o.writeMap(m)).collect(Collectors.toList()));
|
||||
out.writeMap(realmsUsage);
|
||||
out.writeMap(rolesStoreUsage);
|
||||
out.writeMap(sslUsage);
|
||||
out.writeMap(auditUsage);
|
||||
out.writeMap(ipFilterUsage);
|
||||
out.writeBoolean(hasSystemKey);
|
||||
out.writeMap(systemKeyUsage);
|
||||
out.writeMap(anonymousUsage);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
super.innerXContent(builder, params);
|
||||
if (enabled) {
|
||||
builder.field(ENABLED_REALMS_XFIELD, enabledRealms);
|
||||
builder.field(REALMS_XFIELD, realmsUsage);
|
||||
builder.field(ROLES_XFIELD, rolesStoreUsage);
|
||||
builder.field(SSL_XFIELD, sslUsage);
|
||||
builder.field(AUDIT_XFIELD, auditUsage);
|
||||
builder.field(IP_FILTER_XFIELD, ipFilterUsage);
|
||||
builder.field(SYSTEM_KEY_XFIELD, hasSystemKey);
|
||||
builder.field(SYSTEM_KEY_XFIELD, systemKeyUsage);
|
||||
builder.field(ANONYMOUS_XFIELD, anonymousUsage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,8 @@ import org.elasticsearch.xpack.security.user.User;
|
|||
*/
|
||||
public class AuditTrailService extends AbstractComponent implements AuditTrail {
|
||||
|
||||
public static final Map<String, Object> DISABLED_USAGE_STATS = Collections.singletonMap("enabled", false);
|
||||
|
||||
private final XPackLicenseState licenseState;
|
||||
final List<AuditTrail> auditTrails;
|
||||
|
||||
|
|
|
@ -89,7 +89,6 @@ public abstract class Realm implements Comparable<Realm> {
|
|||
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> stats = new HashMap<>();
|
||||
stats.put("type", type);
|
||||
stats.put("name", name());
|
||||
stats.put("order", order());
|
||||
return stats;
|
||||
|
|
|
@ -8,13 +8,16 @@ package org.elasticsearch.xpack.security.authc;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
|
@ -178,6 +181,48 @@ public class Realms extends AbstractLifecycleComponent implements Iterable<Realm
|
|||
return realms;
|
||||
}
|
||||
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> realmMap = new HashMap<>();
|
||||
for (Realm realm : this) {
|
||||
if (ReservedRealm.TYPE.equals(realm.type())) {
|
||||
continue;
|
||||
}
|
||||
realmMap.compute(realm.type(), (key, value) -> {
|
||||
if (value == null) {
|
||||
Object realmTypeUsage = convertToMapOfLists(realm.usageStats());
|
||||
return realmTypeUsage;
|
||||
}
|
||||
assert value instanceof Map;
|
||||
combineMaps((Map<String, Object>) value, realm.usageStats());
|
||||
return value;
|
||||
});
|
||||
}
|
||||
|
||||
final AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
|
||||
// iterate over the factories so we can add enabled & available info
|
||||
for (String type : factories.keySet()) {
|
||||
assert ReservedRealm.TYPE.equals(type) == false;
|
||||
realmMap.compute(type, (key, value) -> {
|
||||
if (value == null) {
|
||||
return MapBuilder.<String, Object>newMapBuilder()
|
||||
.put("enabled", false)
|
||||
.put("available", isRealmTypeAvailable(allowedRealmType, type))
|
||||
.map();
|
||||
}
|
||||
|
||||
assert value instanceof Map;
|
||||
Map<String, Object> realmTypeUsage = (Map<String, Object>) value;
|
||||
realmTypeUsage.put("enabled", true);
|
||||
// the realms iterator returned this type so it must be enabled
|
||||
assert isRealmTypeAvailable(allowedRealmType, type);
|
||||
realmTypeUsage.put("available", true);
|
||||
return value;
|
||||
});
|
||||
}
|
||||
|
||||
return realmMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns the settings for the {@link FileRealm}. Typically, this realms may or may
|
||||
* not be configured. If it is not configured, it will work OOTB using default settings. If it is
|
||||
|
@ -218,4 +263,41 @@ public class Realms extends AbstractLifecycleComponent implements Iterable<Realm
|
|||
public static void addSettings(List<Setting<?>> settingsModule) {
|
||||
settingsModule.add(REALMS_GROUPS_SETTINGS);
|
||||
}
|
||||
|
||||
private static void combineMaps(Map<String, Object> mapA, Map<String, Object> mapB) {
|
||||
for (Entry<String, Object> entry : mapB.entrySet()) {
|
||||
mapA.compute(entry.getKey(), (key, value) -> {
|
||||
if (value == null) {
|
||||
return new ArrayList<>(Collections.singletonList(entry.getValue()));
|
||||
}
|
||||
|
||||
assert value instanceof List;
|
||||
((List) value).add(entry.getValue());
|
||||
return value;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private static Map<String, Object> convertToMapOfLists(Map<String, Object> map) {
|
||||
Map<String, Object> converted = new HashMap<>(map.size());
|
||||
for (Entry<String, Object> entry : map.entrySet()) {
|
||||
converted.put(entry.getKey(), new ArrayList<>(Collections.singletonList(entry.getValue())));
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
|
||||
private static boolean isRealmTypeAvailable(AllowedRealmType enabledRealmType, String type) {
|
||||
switch (enabledRealmType) {
|
||||
case ALL:
|
||||
return true;
|
||||
case NONE:
|
||||
return false;
|
||||
case NATIVE:
|
||||
return FileRealm.TYPE.equals(type) || NativeRealm.TYPE.equals(type);
|
||||
case DEFAULT:
|
||||
return INTERNAL_REALM_TYPES.contains(type);
|
||||
default:
|
||||
throw new IllegalStateException("unknown enabled realm type [" + enabledRealmType + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,44 +6,31 @@
|
|||
package org.elasticsearch.xpack.security.authc.esnative;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.base.Joiner;
|
||||
import javax.net.ssl.HttpsURLConnection;
|
||||
import joptsimple.OptionParser;
|
||||
import joptsimple.OptionSet;
|
||||
import joptsimple.OptionSpec;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.cli.MultiCommand;
|
||||
import org.elasticsearch.cli.SettingCommand;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.node.internal.InternalSettingsPreparer;
|
||||
import org.elasticsearch.xpack.security.action.role.PutRoleRequest;
|
||||
import org.elasticsearch.xpack.security.action.user.PutUserRequest;
|
||||
import org.elasticsearch.xpack.security.authc.Realms;
|
||||
import org.elasticsearch.xpack.security.authc.file.FileUserPasswdStore;
|
||||
import org.elasticsearch.xpack.security.authc.file.FileUserRolesStore;
|
||||
import org.elasticsearch.xpack.security.authc.support.Hasher;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
|
||||
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
|
||||
import org.elasticsearch.xpack.security.authz.permission.Permission;
|
||||
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
|
||||
import org.elasticsearch.xpack.security.ssl.ClientSSLService;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfiguration;
|
||||
import org.elasticsearch.xpack.security.support.NoOpLogger;
|
||||
import org.elasticsearch.xpack.security.support.Validation;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
|
@ -52,16 +39,13 @@ import java.io.OutputStream;
|
|||
import java.net.HttpURLConnection;
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.xpack.security.Security.setting;
|
||||
|
@ -151,7 +135,7 @@ public class ESNativeRealmMigrateTool extends MultiCommand {
|
|||
if ("https".equalsIgnoreCase(uri.getScheme())) {
|
||||
Settings sslSettings = settings.getByPrefix(setting("http.ssl."));
|
||||
SSLConfiguration.Global globalConfig = new SSLConfiguration.Global(settings);
|
||||
final ClientSSLService sslService = new ClientSSLService(sslSettings, env, globalConfig, null);
|
||||
final ClientSSLService sslService = new ClientSSLService(sslSettings, env, globalConfig);
|
||||
final HttpsURLConnection httpsConn = (HttpsURLConnection) url.openConnection();
|
||||
AccessController.doPrivileged(new PrivilegedAction<Void>() {
|
||||
@Override
|
||||
|
|
|
@ -13,6 +13,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
|
@ -325,7 +326,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
.execute(new ActionListener<UpdateResponse>() {
|
||||
@Override
|
||||
public void onResponse(UpdateResponse updateResponse) {
|
||||
assert updateResponse.isCreated() == false;
|
||||
assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED;
|
||||
clearRealmCache(request.username(), listener, null);
|
||||
}
|
||||
|
||||
|
@ -401,7 +402,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
.execute(new ActionListener<UpdateResponse>() {
|
||||
@Override
|
||||
public void onResponse(UpdateResponse updateResponse) {
|
||||
assert updateResponse.isCreated() == false;
|
||||
assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED;
|
||||
clearRealmCache(putUserRequest.username(), listener, false);
|
||||
}
|
||||
|
||||
|
@ -442,12 +443,13 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
@Override
|
||||
public void onResponse(IndexResponse indexResponse) {
|
||||
// if the document was just created, then we don't need to clear cache
|
||||
if (indexResponse.isCreated()) {
|
||||
listener.onResponse(indexResponse.isCreated());
|
||||
boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;
|
||||
if (created) {
|
||||
listener.onResponse(true);
|
||||
return;
|
||||
}
|
||||
|
||||
clearRealmCache(putUserRequest.username(), listener, indexResponse.isCreated());
|
||||
clearRealmCache(putUserRequest.username(), listener, created);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -471,7 +473,8 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
client.delete(request, new ActionListener<DeleteResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteResponse deleteResponse) {
|
||||
clearRealmCache(deleteUserRequest.username(), listener, deleteResponse.isFound());
|
||||
clearRealmCache(deleteUserRequest.username(), listener,
|
||||
deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -61,7 +61,7 @@ public class FileRealm extends CachingUsernamePasswordRealm {
|
|||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> stats = super.usageStats();
|
||||
// here we can determine the size based on the in mem user store
|
||||
stats.put("size", UserbaseSize.resolve(userPasswdStore.usersCount()));
|
||||
stats.put("size", userPasswdStore.usersCount());
|
||||
return stats;
|
||||
}
|
||||
|
||||
|
|
|
@ -171,7 +171,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
|||
@Override
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> stats = super.usageStats();
|
||||
stats.put("size", UserbaseSize.resolve(cache.count()).toString());
|
||||
stats.put("size", cache.count());
|
||||
return stats;
|
||||
}
|
||||
|
||||
|
|
|
@ -6,13 +6,10 @@
|
|||
package org.elasticsearch.xpack.security.authc.support;
|
||||
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
|
||||
import org.elasticsearch.xpack.security.authc.Realm;
|
||||
import org.elasticsearch.xpack.security.authc.RealmConfig;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -31,33 +28,4 @@ public abstract class UsernamePasswordRealm extends Realm {
|
|||
return token instanceof UsernamePasswordToken;
|
||||
}
|
||||
|
||||
public enum UserbaseSize {
|
||||
|
||||
TINY,
|
||||
SMALL,
|
||||
MEDIUM,
|
||||
LARGE,
|
||||
XLARGE;
|
||||
|
||||
public static UserbaseSize resolve(int count) {
|
||||
if (count < 10) {
|
||||
return TINY;
|
||||
}
|
||||
if (count < 100) {
|
||||
return SMALL;
|
||||
}
|
||||
if (count < 500) {
|
||||
return MEDIUM;
|
||||
}
|
||||
if (count < 1000) {
|
||||
return LARGE;
|
||||
}
|
||||
return XLARGE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this == XLARGE ? "x-large" : name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ import java.util.function.Function;
|
|||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
|
@ -273,7 +274,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
client.delete(request, new ActionListener<DeleteResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteResponse deleteResponse) {
|
||||
clearRoleCache(deleteRoleRequest.name(), listener, deleteResponse.isFound());
|
||||
clearRoleCache(deleteRoleRequest.name(), listener, deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -303,11 +304,12 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
.execute(new ActionListener<IndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(IndexResponse indexResponse) {
|
||||
if (indexResponse.isCreated()) {
|
||||
listener.onResponse(indexResponse.isCreated());
|
||||
boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;
|
||||
if (created) {
|
||||
listener.onResponse(true);
|
||||
return;
|
||||
}
|
||||
clearRoleCache(role.getName(), listener, indexResponse.isCreated());
|
||||
clearRoleCache(role.getName(), listener, created);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -13,13 +13,10 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfiguration.Custom;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfiguration.Global;
|
||||
import org.elasticsearch.xpack.security.ssl.TrustConfig.Reloadable.Listener;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
|
||||
import javax.net.ssl.KeyManager;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.SSLEngine;
|
||||
import javax.net.ssl.SSLSessionContext;
|
||||
import javax.net.ssl.SSLSocket;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
import javax.net.ssl.TrustManager;
|
||||
|
@ -28,9 +25,12 @@ import java.net.InetAddress;
|
|||
import java.net.Socket;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Enumeration;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
/**
|
||||
|
@ -39,19 +39,18 @@ import java.util.concurrent.ConcurrentHashMap;
|
|||
*/
|
||||
public abstract class AbstractSSLService extends AbstractComponent {
|
||||
|
||||
private final ConcurrentHashMap<SSLConfiguration, SSLContext> sslContexts = new ConcurrentHashMap<>();
|
||||
private final SSLContextCacheLoader cacheLoader = new SSLContextCacheLoader();
|
||||
private final ConcurrentHashMap<SSLConfiguration, SSLContext> sslContexts = new ConcurrentHashMap<>();
|
||||
|
||||
protected final SSLConfiguration globalSSLConfiguration;
|
||||
protected final Environment env;
|
||||
protected final ResourceWatcherService resourceWatcherService;
|
||||
|
||||
public AbstractSSLService(Settings settings, Environment environment, Global globalSSLConfiguration,
|
||||
ResourceWatcherService resourceWatcherService) {
|
||||
private Listener listener = Listener.NOOP;
|
||||
|
||||
AbstractSSLService(Settings settings, Environment environment, Global globalSSLConfiguration) {
|
||||
super(settings);
|
||||
this.env = environment;
|
||||
this.globalSSLConfiguration = globalSSLConfiguration;
|
||||
this.resourceWatcherService = resourceWatcherService;
|
||||
}
|
||||
|
||||
public String[] supportedProtocols() {
|
||||
|
@ -167,6 +166,27 @@ public abstract class AbstractSSLService extends AbstractComponent {
|
|||
return requestedCiphersList.toArray(new String[requestedCiphersList.size()]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the listener to the value provided. Must not be {@code null}
|
||||
*/
|
||||
void setListener(Listener listener) {
|
||||
this.listener = Objects.requireNonNull(listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the existing {@link SSLContext} for the configuration or {@code null}
|
||||
*/
|
||||
SSLContext getSSLContext(SSLConfiguration sslConfiguration) {
|
||||
return sslContexts.get(sslConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
* Accessor to the loaded ssl configuration objects at the current point in time. This is useful for testing
|
||||
*/
|
||||
Collection<SSLConfiguration> getLoadedSSLConfigurations() {
|
||||
return Collections.unmodifiableSet(new HashSet<>(sslContexts.keySet()));
|
||||
}
|
||||
|
||||
private class SSLContextCacheLoader {
|
||||
|
||||
public SSLContext load(SSLConfiguration sslConfiguration) {
|
||||
|
@ -175,15 +195,15 @@ public abstract class AbstractSSLService extends AbstractComponent {
|
|||
logger.debug("using ssl settings [{}]", sslConfiguration);
|
||||
}
|
||||
|
||||
ConfigRefreshListener configRefreshListener = new ConfigRefreshListener(sslConfiguration);
|
||||
TrustManager[] trustManagers = sslConfiguration.trustConfig().trustManagers(env, resourceWatcherService, configRefreshListener);
|
||||
KeyManager[] keyManagers = sslConfiguration.keyConfig().keyManagers(env, resourceWatcherService, configRefreshListener);
|
||||
TrustManager[] trustManagers = sslConfiguration.trustConfig().trustManagers(env);
|
||||
KeyManager[] keyManagers = sslConfiguration.keyConfig().keyManagers(env);
|
||||
SSLContext sslContext = createSslContext(keyManagers, trustManagers, sslConfiguration.protocol(),
|
||||
sslConfiguration.sessionCacheSize(), sslConfiguration.sessionCacheTimeout());
|
||||
|
||||
// check the supported ciphers and log them here
|
||||
supportedCiphers(sslContext.getSupportedSSLParameters().getCipherSuites(),
|
||||
sslConfiguration.ciphers().toArray(Strings.EMPTY_ARRAY), true);
|
||||
listener.onSSLContextLoaded(sslConfiguration);
|
||||
return sslContext;
|
||||
}
|
||||
|
||||
|
@ -202,37 +222,6 @@ public abstract class AbstractSSLService extends AbstractComponent {
|
|||
}
|
||||
}
|
||||
|
||||
class ConfigRefreshListener implements Listener {
|
||||
|
||||
private final SSLConfiguration sslConfiguration;
|
||||
|
||||
ConfigRefreshListener(SSLConfiguration sslConfiguration) {
|
||||
this.sslConfiguration = sslConfiguration;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onReload() {
|
||||
SSLContext context = sslContexts.get(sslConfiguration);
|
||||
if (context != null) {
|
||||
invalidateSessions(context.getClientSessionContext());
|
||||
invalidateSessions(context.getServerSessionContext());
|
||||
}
|
||||
}
|
||||
|
||||
void invalidateSessions(SSLSessionContext sslSessionContext) {
|
||||
Enumeration<byte[]> sessionIds = sslSessionContext.getIds();
|
||||
while (sessionIds.hasMoreElements()) {
|
||||
byte[] sessionId = sessionIds.nextElement();
|
||||
sslSessionContext.getSession(sessionId).invalidate();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to load updated ssl context for [{}]", e, sslConfiguration);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This socket factory set the protocols and ciphers on each SSLSocket after it is created
|
||||
*/
|
||||
|
@ -305,4 +294,14 @@ public abstract class AbstractSSLService extends AbstractComponent {
|
|||
socket.setEnabledCipherSuites(ciphers);
|
||||
}
|
||||
}
|
||||
|
||||
interface Listener {
|
||||
/**
|
||||
* Called after a new SSLContext has been created
|
||||
* @param sslConfiguration the configuration used to create the SSLContext
|
||||
*/
|
||||
void onSSLContextLoaded(SSLConfiguration sslConfiguration);
|
||||
|
||||
Listener NOOP = (s) -> {};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.bouncycastle.operator.ContentSigner;
|
|||
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
|
||||
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
|
||||
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
|
@ -48,6 +49,7 @@ import javax.net.ssl.X509ExtendedKeyManager;
|
|||
import javax.net.ssl.X509ExtendedTrustManager;
|
||||
import javax.security.auth.x500.X500Principal;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.math.BigInteger;
|
||||
import java.net.InetAddress;
|
||||
|
@ -84,7 +86,7 @@ class CertUtils {
|
|||
return PathUtils.get(Strings.cleanPath(path));
|
||||
}
|
||||
|
||||
static X509ExtendedKeyManager[] keyManagers(Certificate[] certificateChain, PrivateKey privateKey, char[] password) throws Exception {
|
||||
static X509ExtendedKeyManager keyManagers(Certificate[] certificateChain, PrivateKey privateKey, char[] password) throws Exception {
|
||||
KeyStore keyStore = KeyStore.getInstance("jks");
|
||||
keyStore.load(null, null);
|
||||
// password must be non-null for keystore...
|
||||
|
@ -92,19 +94,19 @@ class CertUtils {
|
|||
return keyManagers(keyStore, password, KeyManagerFactory.getDefaultAlgorithm());
|
||||
}
|
||||
|
||||
static X509ExtendedKeyManager[] keyManagers(KeyStore keyStore, char[] password, String algorithm) throws Exception {
|
||||
static X509ExtendedKeyManager keyManagers(KeyStore keyStore, char[] password, String algorithm) throws Exception {
|
||||
KeyManagerFactory kmf = KeyManagerFactory.getInstance(algorithm);
|
||||
kmf.init(keyStore, password);
|
||||
KeyManager[] keyManagers = kmf.getKeyManagers();
|
||||
for (KeyManager keyManager : keyManagers) {
|
||||
if (keyManager instanceof X509ExtendedKeyManager) {
|
||||
return new X509ExtendedKeyManager[] { (X509ExtendedKeyManager) keyManager };
|
||||
return (X509ExtendedKeyManager) keyManager;
|
||||
}
|
||||
}
|
||||
throw new IllegalStateException("failed to find a X509ExtendedKeyManager");
|
||||
}
|
||||
|
||||
static X509ExtendedTrustManager[] trustManagers(Certificate[] certificates) throws Exception {
|
||||
static X509ExtendedTrustManager trustManagers(Certificate[] certificates) throws Exception {
|
||||
KeyStore store = KeyStore.getInstance("jks");
|
||||
store.load(null, null);
|
||||
int counter = 0;
|
||||
|
@ -115,13 +117,26 @@ class CertUtils {
|
|||
return trustManagers(store, TrustManagerFactory.getDefaultAlgorithm());
|
||||
}
|
||||
|
||||
static X509ExtendedTrustManager[] trustManagers(KeyStore keyStore, String algorithm) throws Exception {
|
||||
static X509ExtendedTrustManager trustManagers(String trustStorePath, String trustStorePassword, String trustStoreAlgorithm,
|
||||
Environment env) throws Exception {
|
||||
try (InputStream in = Files.newInputStream(resolvePath(trustStorePath, env))) {
|
||||
// TODO remove reliance on JKS since we can PKCS12 stores...
|
||||
KeyStore trustStore = KeyStore.getInstance("jks");
|
||||
assert trustStorePassword != null;
|
||||
trustStore.load(in, trustStorePassword.toCharArray());
|
||||
return CertUtils.trustManagers(trustStore, trustStoreAlgorithm);
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e);
|
||||
}
|
||||
}
|
||||
|
||||
static X509ExtendedTrustManager trustManagers(KeyStore keyStore, String algorithm) throws Exception {
|
||||
TrustManagerFactory tmf = TrustManagerFactory.getInstance(algorithm);
|
||||
tmf.init(keyStore);
|
||||
TrustManager[] trustManagers = tmf.getTrustManagers();
|
||||
for (TrustManager trustManager : trustManagers) {
|
||||
if (trustManager instanceof X509ExtendedTrustManager) {
|
||||
return new X509ExtendedTrustManager[] { (X509ExtendedTrustManager) trustManager };
|
||||
return (X509ExtendedTrustManager) trustManager ;
|
||||
}
|
||||
}
|
||||
throw new IllegalStateException("failed to find a X509ExtendedTrustManager");
|
||||
|
|
|
@ -7,14 +7,12 @@ package org.elasticsearch.xpack.security.ssl;
|
|||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.security.ssl.SSLConfiguration.Global;
|
||||
|
||||
public class ClientSSLService extends AbstractSSLService {
|
||||
|
||||
public ClientSSLService(Settings settings, Environment env, Global globalSSLConfiguration,
|
||||
ResourceWatcherService resourceWatcherService) {
|
||||
super(settings, env, globalSSLConfiguration, resourceWatcherService);
|
||||
public ClientSSLService(Settings settings, Environment env, Global globalSSLConfiguration) {
|
||||
super(settings, env, globalSSLConfiguration);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -5,19 +5,12 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.security.ssl;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.xpack.security.ssl.TrustConfig.Reloadable.Listener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService.Frequency;
|
||||
|
||||
import javax.net.ssl.KeyManager;
|
||||
import javax.net.ssl.SSLEngine;
|
||||
import javax.net.ssl.X509ExtendedKeyManager;
|
||||
import javax.net.ssl.X509ExtendedTrustManager;
|
||||
import java.io.IOException;
|
||||
import java.net.Socket;
|
||||
import java.nio.file.Path;
|
||||
import java.security.Principal;
|
||||
|
@ -28,18 +21,20 @@ import java.util.List;
|
|||
|
||||
abstract class KeyConfig extends TrustConfig {
|
||||
|
||||
KeyConfig(boolean includeSystem, boolean reloadEnabled) {
|
||||
super(includeSystem, reloadEnabled);
|
||||
private X509ExtendedKeyManager[] keyManagers = null;
|
||||
|
||||
KeyConfig(boolean includeSystem) {
|
||||
super(includeSystem);
|
||||
}
|
||||
|
||||
static final KeyConfig NONE = new KeyConfig(false, false) {
|
||||
static final KeyConfig NONE = new KeyConfig(false) {
|
||||
@Override
|
||||
X509ExtendedKeyManager[] loadKeyManagers(@Nullable Environment environment) {
|
||||
X509ExtendedKeyManager loadKeyManager(@Nullable Environment environment) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment) {
|
||||
X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -58,39 +53,48 @@ abstract class KeyConfig extends TrustConfig {
|
|||
}
|
||||
};
|
||||
|
||||
final KeyManager[] keyManagers(@Nullable Environment environment, @Nullable ResourceWatcherService resourceWatcherService,
|
||||
@Nullable Listener listener) {
|
||||
X509ExtendedKeyManager[] keyManagers = loadKeyManagers(environment);
|
||||
if (reloadEnabled && resourceWatcherService != null && listener != null) {
|
||||
ReloadableX509KeyManager reloadableX509KeyManager = new ReloadableX509KeyManager(keyManagers[0], environment);
|
||||
List<Path> filesToMonitor = filesToMonitor(environment);
|
||||
if (filesToMonitor.isEmpty() == false) {
|
||||
ChangeListener changeListener = new ChangeListener(filesToMonitor, reloadableX509KeyManager, listener);
|
||||
try {
|
||||
for (Path dir : directoriesToMonitor(filesToMonitor)) {
|
||||
FileWatcher fileWatcher = new FileWatcher(dir);
|
||||
fileWatcher.addListener(changeListener);
|
||||
resourceWatcherService.add(fileWatcher, Frequency.HIGH);
|
||||
}
|
||||
return new X509ExtendedKeyManager[]{reloadableX509KeyManager};
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchException("failed to add file watcher", e);
|
||||
}
|
||||
}
|
||||
final synchronized X509ExtendedKeyManager[] keyManagers(@Nullable Environment environment) {
|
||||
if (keyManagers == null) {
|
||||
X509ExtendedKeyManager keyManager = loadKeyManager(environment);
|
||||
setKeyManagers(keyManager);
|
||||
}
|
||||
return keyManagers;
|
||||
}
|
||||
|
||||
abstract X509ExtendedKeyManager[] loadKeyManagers(@Nullable Environment environment);
|
||||
@Override
|
||||
synchronized void reload(@Nullable Environment environment) {
|
||||
if (trustManagers == null) {
|
||||
// trust managers were never initialized... do it lazily!
|
||||
X509ExtendedKeyManager loadedKeyManager = loadKeyManager(environment);
|
||||
setKeyManagers(loadedKeyManager);
|
||||
return;
|
||||
}
|
||||
|
||||
final class ReloadableX509KeyManager extends X509ExtendedKeyManager implements Reloadable {
|
||||
X509ExtendedTrustManager loadedTrustManager = loadAndMergeIfNecessary(environment);
|
||||
X509ExtendedKeyManager loadedKeyManager = loadKeyManager(environment);
|
||||
setTrustManagers(loadedTrustManager);
|
||||
setKeyManagers(loadedKeyManager);
|
||||
}
|
||||
|
||||
final synchronized void setKeyManagers(X509ExtendedKeyManager loadedKeyManager) {
|
||||
if (loadedKeyManager == null) {
|
||||
this.keyManagers = new X509ExtendedKeyManager[0];
|
||||
} else if (this.keyManagers == null || this.keyManagers.length == 0) {
|
||||
this.keyManagers = new X509ExtendedKeyManager[] { new ReloadableX509KeyManager(loadedKeyManager) };
|
||||
} else {
|
||||
assert this.keyManagers[0] instanceof ReloadableX509KeyManager;
|
||||
((ReloadableX509KeyManager)this.keyManagers[0]).setKeyManager(loadedKeyManager);
|
||||
}
|
||||
}
|
||||
|
||||
abstract X509ExtendedKeyManager loadKeyManager(@Nullable Environment environment);
|
||||
|
||||
final class ReloadableX509KeyManager extends X509ExtendedKeyManager {
|
||||
|
||||
private final Environment environment;
|
||||
private volatile X509ExtendedKeyManager keyManager;
|
||||
|
||||
ReloadableX509KeyManager(X509ExtendedKeyManager keyManager, @Nullable Environment environment) {
|
||||
ReloadableX509KeyManager(X509ExtendedKeyManager keyManager) {
|
||||
this.keyManager = keyManager;
|
||||
this.environment = environment;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -133,13 +137,13 @@ abstract class KeyConfig extends TrustConfig {
|
|||
return keyManager.chooseEngineServerAlias(s, principals, engine);
|
||||
}
|
||||
|
||||
public synchronized void reload() {
|
||||
X509ExtendedKeyManager[] keyManagers = loadKeyManagers(environment);
|
||||
this.keyManager = keyManagers[0];
|
||||
}
|
||||
|
||||
synchronized void setKeyManager(X509ExtendedKeyManager x509ExtendedKeyManager) {
|
||||
this.keyManager = x509ExtendedKeyManager;
|
||||
}
|
||||
|
||||
// pkg-private accessor for testing
|
||||
X509ExtendedKeyManager getKeyManager() {
|
||||
return keyManager;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -28,23 +28,27 @@ class PEMKeyConfig extends KeyConfig {
    final String keyPassword;
    final List<String> certPaths;

    PEMKeyConfig(boolean includeSystem, boolean reloadEnabled, String keyPath, String keyPassword, List<String> certPaths) {
        super(includeSystem, reloadEnabled);
    PEMKeyConfig(boolean includeSystem, String keyPath, String keyPassword, List<String> certPaths) {
        super(includeSystem);
        this.keyPath = keyPath;
        this.keyPassword = keyPassword;
        this.certPaths = certPaths;
    }

    @Override
    X509ExtendedKeyManager[] loadKeyManagers(@Nullable Environment environment) {
    X509ExtendedKeyManager loadKeyManager(@Nullable Environment environment) {
        // password must be non-null for keystore...
        char[] password = keyPassword == null ? new char[0] : keyPassword.toCharArray();
        try {
            PrivateKey privateKey = readPrivateKey(CertUtils.resolvePath(keyPath, environment));
            Certificate[] certificateChain = CertUtils.readCertificates(certPaths, environment);
            // password must be non-null for keystore...
            char[] password = keyPassword == null ? new char[0] : keyPassword.toCharArray();
            return CertUtils.keyManagers(certificateChain, privateKey, password);
        } catch (Exception e) {
            throw new ElasticsearchException("failed to initialize a KeyManagerFactory", e);
        } finally {
            if (password != null) {
                Arrays.fill(password, (char) 0);
            }
        }
    }

@@ -60,7 +64,7 @@ class PEMKeyConfig extends KeyConfig {
    }

    @Override
    X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment) {
    X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment) {
        try {
            Certificate[] certificates = CertUtils.readCertificates(certPaths, environment);
            return CertUtils.trustManagers(certificates);

@@ -20,13 +20,13 @@ class PEMTrustConfig extends TrustConfig {

    final List<String> caPaths;

    PEMTrustConfig(boolean includeSystem, boolean reloadEnabled, List<String> caPaths) {
        super(includeSystem, reloadEnabled);
    PEMTrustConfig(boolean includeSystem, List<String> caPaths) {
        super(includeSystem);
        this.caPaths = caPaths;
    }

    @Override
    X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment) {
    X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment) {
        try {
            Certificate[] certificates = CertUtils.readCertificates(caPaths, environment);
            return CertUtils.trustManagers(certificates);

@@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.ssl;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

@@ -14,10 +16,12 @@ import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;

import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.support.OptionalSettings.createInt;

@@ -43,6 +47,42 @@ public abstract class SSLConfiguration {

    public abstract List<String> supportedProtocols();

    /**
     * Provides the list of paths to files that back this configuration
     */
    public List<Path> filesToMonitor(@Nullable Environment environment) {
        if (keyConfig() == trustConfig()) {
            return keyConfig().filesToMonitor(environment);
        }
        List<Path> paths = new ArrayList<>(keyConfig().filesToMonitor(environment));
        paths.addAll(trustConfig().filesToMonitor(environment));
        return paths;
    }

    /**
     * Reloads the portion of this configuration that makes use of the modified file
     */
    public void reload(Path file, @Nullable Environment environment) {
        if (keyConfig() == trustConfig()) {
            keyConfig().reload(environment);
            return;
        }

        for (Path path : keyConfig().filesToMonitor(environment)) {
            if (file.equals(path)) {
                keyConfig().reload(environment);
                break;
            }
        }

        for (Path path : trustConfig().filesToMonitor(environment)) {
            if (file.equals(path)) {
                trustConfig().reload(environment);
                break;
            }
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;

@@ -258,14 +298,14 @@ public abstract class SSLConfiguration {
            if (certPaths == null) {
                throw new IllegalArgumentException("you must specify the certificates to use with the key");
            }
            return new PEMKeyConfig(includeSystem, reloadEnabled, keyPath, keyPassword, certPaths);
            return new PEMKeyConfig(includeSystem, keyPath, keyPassword, certPaths);
        } else {
            assert keyStorePath != null;
            String keyStorePassword = KEYSTORE_PASSWORD_SETTING.get(settings).orElse(null);
            String keyStoreAlgorithm = KEYSTORE_ALGORITHM_SETTING.get(settings);
            String keyStoreKeyPassword = KEYSTORE_KEY_PASSWORD_SETTING.get(settings).orElse(keyStorePassword);
            String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
            return new StoreKeyConfig(includeSystem, reloadEnabled, keyStorePath, keyStorePassword, keyStoreKeyPassword,
            return new StoreKeyConfig(includeSystem, keyStorePath, keyStorePassword, keyStoreKeyPassword,
                    keyStoreAlgorithm, trustStoreAlgorithm);
        }
    }

@@ -274,19 +314,18 @@ public abstract class SSLConfiguration {
        String trustStorePath = TRUSTSTORE_PATH_SETTING.get(settings).orElse(null);
        List<String> caPaths = getListOrNull(CA_PATHS_SETTING, settings);
        boolean includeSystem = INCLUDE_JDK_CERTS_SETTING.get(settings);
        boolean reloadEnabled = RELOAD_ENABLED_SETTING.get(settings);
        if (trustStorePath != null && caPaths != null) {
            throw new IllegalArgumentException("you cannot specify a truststore and ca files");
        } else if (caPaths != null) {
            return new PEMTrustConfig(includeSystem, reloadEnabled, caPaths);
            return new PEMTrustConfig(includeSystem, caPaths);
        } else if (trustStorePath != null) {
            String trustStorePassword = TRUSTSTORE_PASSWORD_SETTING.get(settings).orElse(null);
            String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
            return new StoreTrustConfig(includeSystem, reloadEnabled, trustStorePath, trustStorePassword, trustStoreAlgorithm);
            return new StoreTrustConfig(includeSystem, trustStorePath, trustStorePassword, trustStoreAlgorithm);
        } else if (keyInfo != KeyConfig.NONE) {
            return keyInfo;
        } else {
            return new StoreTrustConfig(includeSystem, reloadEnabled, null, null, null);
            return new StoreTrustConfig(includeSystem, null, null, null);
        }
    }
}

@@ -413,14 +452,14 @@ public abstract class SSLConfiguration {
            if (certPaths == null) {
                throw new IllegalArgumentException("you must specify the certificates to use with the key");
            }
            return new PEMKeyConfig(includeSystem, reloadEnabled, keyPath, keyPassword, certPaths);
            return new PEMKeyConfig(includeSystem, keyPath, keyPassword, certPaths);
        } else {
            assert keyStorePath != null;
            String keyStorePassword = KEYSTORE_PASSWORD_SETTING.get(settings).orElse(null);
            String keyStoreAlgorithm = KEYSTORE_ALGORITHM_SETTING.get(settings);
            String keyStoreKeyPassword = KEYSTORE_KEY_PASSWORD_SETTING.get(settings).orElse(keyStorePassword);
            String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
            return new StoreKeyConfig(includeSystem, reloadEnabled, keyStorePath, keyStorePassword, keyStoreKeyPassword,
            return new StoreKeyConfig(includeSystem, keyStorePath, keyStorePassword, keyStoreKeyPassword,
                    keyStoreAlgorithm, trustStoreAlgorithm);
        }
    }

@@ -431,11 +470,11 @@ public abstract class SSLConfiguration {
        if (trustStorePath != null && caPaths != null) {
            throw new IllegalArgumentException("you cannot specify a truststore and ca files");
        } else if (caPaths != null) {
            return new PEMTrustConfig(INCLUDE_JDK_CERTS_SETTING.get(settings), RELOAD_ENABLED_SETTING.get(settings), caPaths);
            return new PEMTrustConfig(INCLUDE_JDK_CERTS_SETTING.get(settings), caPaths);
        } else if (trustStorePath != null) {
            String trustStorePassword = TRUSTSTORE_PASSWORD_SETTING.get(settings).orElse(null);
            String trustStoreAlgorithm = TRUSTSTORE_ALGORITHM_SETTING.get(settings);
            return new StoreTrustConfig(INCLUDE_JDK_CERTS_SETTING.get(settings), RELOAD_ENABLED_SETTING.get(settings),
            return new StoreTrustConfig(INCLUDE_JDK_CERTS_SETTING.get(settings),
                    trustStorePath, trustStorePassword, trustStoreAlgorithm);
        } else if (keyConfig == global.keyConfig()) {
            return global.trustConfig();

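As an aside to the filesToMonitor/reload additions above: the reload decision is simply "does the changed file exactly match one of the files backing this part of the configuration". A small stand-alone sketch of that matching step, using plain JDK classes and hypothetical file paths only for illustration:

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public final class ReloadDecision {

    // true when the changed file is one of the files backing the key material
    static boolean affectsKeyConfig(Path changedFile, List<Path> keyConfigFiles) {
        for (Path monitored : keyConfigFiles) {
            if (changedFile.equals(monitored)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<Path> keyFiles = Arrays.asList(Paths.get("config/x-pack/node.key"), Paths.get("config/x-pack/node.crt"));
        Path changed = Paths.get("config/x-pack/node.crt");
        System.out.println(affectsKeyConfig(changed, keyFiles)); // true -> only the key config needs reloading
    }
}
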
@@ -0,0 +1,150 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.ssl;

import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.watcher.ResourceWatcherService.Frequency;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSessionContext;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;

/**
 * Ensures that the files backing an {@link SSLConfiguration} are monitored for changes and the underlying key/trust material is reloaded
 * and the {@link SSLContext} has existing sessions invalidated to force the use of the new key/trust material
 */
public class SSLConfigurationReloader extends AbstractComponent implements AbstractSSLService.Listener {

    private final ConcurrentHashMap<Path, ChangeListener> pathToChangeListenerMap = new ConcurrentHashMap<>();
    private final Environment environment;
    private final ResourceWatcherService resourceWatcherService;
    private final ServerSSLService serverSSLService;
    private final ClientSSLService clientSSLService;

    public SSLConfigurationReloader(Settings settings, Environment env, ServerSSLService serverSSLService,
                                    ClientSSLService clientSSLService, ResourceWatcherService resourceWatcher) {
        super(settings);
        this.environment = env;
        this.resourceWatcherService = resourceWatcher;
        this.serverSSLService = serverSSLService;
        this.clientSSLService = clientSSLService;
        serverSSLService.setListener(this);
        clientSSLService.setListener(this);
    }

    @Override
    public void onSSLContextLoaded(SSLConfiguration sslConfiguration) {
        startWatching(Collections.singleton(sslConfiguration));
    }

    /**
     * Collects all of the directories that need to be monitored for the provided {@link SSLConfiguration} instances and ensures that
     * they are being watched for changes
     */
    private void startWatching(Collection<SSLConfiguration> sslConfigurations) {
        for (SSLConfiguration sslConfiguration : sslConfigurations) {
            for (Path directory : directoriesToMonitor(sslConfiguration.filesToMonitor(environment))) {
                pathToChangeListenerMap.compute(directory, (path, listener) -> {
                    if (listener != null) {
                        listener.addSSLConfiguration(sslConfiguration);
                        return listener;
                    }

                    ChangeListener changeListener = new ChangeListener();
                    changeListener.addSSLConfiguration(sslConfiguration);
                    FileWatcher fileWatcher = new FileWatcher(path);
                    fileWatcher.addListener(changeListener);
                    try {
                        resourceWatcherService.add(fileWatcher, Frequency.HIGH);
                        return changeListener;
                    } catch (IOException e) {
                        logger.error("failed to start watching directory [{}] for ssl configuration [{}]", path, sslConfiguration);
                    }
                    return null;
                });
            }
        }
    }

    /**
     * Invalidates all of the sessions in the provided {@link SSLContext}
     */
    private static void invalidateAllSessions(SSLContext context) {
        if (context != null) {
            invalidateSessions(context.getClientSessionContext());
            invalidateSessions(context.getServerSessionContext());
        }
    }

    /**
     * Invalidates the sessions in the provided {@link SSLSessionContext}
     */
    private static void invalidateSessions(SSLSessionContext sslSessionContext) {
        Enumeration<byte[]> sessionIds = sslSessionContext.getIds();
        while (sessionIds.hasMoreElements()) {
            byte[] sessionId = sessionIds.nextElement();
            sslSessionContext.getSession(sessionId).invalidate();
        }
    }

    /**
     * Returns a unique set of directories that need to be monitored based on the provided file paths
     */
    private static Set<Path> directoriesToMonitor(List<Path> filePaths) {
        Set<Path> paths = new HashSet<>();
        for (Path path : filePaths) {
            paths.add(path.getParent());
        }
        return paths;
    }

    private class ChangeListener implements FileChangesListener {

        private final CopyOnWriteArraySet<SSLConfiguration> sslConfigurations = new CopyOnWriteArraySet<>();

        /**
         * Adds the given ssl configuration to those that have files within the directory watched by this change listener
         */
        private void addSSLConfiguration(SSLConfiguration sslConfiguration) {
            sslConfigurations.add(sslConfiguration);
        }

        @Override
        public void onFileCreated(Path file) {
            onFileChanged(file);
        }

        @Override
        public void onFileDeleted(Path file) {
            onFileChanged(file);
        }

        @Override
        public void onFileChanged(Path file) {
            for (SSLConfiguration sslConfiguration : sslConfigurations) {
                if (sslConfiguration.filesToMonitor(environment).contains(file)) {
                    sslConfiguration.reload(file, environment);
                    invalidateAllSessions(serverSSLService.getSSLContext(sslConfiguration));
                    invalidateAllSessions(clientSSLService.getSSLContext(sslConfiguration));
                }
            }
        }
    }
}

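The session invalidation in SSLConfigurationReloader uses only standard JSSE calls. A compact stand-alone sketch of the same walk over the client- and server-side session caches; the class name SessionInvalidator is illustrative, and the null check on getSession is an extra guard, not taken from the change:

import java.util.Enumeration;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSessionContext;

public final class SessionInvalidator {

    // invalidates every cached client- and server-side session of the given context
    public static void invalidateAllSessions(SSLContext context) {
        invalidate(context.getClientSessionContext());
        invalidate(context.getServerSessionContext());
    }

    private static void invalidate(SSLSessionContext sessionContext) {
        Enumeration<byte[]> ids = sessionContext.getIds();
        while (ids.hasMoreElements()) {
            byte[] id = ids.nextElement();
            // getSession may return null if the session expired between getIds() and this call
            SSLSession session = sessionContext.getSession(id);
            if (session != null) {
                session.invalidate();
            }
        }
    }

    public static void main(String[] args) throws Exception {
        SSLContext context = SSLContext.getInstance("TLS");
        context.init(null, null, null);
        invalidateAllSessions(context); // no cached sessions yet, but demonstrates the call
    }
}

Invalidation does not terminate open connections; it only prevents resumption of old sessions, so new handshakes pick up the reloaded key and trust material.
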
@@ -7,14 +7,12 @@ package org.elasticsearch.xpack.security.ssl;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.ssl.SSLConfiguration.Global;

public class ServerSSLService extends AbstractSSLService {

    public ServerSSLService(Settings settings, Environment environment, Global globalSSLConfiguration,
                            ResourceWatcherService resourceWatcherService) {
        super(settings, environment, globalSSLConfiguration, resourceWatcherService);
    public ServerSSLService(Settings settings, Environment environment, Global globalSSLConfiguration) {
        super(settings, environment, globalSSLConfiguration);
    }

    @Override

@@ -26,9 +26,9 @@ class StoreKeyConfig extends KeyConfig {
    final String keyPassword;
    final String trustStoreAlgorithm;

    StoreKeyConfig(boolean includeSystem, boolean reloadEnabled, String keyStorePath, String keyStorePassword, String keyPassword,
    StoreKeyConfig(boolean includeSystem, String keyStorePath, String keyStorePassword, String keyPassword,
                   String keyStoreAlgorithm, String trustStoreAlgorithm) {
        super(includeSystem, reloadEnabled);
        super(includeSystem);
        this.keyStorePath = keyStorePath;
        this.keyStorePassword = keyStorePassword;
        this.keyPassword = keyPassword;

@@ -37,7 +37,7 @@ class StoreKeyConfig extends KeyConfig {
    }

    @Override
    X509ExtendedKeyManager[] loadKeyManagers(@Nullable Environment environment) {
    X509ExtendedKeyManager loadKeyManager(@Nullable Environment environment) {
        try (InputStream in = Files.newInputStream(CertUtils.resolvePath(keyStorePath, environment))) {
            // TODO remove reliance on JKS since we can PKCS12 stores...
            KeyStore ks = KeyStore.getInstance("jks");

@@ -50,14 +50,9 @@ class StoreKeyConfig extends KeyConfig {
    }

    @Override
    X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment) {
        try (InputStream in = Files.newInputStream(CertUtils.resolvePath(keyStorePath, environment))) {
            // TODO remove reliance on JKS since we can PKCS12 stores...
            KeyStore ks = KeyStore.getInstance("jks");
            assert keyStorePassword != null;
            ks.load(in, keyStorePassword.toCharArray());

            return CertUtils.trustManagers(ks, trustStoreAlgorithm);
    X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment) {
        try {
            return CertUtils.trustManagers(keyStorePath, keyStorePassword, trustStoreAlgorithm, environment);
        } catch (Exception e) {
            throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e);
        }

@@ -23,25 +23,20 @@ class StoreTrustConfig extends TrustConfig {
    final String trustStorePassword;
    final String trustStoreAlgorithm;

    StoreTrustConfig(boolean includeSystem, boolean reloadEnabled, String trustStorePath, String trustStorePassword,
                     String trustStoreAlgorithm) {
        super(includeSystem, reloadEnabled);
    StoreTrustConfig(boolean includeSystem, String trustStorePath, String trustStorePassword, String trustStoreAlgorithm) {
        super(includeSystem);
        this.trustStorePath = trustStorePath;
        this.trustStorePassword = trustStorePassword;
        this.trustStoreAlgorithm = trustStoreAlgorithm;
    }

    @Override
    X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment) {
    X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment) {
        if (trustStorePath == null) {
            return null;
        }
        try (InputStream in = Files.newInputStream(CertUtils.resolvePath(trustStorePath, environment))) {
            // TODO remove reliance on JKS since we can PKCS12 stores...
            KeyStore trustStore = KeyStore.getInstance("jks");
            assert trustStorePassword != null;
            trustStore.load(in, trustStorePassword.toCharArray());
            return CertUtils.trustManagers(trustStore, trustStoreAlgorithm);
        try {
            return CertUtils.trustManagers(trustStorePath, trustStorePassword, trustStoreAlgorithm, environment);
        } catch (Exception e) {
            throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e);
        }

@@ -8,60 +8,53 @@ package org.elasticsearch.xpack.security.ssl;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.ssl.TrustConfig.Reloadable.Listener;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.watcher.ResourceWatcherService.Frequency;

import javax.net.ssl.SSLEngine;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedTrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.Socket;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

abstract class TrustConfig {

    protected final boolean includeSystem;
    protected final boolean reloadEnabled;

    TrustConfig(boolean includeSystem, boolean reloadEnabled) {
    X509ExtendedTrustManager[] trustManagers = null;

    TrustConfig(boolean includeSystem) {
        this.includeSystem = includeSystem;
        this.reloadEnabled = reloadEnabled;
    }

    final TrustManager[] trustManagers(@Nullable Environment environment, @Nullable ResourceWatcherService resourceWatcherService,
                                       @Nullable Listener listener) {
        X509ExtendedTrustManager[] trustManagers = loadAndMergeIfNecessary(environment);
        if (reloadEnabled && resourceWatcherService != null && listener != null) {
            ReloadableTrustManager reloadableTrustManager = new ReloadableTrustManager(trustManagers[0], environment);
            try {
                List<Path> filesToMonitor = filesToMonitor(environment);
                ChangeListener changeListener = new ChangeListener(filesToMonitor, reloadableTrustManager, listener);
                for (Path path : directoriesToMonitor(filesToMonitor)) {
                    FileWatcher fileWatcher = new FileWatcher(path);
                    fileWatcher.addListener(changeListener);
                    resourceWatcherService.add(fileWatcher, Frequency.HIGH);
                }
                return new X509ExtendedTrustManager[] { reloadableTrustManager };
            } catch (IOException e) {
                throw new ElasticsearchException("failed to add file watcher", e);
            }
    final synchronized X509ExtendedTrustManager[] trustManagers(@Nullable Environment environment) {
        if (trustManagers == null) {
            X509ExtendedTrustManager loadedTrustManager = loadAndMergeIfNecessary(environment);
            setTrustManagers(loadedTrustManager);
        }
        return trustManagers;
    }

    abstract X509ExtendedTrustManager[] nonSystemTrustManagers(@Nullable Environment environment);
    synchronized void reload(@Nullable Environment environment) {
        X509ExtendedTrustManager loadedTrustManager = loadAndMergeIfNecessary(environment);
        setTrustManagers(loadedTrustManager);
    }

    final synchronized void setTrustManagers(X509ExtendedTrustManager loadedTrustManager) {
        if (loadedTrustManager == null) {
            this.trustManagers = new X509ExtendedTrustManager[0];
        } else if (this.trustManagers == null || this.trustManagers.length == 0) {
            this.trustManagers = new X509ExtendedTrustManager[] { new ReloadableTrustManager(loadedTrustManager) };
        } else {
            assert this.trustManagers[0] instanceof ReloadableTrustManager;
            ((ReloadableTrustManager)this.trustManagers[0]).setTrustManager(loadedTrustManager);
        }
    }

    abstract X509ExtendedTrustManager nonSystemTrustManager(@Nullable Environment environment);

    abstract void validate();

@@ -69,56 +62,47 @@ abstract class TrustConfig {

    public abstract String toString();

    private X509ExtendedTrustManager[] loadAndMergeIfNecessary(@Nullable Environment environment) {
        X509ExtendedTrustManager[] nonSystemTrustManagers = nonSystemTrustManagers(environment);
    final X509ExtendedTrustManager loadAndMergeIfNecessary(@Nullable Environment environment) {
        X509ExtendedTrustManager trustManager = nonSystemTrustManager(environment);
        if (includeSystem) {
            return mergeWithSystem(nonSystemTrustManagers);
        } else if (nonSystemTrustManagers == null || nonSystemTrustManagers.length == 0) {
            return new X509ExtendedTrustManager[0];
            trustManager = mergeWithSystem(trustManager);
        } else if (trustManager == null) {
            return null;
        }
        return nonSystemTrustManagers;
        return trustManager;
    }

    private X509ExtendedTrustManager[] mergeWithSystem(X509ExtendedTrustManager[] nonSystemTrustManagers) {
    private X509ExtendedTrustManager mergeWithSystem(X509ExtendedTrustManager nonSystemTrustManager) {
        try {
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            tmf.init((KeyStore) null);
            TrustManager[] systemTrustManagers = tmf.getTrustManagers();
            X509ExtendedTrustManager system = findFirstX509TrustManager(systemTrustManagers);
            if (nonSystemTrustManagers == null || nonSystemTrustManagers.length == 0) {
                return new X509ExtendedTrustManager[] { system };
            X509ExtendedTrustManager system = findFirstX509ExtendedTrustManager(systemTrustManagers);
            if (nonSystemTrustManager == null) {
                return system;
            }

            return new X509ExtendedTrustManager[] { new CombiningX509TrustManager(nonSystemTrustManagers[0], system) };
            return new CombiningX509TrustManager(nonSystemTrustManager, system);
        } catch (Exception e) {
            throw new ElasticsearchException("failed to initialize a trust managers", e);
        }
    }

    private static X509ExtendedTrustManager findFirstX509TrustManager(TrustManager[] trustManagers) {
    private static X509ExtendedTrustManager findFirstX509ExtendedTrustManager(TrustManager[] trustManagers) {
        X509ExtendedTrustManager x509TrustManager = null;
        for (TrustManager trustManager : trustManagers) {
            if (trustManager instanceof X509TrustManager) {
            if (trustManager instanceof X509ExtendedTrustManager) {
                // first one wins like in the JDK
                x509TrustManager = (X509ExtendedTrustManager) trustManager;
                break;
            }
        }
        if (x509TrustManager == null) {
            throw new IllegalArgumentException("did not find a X509TrustManager");
            throw new IllegalArgumentException("did not find a X509ExtendedTrustManager");
        }
        return x509TrustManager;
    }

    static Set<Path> directoriesToMonitor(List<Path> filePaths) {
        Set<Path> paths = new HashSet<>();
        for (Path path : filePaths) {
            assert Files.isDirectory(path) == false;
            paths.add(path.getParent());
        }
        return paths;
    }

    private static class CombiningX509TrustManager extends X509ExtendedTrustManager {

        private final X509ExtendedTrustManager first;

@@ -196,14 +180,12 @@ abstract class TrustConfig {
        }
    }

    final class ReloadableTrustManager extends X509ExtendedTrustManager implements Reloadable {
    final class ReloadableTrustManager extends X509ExtendedTrustManager {

        private final Environment environment;
        private volatile X509ExtendedTrustManager trustManager;

        ReloadableTrustManager(X509ExtendedTrustManager trustManager, @Nullable Environment environment) {
        ReloadableTrustManager(X509ExtendedTrustManager trustManager) {
            this.trustManager = trustManager;
            this.environment = environment;
        }

        @Override

@@ -241,59 +223,12 @@ abstract class TrustConfig {
            return trustManager.getAcceptedIssuers();
        }

        public synchronized void reload() {
            X509ExtendedTrustManager[] array = loadAndMergeIfNecessary(environment);
            this.trustManager = array[0];
        }

        synchronized void setTrustManager(X509ExtendedTrustManager trustManager) {
            this.trustManager = trustManager;
        }
    }

    interface Reloadable {

        void reload();

        interface Listener {

            void onReload();

            void onFailure(Exception e);

        }
    }

    protected static class ChangeListener implements FileChangesListener {

        private final List<Path> paths;
        private final Reloadable reloadable;
        private final Listener listener;

        protected ChangeListener(List<Path> paths, Reloadable reloadable, Listener listener) {
            this.paths = paths;
            this.reloadable = reloadable;
            this.listener = listener;
        }

        @Override
        public void onFileDeleted(Path file) {
            onFileChanged(file);
        }

        @Override
        public void onFileChanged(Path file) {
            for (Path path : paths) {
                if (file.equals(path)) {
                    try {
                        reloadable.reload();
                        listener.onReload();
                    } catch (Exception e) {
                        listener.onFailure(e);
                    }
                    break;
                }
            }
        X509ExtendedTrustManager getTrustManager() {
            return trustManager;
        }
    }
}

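For reference, the mergeWithSystem/findFirstX509ExtendedTrustManager logic above builds on the standard JDK idiom for obtaining the platform's default trust manager: a TrustManagerFactory initialized with a null KeyStore returns trust managers backed by the JVM's default trust store. A stand-alone sketch of just that lookup; the class name SystemTrust is illustrative:

import java.security.KeyStore;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedTrustManager;

public final class SystemTrust {

    // returns the JVM's default trust manager; the first X509ExtendedTrustManager wins, mirroring the JDK
    public static X509ExtendedTrustManager systemTrustManager() throws Exception {
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init((KeyStore) null);
        for (TrustManager tm : tmf.getTrustManagers()) {
            if (tm instanceof X509ExtendedTrustManager) {
                return (X509ExtendedTrustManager) tm;
            }
        }
        throw new IllegalStateException("no X509ExtendedTrustManager available");
    }

    public static void main(String[] args) throws Exception {
        X509ExtendedTrustManager system = systemTrustManager();
        System.out.println("accepted issuers: " + system.getAcceptedIssuers().length);
    }
}

A combining trust manager, as in the diff, would consult the configured trust manager first and fall back to this system one, so both custom CAs and the JDK defaults are honoured.
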
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.transport.filter;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;

@@ -69,6 +70,10 @@ public class IPFilter {
    public static final Setting<List<String>> HTTP_FILTER_DENY_SETTING = Setting.listSetting(setting("http.filter.deny"),
            HTTP_FILTER_DENY_FALLBACK, Function.identity(), Property.Dynamic, Property.NodeScope);

    public static final Map<String, Object> DISABLED_USAGE_STATS = new MapBuilder<String, Object>()
            .put("http", false)
            .put("transport", false)
            .immutableMap();

    public static final SecurityIpFilterRule DEFAULT_PROFILE_ACCEPT_ALL = new SecurityIpFilterRule(true, "default:accept_all") {
        @Override

@@ -132,8 +137,11 @@ public class IPFilter {

    public Map<String, Object> usageStats() {
        Map<String, Object> map = new HashMap<>(2);
        map.put("http", Collections.singletonMap("enabled", isHttpFilterEnabled));
        map.put("transport", Collections.singletonMap("enabled", isIpFilterEnabled));
        final boolean httpFilterEnabled = isHttpFilterEnabled && (httpAllowFilter.isEmpty() == false || httpDenyFilter.isEmpty() == false);
        final boolean transportFilterEnabled = isIpFilterEnabled &&
                (transportAllowFilter.isEmpty() == false || transportDenyFilter.isEmpty() == false);
        map.put("http", httpFilterEnabled);
        map.put("transport", transportFilterEnabled);
        return map;
    }

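The usageStats change reports a filter as enabled only when the corresponding setting is on and at least one allow or deny rule is actually configured. A tiny stand-alone sketch of that boolean, with illustrative names only:

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class IpFilterUsage {

    // the filter only counts as "enabled" when the flag is on AND at least one rule exists
    static boolean effectivelyEnabled(boolean settingEnabled, List<String> allowRules, List<String> denyRules) {
        return settingEnabled && (allowRules.isEmpty() == false || denyRules.isEmpty() == false);
    }

    public static void main(String[] args) {
        Map<String, Object> stats = new HashMap<>(2);
        stats.put("http", effectivelyEnabled(true, Collections.emptyList(), Collections.emptyList()));
        stats.put("transport", effectivelyEnabled(true, Collections.singletonList("allow 10.0.0.0/8"), Collections.emptyList()));
        System.out.println(stats); // http -> false (no rules), transport -> true (one allow rule)
    }
}
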
@@ -10,9 +10,9 @@ import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;

@@ -28,7 +28,6 @@ import java.util.Collections;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class BulkUpdateTests extends SecurityIntegTestCase {

@@ -42,7 +41,8 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
    }

    public void testThatBulkUpdateDoesNotLoseFields() {
        assertThat(client().prepareIndex("index1", "type").setSource("{\"test\": \"test\"}").setId("1").get().isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED,
                client().prepareIndex("index1", "type").setSource("{\"test\": \"test\"}").setId("1").get().getResult());
        GetResponse getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").setFields("test").get();
        assertThat(getResponse.getField("test").getValue(), equalTo("test"));

@@ -51,9 +51,8 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
        }

        // update with a new field
        boolean created = internalCluster().transportClient().prepareUpdate("index1", "type", "1").setDoc("{\"not test\": \"not test\"}")
                .get().isCreated();
        assertThat(created, is(false));
        assertEquals(DocWriteResponse.Result.UPDATED, internalCluster().transportClient().prepareUpdate("index1", "type", "1")
                .setDoc("{\"not test\": \"not test\"}").get().getResult());
        getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").setFields("test", "not test").get();
        assertThat(getResponse.getField("test").getValue(), equalTo("test"));
        assertThat(getResponse.getField("not test").getValue(), equalTo("not test"));

@@ -65,7 +64,7 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
        // do it in a bulk
        BulkResponse response = internalCluster().transportClient().prepareBulk().add(client().prepareUpdate("index1", "type", "1")
                .setDoc("{\"bulk updated\": \"bulk updated\"}")).get();
        assertThat(((UpdateResponse)response.getItems()[0].getResponse()).isCreated(), is(false));
        assertEquals(DocWriteResponse.Result.UPDATED, response.getItems()[0].getResponse().getResult());
        getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").
                setFields("test", "not test", "bulk updated").get();
        assertThat(getResponse.getField("test").getValue(), equalTo("test"));

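The test updates in this and the following files all follow the same migration: the removed boolean accessors (isCreated, isFound) are replaced by asserting on DocWriteResponse.Result. Assuming an Elasticsearch 5.x test classpath, the new style can be captured in two small helpers; the class and method names here are illustrative, not part of the change:

import org.elasticsearch.action.DocWriteResponse;

import static org.junit.Assert.assertEquals;

final class WriteResultAssertions {

    // replaces assertThat(response.isCreated(), is(true)) from the old API
    static void assertCreated(DocWriteResponse response) {
        assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
    }

    // replaces assertThat(response.isFound(), is(true)) on delete responses
    static void assertDeleted(DocWriteResponse response) {
        assertEquals(DocWriteResponse.Result.DELETED, response.getResult());
    }
}
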
@@ -6,6 +6,7 @@
package org.elasticsearch.integration;

import org.apache.http.message.BasicHeader;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;

@@ -119,7 +120,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase {
                .setDoc("run_as", new String[] { role })
                .setRefreshPolicy(refresh ? IMMEDIATE : NONE)
                .get();
        assertThat(response.isCreated(), is(false));
        assertEquals(DocWriteResponse.Result.UPDATED, response.getResult());
        logger.debug("--> updated role [{}] with run_as", role);
    }

@@ -161,7 +162,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase {
                .prepareDelete(SecurityTemplateService.SECURITY_INDEX_NAME, NativeRolesStore.ROLE_DOC_TYPE, role)
                .setRefreshPolicy(refresh ? IMMEDIATE : NONE)
                .get();
        assertThat(response.isFound(), is(true));
        assertEquals(DocWriteResponse.Result.DELETED, response.getResult());

        assertBusy(new Runnable() {
            @Override

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.integration;

import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.get.GetResponse;

@@ -69,7 +70,7 @@ public class DateMathExpressionIntegTests extends SecurityIntegTestCase {
        IndexResponse response = client.prepareIndex(expression, "type").setSource("foo", "bar")
                .setRefreshPolicy(refeshOnOperation ? IMMEDIATE : NONE).get();

        assertThat(response.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
        assertThat(response.getIndex(), containsString(expectedIndexName));

        if (refeshOnOperation == false) {

@@ -89,7 +90,7 @@ public class DateMathExpressionIntegTests extends SecurityIntegTestCase {
                .setDoc("new", "field")
                .setRefreshPolicy(refeshOnOperation ? IMMEDIATE : NONE)
                .get();
        assertThat(updateResponse.isCreated(), is(false));
        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());

        if (refeshOnOperation == false) {
            client.admin().indices().prepareRefresh(expression).get();

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.integration;

import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;

@@ -186,13 +187,13 @@ public class KibanaUserRoleIntegTests extends SecurityIntegTestCase {
                .setSource("foo", "bar")
                .setRefreshPolicy(IMMEDIATE)
                .get();
        assertThat(response.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, response.getResult());

        DeleteResponse deleteResponse = client()
                .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
                .prepareDelete(index, "dashboard", response.getId())
                .get();
        assertThat(deleteResponse.isFound(), is(true));
        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
    }

    // TODO: When we have an XPackIntegTestCase, this should test that we can send MonitoringBulkActions

@@ -6,6 +6,7 @@
package org.elasticsearch.integration;

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchResponse;

@@ -75,14 +76,14 @@ public class MultipleIndicesPermissionsTests extends SecurityIntegTestCase {
                .startObject()
                .field("name", "value")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());


        indexResponse = index("test1", "type", jsonBuilder()
                .startObject()
                .field("name", "value1")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

        refresh();

@@ -150,13 +151,13 @@ public class MultipleIndicesPermissionsTests extends SecurityIntegTestCase {
                .startObject()
                .field("name", "value_a")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

        indexResponse = index("b", "type", jsonBuilder()
                .startObject()
                .field("name", "value_b")
                .endObject());
        assertThat(indexResponse.isCreated(), is(true));
        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

        refresh();

@@ -3,43 +3,26 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.messy.tests;
package org.elasticsearch.integration;

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.mustache.MustachePlugin;
import org.elasticsearch.script.mustache.TemplateQueryBuilder;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.junit.Before;
import org.junit.BeforeClass;

import java.util.ArrayList;
import java.util.Collection;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

@SecurityIntegTestCase.AwaitsFix(bugUrl = "clean up test to not use mustache templates, otherwise needs many resources here")
public class SecurityCachePermissionIT extends SecurityIntegTestCase {
    static final String READ_ONE_IDX_USER = "read_user";

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        Collection<Class<? extends Plugin>> types = new ArrayList<>();
        types.addAll(super.nodePlugins());
        types.add(MustachePlugin.class);
        return types;
    }
public class SecurityCachePermissionTests extends SecurityIntegTestCase {

    private final String READ_ONE_IDX_USER = "read_user";

    @Override
    public String configUsers() {
        return super.configUsers()

@@ -61,11 +44,6 @@ public class SecurityCachePermissionIT extends SecurityIntegTestCase {
                + "read_one_idx:" + READ_ONE_IDX_USER + "\n";
    }

    @BeforeClass
    public static void checkVersion() {
        assumeTrue("These tests are only valid with elasticsearch 1.6.0+", Version.CURRENT.id >= 1060099);
    }

    @Before
    public void loadData() {
        index("data", "a", "1", "{ \"name\": \"John\", \"token\": \"token1\" }");

@@ -93,33 +71,4 @@ public class SecurityCachePermissionIT extends SecurityIntegTestCase {
        assertThat(e.toString(), containsString("unauthorized"));
    }
}

    public void testThatScriptServiceDoesntLeakData() {
        String source = "{\n" +
                "\"template\": {\n" +
                "  \"query\": {\n" +
                "    \"exists\": {\n" +
                "      \"field\": \"{{name}}\"\n" +
                "    }\n" +
                "  }\n" +
                " }\n" +
                "}";

        //Template template = new Template(source, INLINE, MustacheScriptEngineService.NAME, null, singletonMap("name", "token"));
        SearchResponse response = client().prepareSearch("data").setTypes("a")
                .setQuery(new TemplateQueryBuilder(source, ScriptService.ScriptType.INLINE, singletonMap("name", "token")))
                .execute().actionGet();
        assertThat(response.isTimedOut(), is(false));
        assertThat(response.getHits().hits().length, is(1));

        // Repeat with unauthorized user!!!!
        ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> client()
                .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(READ_ONE_IDX_USER,
                        new SecuredString("changeme".toCharArray()))))
                .prepareSearch("data").setTypes("a")
                .setQuery(new TemplateQueryBuilder(source, ScriptService.ScriptType.INLINE, singletonMap("name", "token")))
                .execute().actionGet());
        assertThat(e.toString(), containsString("ElasticsearchSecurityException[action"));
        assertThat(e.toString(), containsString("unauthorized"));
    }
}