Merge branch 'master' into example_watch
Original commit: elastic/x-pack-elasticsearch@1e27b915a2
This commit is contained in: commit 65ffc63e1e
@@ -1,3 +1,5 @@
import org.elasticsearch.gradle.precommit.LicenseHeadersTask

File checkstyleSuppressions = file('checkstyle_suppressions.xml')
subprojects {
tasks.withType(Checkstyle) {
@@ -7,4 +9,9 @@ subprojects {
suppressions: checkstyleSuppressions
]
}

tasks.withType(LicenseHeadersTask.class) {
approvedLicenses = ['Elasticsearch Confidential']
additionalLicense 'ESCON', 'Elasticsearch Confidential', 'ELASTICSEARCH CONFIDENTIAL'
}
}
@@ -1,9 +1,13 @@
apply plugin: 'elasticsearch.build'

dependencies {
compile project(':x-plugins:elasticsearch:license:base')
compile project(':x-plugins:elasticsearch:x-pack')
compile "org.elasticsearch:elasticsearch:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
}

project.forbiddenPatterns {
exclude '**/*.key'
}

dependencyLicenses.enabled = false
@@ -7,13 +7,12 @@ package org.elasticsearch.license.licensor;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.CryptUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.CryptUtils;
import org.elasticsearch.license.License;

import java.io.IOException;
import java.nio.ByteBuffer;
@@ -20,8 +20,8 @@ import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;

import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPrivateKey;
import static org.elasticsearch.license.core.CryptUtils.writeEncryptedPublicKey;
import static org.elasticsearch.license.CryptUtils.writeEncryptedPrivateKey;
import static org.elasticsearch.license.CryptUtils.writeEncryptedPublicKey;

public class KeyPairGeneratorTool extends Command {
@@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.LicenseSigner;

public class LicenseGeneratorTool extends Command {
@@ -20,8 +20,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.core.LicenseVerifier;
import org.elasticsearch.license.License;
import org.elasticsearch.license.LicenseVerifier;

public class LicenseVerificationTool extends Command {
@@ -6,9 +6,9 @@
package org.elasticsearch.license.licensor;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.DateUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.core.LicenseVerifier;
import org.elasticsearch.license.DateUtils;
import org.elasticsearch.license.License;
import org.elasticsearch.license.LicenseVerifier;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.DateUtils;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.DateUtils;
import org.elasticsearch.license.License;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.MatcherAssert;
import org.joda.time.format.DateTimeFormatter;
@@ -13,7 +13,7 @@ import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.TestUtils;
import org.junit.Before;
@@ -14,7 +14,7 @@ import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.License;
import org.elasticsearch.license.licensor.TestUtils;
import org.junit.Before;
@@ -1,36 +0,0 @@
elasticsearch-license
=====================

Elasticsearch Licensing core, tools and plugin

## Core

Contains core data structures, utilities used by **Licensor** and **Plugin**.

See `core/` and `core-shaded/`

## Licensor

Contains a collection of tools to generate key-pairs, licenses and validate licenses.

See `licensor/`

see [wiki] (https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
[Licensing Tools Usage & Reference] (https://github.com/elasticsearch/elasticsearch-license/wiki/License-Tools-Usage-&-Reference)

## Plugin

**NOTE**: The license plugin has to be packaged with the right public key when being deployed to public repositories in maven
or uploaded to s3. Use `-Dkeys.path=<PATH_TO_KEY_DIR>` with maven command to package the plugin with a specified key.

See `plugin/`

see [Getting Started] (https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/getting-started.asciidoc) to install license plugin.

see [Licensing REST APIs] (https://github.com/elasticsearch/elasticsearch-license/blob/master/docs/license.asciidoc)
to use the license plugin from an elasticsearch deployment.

see [wiki] (https://github.com/elasticsearch/elasticsearch-license/wiki) for documentation on
- [License Plugin Consumer Interface] (https://github.com/elasticsearch/elasticsearch-license/wiki/License---Consumer-Interface)
- [License Plugin Release Process] (https://github.com/elasticsearch/elasticsearch-license/wiki/Plugin-Release-Process)
- [License Plugin Design] (https://github.com/elasticsearch/elasticsearch-license/wiki/License-Plugin--Design)
@@ -1,20 +0,0 @@
apply plugin: 'elasticsearch.build'

dependencies {
compile "org.elasticsearch:elasticsearch:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
}

compactProfile = 'full'

dependencyLicenses.enabled = false

jar {
baseName = 'license-core'
}

modifyPom {
project {
artifactId 'license-core'
}
}
@@ -1,9 +0,0 @@
subprojects {
project.afterEvaluate {
project.forbiddenPatterns {
exclude '**/*.key'
}
// someone figure out what the x-plugins logic should be
project.licenseHeaders.enabled = false
}
}
@@ -1,11 +0,0 @@
es.logger.level=INFO
log4j.rootLogger=${es.logger.level}, out

log4j.logger.org.apache.http=INFO, out
log4j.additivity.org.apache.http=false

log4j.logger.org.elasticsearch.license=TRACE

log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
Binary file not shown.
@@ -13,6 +13,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
@@ -113,6 +114,7 @@ public class IndexAuditIT extends ESIntegTestCase {
protected Settings externalClusterClientSettings() {
return Settings.builder()
.put(Security.USER_SETTING.getKey(), USER + ":" + PASS)
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
.build();
}
@@ -1,6 +0,0 @@
subprojects {
tasks.withType(org.elasticsearch.gradle.precommit.LicenseHeadersTask) {
// someone figure out what the x-plugins logic should be
project.licenseHeaders.enabled = false
}
}
@@ -7,29 +7,30 @@ package org.elasticsearch.xpack.security;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import java.io.IOException;

import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

public class RestIT extends ESRestTestCase {
public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

private static final String USER = "test_user";
private static final String PASS = "changeme";

public RestIT(@Name("yaml") RestTestCandidate testCandidate) {
public CoreWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}

@ParametersFactory
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
return ESRestTestCase.createParameters(0, 1);
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
return ESClientYamlSuiteTestCase.createParameters(0, 1);
}

@Override
@@ -1,12 +0,0 @@

/*
* Messy tests that depend on groovy directly. Fix these!
* https://github.com/elastic/x-plugins/issues/724
*/

apply plugin: 'elasticsearch.messy-test'

dependencies {
testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'testArtifacts')
testCompile project(path: ':modules:lang-groovy', configuration: 'runtime')
}
@@ -1,181 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.util.Callback;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.condition.ConditionBuilders.scriptCondition;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.scriptTransform;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.cron;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
*/
public class ExecutionVarsIT extends AbstractWatcherIntegrationTestCase {

@Override
protected List<Class<? extends Plugin>> pluginTypes() {
List<Class<? extends Plugin>> types = super.pluginTypes();
types.add(GroovyPlugin.class);
return types;
}

@Override
protected boolean timeWarped() {
return true;
}

public void testVars() throws Exception {
WatcherClient watcherClient = watcherClient();

PutWatchResponse putWatchResponse = watcherClient.preparePutWatch("_id").setSource(watchBuilder()
.trigger(schedule(cron("0/1 * * * * ?")))
.input(simpleInput("value", 5))
.condition(scriptCondition("ctx.vars.condition_value = ctx.payload.value + 5; return ctx.vars.condition_value > 5;"))
.transform(scriptTransform("ctx.vars.watch_transform_value = ctx.vars.condition_value + 5; return ctx.payload;"))
.addAction(
"a1",
scriptTransform("ctx.vars.a1_transform_value = ctx.vars.watch_transform_value + 10; ctx.payload" +
".a1_transformed_value = ctx.vars.a1_transform_value; return ctx.payload;"),
loggingAction("_text"))
.addAction(
"a2",
scriptTransform("ctx.vars.a2_transform_value = ctx.vars.watch_transform_value + 20; ctx.payload" +
".a2_transformed_value = ctx.vars.a2_transform_value; return ctx.payload;"),
loggingAction("_text")))
.get();

assertThat(putWatchResponse.isCreated(), is(true));

timeWarp().scheduler().trigger("_id");

flush();
refresh();

SearchResponse searchResponse = searchWatchRecords(new Callback<SearchRequestBuilder>() {
@Override
public void handle(SearchRequestBuilder builder) {
// defaults to match all;
}
});

assertThat(searchResponse.getHits().getTotalHits(), is(1L));

Map<String, Object> source = searchResponse.getHits().getAt(0).getSource();

assertValue(source, "watch_id", is("_id"));
assertValue(source, "state", is("executed"));

// we don't store the computed vars in history
assertValue(source, "vars", nullValue());

assertValue(source, "result.condition.status", is("success"));
assertValue(source, "result.transform.status", is("success"));

List<Map<String, Object>> actions = ObjectPath.eval("result.actions", source);
for (Map<String, Object> action : actions) {
String id = (String) action.get("id");
switch (id) {
case "a1":
assertValue(action, "status", is("success"));
assertValue(action, "transform.status", is("success"));
assertValue(action, "transform.payload.a1_transformed_value", equalTo(25));
break;
case "a2":
assertValue(action, "status", is("success"));
assertValue(action, "transform.status", is("success"));
assertValue(action, "transform.payload.a2_transformed_value", equalTo(35));
break;
default:
fail("there should not be an action result for action with an id other than a1 or a2");
}
}
}

public void testVarsManual() throws Exception {
WatcherClient watcherClient = watcherClient();

PutWatchResponse putWatchResponse = watcherClient.preparePutWatch("_id").setSource(watchBuilder()
.trigger(schedule(cron("0/1 * * * * ? 2020")))
.input(simpleInput("value", 5))
.condition(scriptCondition("ctx.vars.condition_value = ctx.payload.value + 5; return ctx.vars.condition_value > 5;"))
.transform(scriptTransform("ctx.vars.watch_transform_value = ctx.vars.condition_value + 5; return ctx.payload;"))
.addAction(
"a1",
scriptTransform("ctx.vars.a1_transform_value = ctx.vars.watch_transform_value + 10; ctx.payload" +
".a1_transformed_value = ctx.vars.a1_transform_value; return ctx.payload;"),
loggingAction("_text"))
.addAction(
"a2",
scriptTransform("ctx.vars.a2_transform_value = ctx.vars.watch_transform_value + 20; ctx.payload" +
".a2_transformed_value = ctx.vars.a2_transform_value; return ctx.payload;"),
loggingAction("_text")))
.get();

assertThat(putWatchResponse.isCreated(), is(true));

boolean debug = randomBoolean();

ExecuteWatchResponse executeWatchResponse = watcherClient
.prepareExecuteWatch("_id")
.setDebug(debug)
.get();
assertThat(executeWatchResponse.getRecordId(), notNullValue());
XContentSource source = executeWatchResponse.getRecordSource();

assertValue(source, "watch_id", is("_id"));
assertValue(source, "state", is("executed"));

if (debug) {
assertValue(source, "vars.condition_value", is(10));
assertValue(source, "vars.watch_transform_value", is(15));
assertValue(source, "vars.a1_transform_value", is(25));
assertValue(source, "vars.a2_transform_value", is(35));
}

assertValue(source, "result.condition.status", is("success"));
assertValue(source, "result.transform.status", is("success"));

List<Map<String, Object>> actions = source.getValue("result.actions");
for (Map<String, Object> action : actions) {
String id = (String) action.get("id");
switch (id) {
case "a1":
assertValue(action, "status", is("success"));
assertValue(action, "transform.status", is("success"));
assertValue(action, "transform.payload.a1_transformed_value", equalTo(25));
break;
case "a2":
assertValue(action, "status", is("success"));
assertValue(action, "transform.status", is("success"));
assertValue(action, "transform.payload.a2_transformed_value", equalTo(35));
break;
default:
fail("there should not be an action result for action with an id other than a1 or a2");
}
}
}
}
@@ -1,121 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.condition.script.ScriptCondition;
import org.elasticsearch.xpack.watcher.execution.ManualExecutionContext;
import org.elasticsearch.xpack.watcher.execution.ManualExecutionTests.ExecutionRunner;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.support.WatcherScript;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.delete.DeleteWatchResponse;
import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchRequest;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.xpack.watcher.trigger.manual.ManualTriggerEvent;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.cron;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;

/**
* Two groovy-using methods from ManualExecutionTests.
* They appear to be using groovy as a way to sleep.
*/
public class GroovyManualExecutionIT extends AbstractWatcherIntegrationTestCase {

@Override
protected List<Class<? extends Plugin>> pluginTypes() {
List<Class<? extends Plugin>> types = super.pluginTypes();
types.add(GroovyPlugin.class);
return types;
}

@Override
protected boolean enableSecurity() {
return false;
}

public void testWatchExecutionDuration() throws Exception {
WatchSourceBuilder watchBuilder = watchBuilder()
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.input(simpleInput("foo", "bar"))
.condition(new ScriptCondition((new WatcherScript.Builder.Inline("sleep 100; return true")).build()))
.addAction("log", loggingAction("foobar"));

Watch watch = watchParser().parse("_id", false, watchBuilder.buildAsBytes(XContentType.JSON));
ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watch, false, new ManualTriggerEvent("_id",
new ScheduleTriggerEvent(new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC))),
new TimeValue(1, TimeUnit.HOURS));
WatchRecord record = executionService().execute(ctxBuilder.build());
assertThat(record.result().executionDurationMs(), greaterThanOrEqualTo(100L));
}

public void testForceDeletionOfLongRunningWatch() throws Exception {
WatchSourceBuilder watchBuilder = watchBuilder()
.trigger(schedule(cron("0 0 0 1 * ? 2099")))
.input(simpleInput("foo", "bar"))
.condition(new ScriptCondition((new WatcherScript.Builder.Inline("sleep 10000; return true")).build()))
.defaultThrottlePeriod(new TimeValue(1, TimeUnit.HOURS))
.addAction("log", loggingAction("foobar"));

int numberOfThreads = scaledRandomIntBetween(1, 5);
PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchBuilder)).actionGet();
assertThat(putWatchResponse.getVersion(), greaterThan(0L));
refresh();
assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));

CountDownLatch startLatch = new CountDownLatch(1);

List<Thread> threads = new ArrayList<>();
for (int i = 0; i < numberOfThreads; ++i) {
threads.add(new Thread(new ExecutionRunner(watchService(), executionService(), "_id", startLatch)));
}

for (Thread thread : threads) {
thread.start();
}
DeleteWatchResponse deleteWatchResponse = watcherClient().prepareDeleteWatch("_id").setForce(true).get();
assertThat(deleteWatchResponse.isFound(), is(true));

deleteWatchResponse = watcherClient().prepareDeleteWatch("_id").get();
assertThat(deleteWatchResponse.isFound(), is(false));

startLatch.countDown();

long startJoin = System.currentTimeMillis();
for (Thread thread : threads) {
thread.join();
}
long endJoin = System.currentTimeMillis();
TimeValue tv = new TimeValue(10 * (numberOfThreads+1), TimeUnit.SECONDS);
assertThat("Shouldn't take longer than [" + tv.getSeconds() + "] seconds for all the threads to stop", (endJoin - startJoin),
lessThan(tv.getMillis()));
}

}
@@ -1,113 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.watcher.condition.script.ExecutableScriptCondition;
import org.elasticsearch.xpack.watcher.condition.script.ScriptCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.WatcherScript;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.watch.Payload;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.messy.tests.MessyTestUtils.createScriptService;
import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.mockExecutionContext;

public class GroovyScriptConditionIT extends AbstractWatcherIntegrationTestCase {

@Override
protected List<Class<? extends Plugin>> pluginTypes() {
List<Class<? extends Plugin>> types = super.pluginTypes();
types.add(GroovyPlugin.class);
return types;
}

@Override
protected boolean enableSecurity() {
return false;
}

private static ThreadPool THREAD_POOL;
private ScriptService scriptService;

@BeforeClass
public static void startThreadPool() {
THREAD_POOL = new TestThreadPool(GroovyScriptConditionIT.class.getSimpleName());
}

@Before
public void init() throws Exception {
scriptService = createScriptService(THREAD_POOL);
}

@AfterClass
public static void stopThreadPool() throws InterruptedException {
ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS);
// since static must set to null to be eligible for collection
THREAD_POOL = null;
}

public void testGroovyClosureWithAggregations() throws Exception {
for (int seconds = 0; seconds < 60; seconds += 5) {
String timestamp = "2005-01-01T00:00:" + String.format(Locale.ROOT, "%02d", seconds);
client().prepareIndex(".monitoring", "cluster_stats")
.setSource("status", randomFrom("green", "yellow"), "@timestamp", timestamp).get();
}

refresh();

SearchRequestBuilder builder = client().prepareSearch(".monitoring")
.addAggregation(
AggregationBuilders
.dateHistogram("minutes").field("@timestamp").interval(TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS))
.order(Histogram.Order.COUNT_DESC)
.subAggregation(AggregationBuilders.terms("status").field("status.keyword").size(3)));
SearchResponse unmetResponse = builder.get();

ExecutableScriptCondition condition =
new ExecutableScriptCondition(new ScriptCondition(WatcherScript.inline(
String.join(
" ",
"if (ctx.payload.hits.total < 1) return false;",
"def rows = ctx.payload.hits.hits;",
"if (ctx.payload.aggregations.minutes.buckets.size() < 12) return false;",
"def last60Seconds = ctx.payload.aggregations.minutes.buckets[-12..-1];",
"return last60Seconds.every { it.status.buckets.every { s -> s.key == 'red' } }"
)
).lang("groovy").build()), logger, scriptService);

WatchExecutionContext unmetContext = mockExecutionContext("_name", new Payload.XContent(unmetResponse));
assertFalse(condition.execute(unmetContext).met());

for (int seconds = 0; seconds < 60; seconds += 5) {
String timestamp = "2005-01-01T00:01:" + String.format(Locale.ROOT, "%02d", seconds);
client().prepareIndex(".monitoring", "cluster_stats").setSource("status", randomFrom("red"), "@timestamp", timestamp).get();
}

refresh();

SearchResponse metResponse = builder.get();

WatchExecutionContext metContext = mockExecutionContext("_name", new Payload.XContent(metResponse));
assertTrue(condition.execute(metContext).met());
}

}
@@ -1,99 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.xpack.watcher.execution.ExecutionState;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.condition.ConditionBuilders.alwaysCondition;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.scriptTransform;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;

public class HistoryTemplateTransformMappingsIT extends AbstractWatcherIntegrationTestCase {

@Override
protected List<Class<? extends Plugin>> pluginTypes() {
List<Class<? extends Plugin>> types = super.pluginTypes();
types.add(GroovyPlugin.class);
return types;
}

@Override
protected boolean timeWarped() {
return true; // just to have better control over the triggers
}

@Override
protected boolean enableSecurity() {
return false; // remove security noise from this test
}

public void testTransformFields() throws Exception {
String index = "the-index";
String type = "the-type";
createIndex(index);
index(index, type, "{}");
flush();
refresh();

PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id1").setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput())
.condition(alwaysCondition())
.transform(scriptTransform("return [ 'key' : 'value1' ];"))
.addAction("logger", scriptTransform("return [ 'key' : 'value2' ];"), loggingAction("indexed")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));
timeWarp().scheduler().trigger("_id1");

// adding another watch which with a transform that should conflict with the preview watch. Since the
// mapping for the transform construct is disabled, there should be nor problems.
putWatchResponse = watcherClient().preparePutWatch("_id2").setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput())
.condition(alwaysCondition())
.transform(scriptTransform("return [ 'key' : [ 'key1' : 'value1' ] ];"))
.addAction("logger", scriptTransform("return [ 'key' : [ 'key1' : 'value2' ] ];"), loggingAction("indexed")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));
timeWarp().scheduler().trigger("_id2");

flush();
refresh();

assertWatchWithMinimumActionsCount("_id1", ExecutionState.EXECUTED, 1);
assertWatchWithMinimumActionsCount("_id2", ExecutionState.EXECUTED, 1);

refresh();

assertBusy(() -> {
GetFieldMappingsResponse getFieldMappingsResponse = client().admin().indices()
.prepareGetFieldMappings(".watcher-history*").setFields("result.actions.transform.payload")
.setTypes("watch_record").includeDefaults(true).get();

for (Map<String, Map<String, FieldMappingMetaData>> map : getFieldMappingsResponse.mappings().values()) {
Map<String, FieldMappingMetaData> watchRecord = map.get("watch_record");
assertThat(watchRecord, hasKey("result.actions.transform.payload"));
FieldMappingMetaData fieldMappingMetaData = watchRecord.get("result.actions.transform.payload");
assertThat(fieldMappingMetaData.isNull(), is(true));
}
});
}
}
@@ -1,42 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.watcher.support.WatcherScript;
import org.junit.Ignore;

import java.util.Arrays;
import java.util.Collections;

@Ignore // not a test.
@SuppressForbidden(reason = "gradle is broken and tries to run me as a test")
public final class MessyTestUtils {
public static ScriptService createScriptService(ThreadPool tp) throws Exception {
Settings settings = Settings.builder()
.put("script.inline", "true")
.put("script.indexed", "true")
.put("path.home", LuceneTestCase.createTempDir())
.build();
GroovyScriptEngineService groovyScriptEngineService = new GroovyScriptEngineService(settings);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(groovyScriptEngineService));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Arrays.asList(WatcherScript.CTX_PLUGIN));

ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
return new ScriptService(settings, new Environment(settings), new ResourceWatcherService(settings, tp),
scriptEngineRegistry, scriptContextRegistry, scriptSettings);
}
}
@@ -1,231 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.messy.tests;

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.xpack.watcher.support.WatcherScript;
import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.xpack.watcher.test.WatcherTestUtils;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.indexAction;
import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.xpack.watcher.condition.ConditionBuilders.alwaysCondition;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.searchInput;
import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.chainTransform;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.scriptTransform;
import static org.elasticsearch.xpack.watcher.transform.TransformBuilders.searchTransform;
import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;

/**
*/
public class TransformIT extends AbstractWatcherIntegrationTestCase {

@Override
protected List<Class<? extends Plugin>> pluginTypes() {
List<Class<? extends Plugin>> types = super.pluginTypes();
types.add(GroovyPlugin.class);
return types;
}

@Override
public Settings nodeSettings(int nodeOrdinal) {
Path configDir = createTempDir();
Path scripts = configDir.resolve("scripts");
try {
Files.createDirectories(scripts);
try (InputStream stream = TransformIT.class.getResourceAsStream("/config/scripts/my-script.groovy");
OutputStream output = Files.newOutputStream(scripts.resolve("my-script.groovy"))) {
Streams.copy(stream, output);
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
//Set path so ScriptService will pick up the test scripts
return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put("path.conf", configDir.toString()).build();
}

public void testScriptTransform() throws Exception {
final WatcherScript script;
if (randomBoolean()) {
logger.info("testing script transform with an inline script");
script = WatcherScript.inline("return [key3 : ctx.payload.key1 + ctx.payload.key2]").lang("groovy").build();
} else if (randomBoolean()) {
logger.info("testing script transform with an indexed script");
client().admin().cluster().preparePutStoredScript()
.setId("_id")
.setScriptLang("groovy")
.setSource(new BytesArray("{\"script\" : \"return [key3 : ctx.payload.key1 + ctx.payload.key2]\"}"))
.get();
script = WatcherScript.indexed("_id").lang("groovy").build();
} else {
logger.info("testing script transform with a file script");
script = WatcherScript.file("my-script").lang("groovy").build();
}

// put a watch that has watch level transform:
PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id1")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput(MapBuilder.<String, Object>newMapBuilder().put("key1", 10).put("key2", 10)))
.condition(alwaysCondition())
.transform(scriptTransform(script))
.addAction("_id", indexAction("output1", "type")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));
// put a watch that has a action level transform:
putWatchResponse = watcherClient().preparePutWatch("_id2")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput(MapBuilder.<String, Object>newMapBuilder().put("key1", 10).put("key2", 10)))
.condition(alwaysCondition())
.addAction("_id", scriptTransform(script), indexAction("output2", "type")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));

if (timeWarped()) {
timeWarp().scheduler().trigger("_id1");
timeWarp().scheduler().trigger("_id2");
refresh();
}

assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();

SearchResponse response = client().prepareSearch("output1").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsMap().size(), equalTo(1));
assertThat(response.getHits().getAt(0).sourceAsMap().get("key3").toString(), equalTo("20"));

response = client().prepareSearch("output2").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsMap().size(), equalTo(1));
assertThat(response.getHits().getAt(0).sourceAsMap().get("key3").toString(), equalTo("20"));
}

public void testSearchTransform() throws Exception {
createIndex("my-condition-index", "my-payload-index");
ensureGreen("my-condition-index", "my-payload-index");

index("my-payload-index", "payload", "mytestresult");
refresh();

SearchRequest inputRequest = WatcherTestUtils.newInputSearchRequest("my-condition-index")
.source(searchSource().query(matchAllQuery()));
SearchRequest transformRequest = WatcherTestUtils.newInputSearchRequest("my-payload-index")
.source(searchSource().query(matchAllQuery()));

PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id1")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(searchInput(inputRequest))
.transform(searchTransform(transformRequest))
.addAction("_id", indexAction("output1", "result"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));
putWatchResponse = watcherClient().preparePutWatch("_id2")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(searchInput(inputRequest))
.addAction("_id", searchTransform(transformRequest), indexAction("output2", "result"))
).get();
assertThat(putWatchResponse.isCreated(), is(true));

if (timeWarped()) {
timeWarp().scheduler().trigger("_id1");
timeWarp().scheduler().trigger("_id2");
refresh();
}

assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();

SearchResponse response = client().prepareSearch("output1").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsString(), containsString("mytestresult"));

response = client().prepareSearch("output2").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsString(), containsString("mytestresult"));
}

public void testChainTransform() throws Exception {
final WatcherScript script1 = WatcherScript.inline("return [key3 : ctx.payload.key1 + ctx.payload.key2]").lang("groovy").build();
final WatcherScript script2 = WatcherScript.inline("return [key4 : ctx.payload.key3 + 10]").lang("groovy").build();
// put a watch that has watch level transform:
PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id1")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput(MapBuilder.<String, Object>newMapBuilder().put("key1", 10).put("key2", 10)))
.condition(alwaysCondition())
.transform(chainTransform(scriptTransform(script1), scriptTransform(script2)))
.addAction("_id", indexAction("output1", "type")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));
// put a watch that has a action level transform:
putWatchResponse = watcherClient().preparePutWatch("_id2")
.setSource(watchBuilder()
.trigger(schedule(interval("5s")))
.input(simpleInput(MapBuilder.<String, Object>newMapBuilder().put("key1", 10).put("key2", 10)))
.condition(alwaysCondition())
.addAction("_id", chainTransform(scriptTransform(script1), scriptTransform(script2)),
indexAction("output2", "type")))
.get();
assertThat(putWatchResponse.isCreated(), is(true));

if (timeWarped()) {
timeWarp().scheduler().trigger("_id1");
timeWarp().scheduler().trigger("_id2");
refresh();
}

assertWatchWithMinimumPerformedActionsCount("_id1", 1, false);
assertWatchWithMinimumPerformedActionsCount("_id2", 1, false);
refresh();

SearchResponse response = client().prepareSearch("output1").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsMap().size(), equalTo(1));
assertThat(response.getHits().getAt(0).sourceAsMap().get("key4").toString(), equalTo("30"));

response = client().prepareSearch("output2").get();
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
assertThat(response.getHits().getAt(0).sourceAsMap().size(), equalTo(1));
assertThat(response.getHits().getAt(0).sourceAsMap().get("key4").toString(), equalTo("30"));
}

}
@@ -1,11 +0,0 @@

/*
* Messy tests that depend on mustache directly. Fix these!
*/

apply plugin: 'elasticsearch.messy-test'

dependencies {
testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'testArtifacts')
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
}
@ -1,393 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.mustache.MustachePlugin;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.xpack.common.text.TextTemplate;
|
||||
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
|
||||
import org.elasticsearch.xpack.watcher.actions.ExecutableActions;
|
||||
import org.elasticsearch.xpack.watcher.condition.always.ExecutableAlwaysCondition;
|
||||
import org.elasticsearch.xpack.watcher.execution.TriggeredExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.xpack.watcher.input.Input;
|
||||
import org.elasticsearch.xpack.watcher.input.search.ExecutableSearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInput;
|
||||
import org.elasticsearch.xpack.watcher.input.search.SearchInputFactory;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.ExecutableSimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
|
||||
import org.elasticsearch.xpack.watcher.support.WatcherScript;
|
||||
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest;
|
||||
import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateService;
|
||||
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
|
||||
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
|
||||
import org.elasticsearch.xpack.watcher.watch.Payload;
|
||||
import org.elasticsearch.xpack.watcher.watch.Watch;
|
||||
import org.elasticsearch.xpack.watcher.watch.WatchStatus;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.chrono.ISOChronology;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
|
||||
import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.getRandomSupportedSearchType;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, supportsDedicatedMasters = false,
|
||||
numDataNodes = 1)
|
||||
public class SearchInputIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        Collection<Class<? extends Plugin>> types = new ArrayList<>();
        types.addAll(super.nodePlugins());
        types.add(MustachePlugin.class);
        types.add(CustomScriptContextPlugin.class);
        return types;
    }

    private static final String TEMPLATE_QUERY = "{\"query\":{\"bool\":{\"must\":{\"match\":{\"event_type\":{\"query\":\"a\"," +
            "\"type\":\"boolean\"}}},\"filter\":{\"range\":{\"_timestamp\":" +
            "{\"from\":\"{{ctx.trigger.scheduled_time}}||-{{seconds_param}}\",\"to\":\"{{ctx.trigger.scheduled_time}}\"," +
            "\"include_lower\":true,\"include_upper\":true}}}}}}";

    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        final Path tempDir = createTempDir();
        final Path configPath = tempDir.resolve("config");
        final Path scriptPath = configPath.resolve("scripts");
        try {
            Files.createDirectories(scriptPath);
        } catch (IOException e) {
            throw new RuntimeException("failed to create config dir", e);
        }
        try (InputStream stream = SearchInputIT.class.getResourceAsStream("/org/elasticsearch/xpack/watcher/input/search/config/scripts" +
                "/test_disk_template.mustache");
             OutputStream out = Files.newOutputStream(scriptPath.resolve("test_disk_template.mustache"))) {
            Streams.copy(stream, out);
        } catch (IOException e) {
            throw new RuntimeException("failed to copy mustache template", e);
        }

        // Set path so ScriptService will pick up the test scripts
        return Settings.builder().put(super.nodeSettings(nodeOrdinal))
                .put("path.conf", configPath).build();
    }

    @Override
    protected Settings transportClientSettings() {
        return Settings.builder()
                .put(super.transportClientSettings())
                .build();
    }

    public void testExecute() throws Exception {
        SearchSourceBuilder searchSourceBuilder = searchSource().query(
                boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
                        .from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}")));
        SearchRequest searchRequest = client()
                .prepareSearch()
                .setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
                .request()
                .source(searchSourceBuilder);

        WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);
        ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
                WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
        WatchExecutionContext ctx = new TriggeredExecutionContext(
                new Watch("test-watch",
                        new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
                        new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
                        new ExecutableAlwaysCondition(logger),
                        null,
                        null,
                        new ExecutableActions(new ArrayList<ActionWrapper>()),
                        null,
                        new WatchStatus(new DateTime(0, UTC), emptyMap())),
                new DateTime(0, UTC),
                new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
                timeValueSeconds(5));
        SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());

        assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
        assertNotNull(result.executedRequest());
        assertThat(result.status(), is(Input.Result.Status.SUCCESS));
        assertEquals(result.executedRequest().searchType(), request.getRequest().searchType());
        assertArrayEquals(result.executedRequest().indices(), request.getRequest().indices());
        assertEquals(result.executedRequest().indicesOptions(), request.getRequest().indicesOptions());

        XContentSource source = toXContentSource(result);
        assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
        assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
    }

    public void testSearchInlineTemplate() throws Exception {
        WatchExecutionContext ctx = createContext();

        Map<String, Object> triggerParams = new HashMap<String, Object>();
        triggerParams.put("triggered_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
        triggerParams.put("scheduled_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
        Map<String, Object> ctxParams = new HashMap<String, Object>();
        ctxParams.put("id", ctx.id().value());
        ctxParams.put("metadata", null);
        ctxParams.put("vars", new HashMap<String, Object>());
        ctxParams.put("watch_id", "test-watch");
        ctxParams.put("trigger", triggerParams);
        ctxParams.put("payload", new Payload.Simple().data());
        ctxParams.put("execution_time", new DateTime(1970, 01, 01, 00, 01, 00, 000, ISOChronology.getInstanceUTC()));
        Map<String, Object> expectedParams = new HashMap<String, Object>();
        expectedParams.put("seconds_param", "30s");
        expectedParams.put("ctx", ctxParams);
        Map<String, Object> params = new HashMap<>();
        params.put("seconds_param", "30s");

        WatcherScript template = WatcherScript.inline(TEMPLATE_QUERY).lang("mustache").params(params).build();

        SearchRequest request = client().prepareSearch()
                .setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
                .setIndices("test-search-index").request();

        SearchInput.Result executedResult = executeSearchInput(request, template, ctx);

        assertNotNull(executedResult.executedRequest());
        assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
        if (getNumShards("test-search-index").numPrimaries > 1) {
            assertEquals(executedResult.executedRequest().searchType(), request.searchType());
        }
        assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
        assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());

        XContentSource source = toXContentSource(executedResult);
        assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
        assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
    }

    public void testSearchIndexedTemplate() throws Exception {
        WatchExecutionContext ctx = createContext();

        PutStoredScriptRequest indexedScriptRequest = client().admin().cluster().preparePutStoredScript()
                .setId("test-template")
                .setScriptLang("mustache")
                .setSource(new BytesArray(TEMPLATE_QUERY))
                .request();
        assertThat(client().admin().cluster().putStoredScript(indexedScriptRequest).actionGet().isAcknowledged(), is(true));

        Map<String, Object> params = new HashMap<>();
        params.put("seconds_param", "30s");

        WatcherScript template = WatcherScript.indexed("test-template").lang("mustache").params(params).build();

        jsonBuilder().value(TextTemplate.indexed("test-template").params(params).build()).bytes();
        SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
                .setIndices("test-search-index").request();

        SearchInput.Result executedResult = executeSearchInput(request, template, ctx);

        assertNotNull(executedResult.executedRequest());
        assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
        if (getNumShards("test-search-index").numPrimaries > 1) {
            assertEquals(executedResult.executedRequest().searchType(), request.searchType());
        }
        assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
        assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());

        XContentSource source = toXContentSource(executedResult);
        assertThat(source.getValue("query.bool.filter.0.range._timestamp.from"), equalTo("1970-01-01T00:01:00.000Z||-30s"));
        assertThat(source.getValue("query.bool.filter.0.range._timestamp.to"), equalTo("1970-01-01T00:01:00.000Z"));
    }

    public void testSearchOnDiskTemplate() throws Exception {
        WatchExecutionContext ctx = createContext();

        Map<String, Object> params = new HashMap<>();
        params.put("seconds_param", "30s");

        WatcherScript template = WatcherScript.file("test_disk_template").lang("mustache").params(params).build();
        SearchRequest request = client().prepareSearch().setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
                .setIndices("test-search-index").request();

        SearchInput.Result executedResult = executeSearchInput(request, template, ctx);

        assertNotNull(executedResult.executedRequest());
        assertThat(executedResult.status(), is(Input.Result.Status.SUCCESS));
        assertArrayEquals(executedResult.executedRequest().indices(), request.indices());
        assertEquals(executedResult.executedRequest().indicesOptions(), request.indicesOptions());
    }

    public void testDifferentSearchType() throws Exception {
        SearchSourceBuilder searchSourceBuilder = searchSource().query(
                boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
                        .from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))
        );
        SearchType searchType = getRandomSupportedSearchType();

        SearchRequest searchRequest = client()
                .prepareSearch()
                .setSearchType(searchType)
                .request()
                .source(searchSourceBuilder);

        WatcherSearchTemplateRequest request = new WatcherSearchTemplateRequest(searchRequest);

        ExecutableSearchInput searchInput = new ExecutableSearchInput(new SearchInput(request, null, null, null), logger,
                WatcherClientProxy.of(client()), watcherSearchTemplateService(), null);
        WatchExecutionContext ctx = new TriggeredExecutionContext(
                new Watch("test-watch",
                        new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
                        new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
                        new ExecutableAlwaysCondition(logger),
                        null,
                        null,
                        new ExecutableActions(new ArrayList<ActionWrapper>()),
                        null,
                        new WatchStatus(new DateTime(0, UTC), emptyMap())),
                new DateTime(0, UTC),
                new ScheduleTriggerEvent("test-watch", new DateTime(0, UTC), new DateTime(0, UTC)),
                timeValueSeconds(5));
        SearchInput.Result result = searchInput.execute(ctx, new Payload.Simple());

        assertThat(XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
        assertNotNull(result.executedRequest());
        assertThat(result.status(), is(Input.Result.Status.SUCCESS));
        assertEquals(result.executedRequest().searchType(), searchType);
        assertArrayEquals(result.executedRequest().indices(), searchRequest.indices());
        assertEquals(result.executedRequest().indicesOptions(), searchRequest.indicesOptions());

        XContentSource source = toXContentSource(result);
        assertThat(source.getValue("query.bool.must.1.range._timestamp.from"), equalTo("1970-01-01T00:00:00.000Z||-30s"));
        assertThat(source.getValue("query.bool.must.1.range._timestamp.to"), equalTo("1970-01-01T00:00:00.000Z"));
    }

    public void testParserValid() throws Exception {
        SearchRequest searchRequest = client().prepareSearch()
                .setSearchType(ExecutableSearchInput.DEFAULT_SEARCH_TYPE)
                .request()
                .source(searchSource()
                        .query(boolQuery().must(matchQuery("event_type", "a")).must(rangeQuery("_timestamp")
                                .from("{{ctx.trigger.scheduled_time}}||-30s").to("{{ctx.trigger.triggered_time}}"))));

        TimeValue timeout = randomBoolean() ? TimeValue.timeValueSeconds(randomInt(10)) : null;
        XContentBuilder builder = jsonBuilder().value(
                new SearchInput(new WatcherSearchTemplateRequest(searchRequest), null, timeout, null));
        XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
        parser.nextToken();

        IndicesQueriesRegistry indicesQueryRegistry = internalCluster().getInstance(IndicesQueriesRegistry.class);
        SearchInputFactory factory = new SearchInputFactory(Settings.EMPTY, WatcherClientProxy.of(client()), indicesQueryRegistry,
                null, null, scriptService());

        SearchInput searchInput = factory.parseInput("_id", parser);
        assertEquals(SearchInput.TYPE, searchInput.type());
        assertThat(searchInput.getTimeout(), equalTo(timeout));
    }

    private WatchExecutionContext createContext() {
        return new TriggeredExecutionContext(
                new Watch("test-watch",
                        new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))),
                        new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger),
                        new ExecutableAlwaysCondition(logger),
                        null,
                        null,
                        new ExecutableActions(new ArrayList<ActionWrapper>()),
                        null,
                        new WatchStatus(new DateTime(50000, UTC), emptyMap())),
                new DateTime(60000, UTC),
                new ScheduleTriggerEvent("test-watch", new DateTime(60000, UTC), new DateTime(60000, UTC)),
                timeValueSeconds(5));
    }

    private SearchInput.Result executeSearchInput(SearchRequest request, WatcherScript template,
                                                  WatchExecutionContext ctx) throws IOException {
        createIndex("test-search-index");
        ensureGreen("test-search-index");
        SearchInput.Builder siBuilder = SearchInput.builder(new WatcherSearchTemplateRequest(request, template));

        SearchInput si = siBuilder.build();

        ExecutableSearchInput searchInput = new ExecutableSearchInput(si, logger, WatcherClientProxy.of(client()),
                watcherSearchTemplateService(), null);
        return searchInput.execute(ctx, new Payload.Simple());
    }

    protected WatcherSearchTemplateService watcherSearchTemplateService() {
        String master = internalCluster().getMasterName();
        return new WatcherSearchTemplateService(internalCluster().clusterService(master).getSettings(),
                internalCluster().getInstance(ScriptService.class, master),
                internalCluster().getInstance(IndicesQueriesRegistry.class, master),
                internalCluster().getInstance(AggregatorParsers.class, master),
                internalCluster().getInstance(Suggesters.class, master)
        );
    }

    protected ScriptService scriptService() {
        return internalCluster().getInstance(ScriptService.class);
    }

    private XContentSource toXContentSource(SearchInput.Result result) throws IOException {
        try (XContentBuilder builder = jsonBuilder()) {
            result.executedRequest().source().toXContent(builder, ToXContent.EMPTY_PARAMS);
            return new XContentSource(builder);
        }
    }

    /**
     * Custom plugin that registers XPack script context.
     */
    public static class CustomScriptContextPlugin extends Plugin implements ScriptPlugin {

        @Override
        public ScriptContext.Plugin getCustomScriptContexts() {
            return WatcherScript.CTX_PLUGIN;
        }
    }
}
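
The three test methods above cover the three ways a Watcher search input can pick up its mustache template: inline, stored in the cluster state ("indexed"), and loaded from a file under config/scripts. A minimal sketch of the corresponding builders, copied from the calls in the tests; the fragment reuses the test's TEMPLATE_QUERY constant and is illustrative only, not part of the change set:

    Map<String, Object> params = new HashMap<>();
    params.put("seconds_param", "30s");               // fills {{seconds_param}} in the template

    // inline: the template body travels with the watch definition
    WatcherScript inline = WatcherScript.inline(TEMPLATE_QUERY).lang("mustache").params(params).build();
    // indexed: the template was stored beforehand via preparePutStoredScript().setId("test-template")
    WatcherScript stored = WatcherScript.indexed("test-template").lang("mustache").params(params).build();
    // file: the template sits in config/scripts/test_disk_template.mustache (copied there in nodeSettings)
    WatcherScript onDisk = WatcherScript.file("test_disk_template").lang("mustache").params(params).build();

    // {{ctx.trigger.scheduled_time}} is not a param: it is resolved from the watch execution context,
    // which is why the assertions expect "1970-01-01T00:01:00.000Z||-30s" and "1970-01-01T00:01:00.000Z".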

@@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

/**
 * This package contains tests that use mustache to test what looks
 * to be unrelated functionality, or functionality that should be
 * tested with a mock instead. Instead of doing an epic battle
 * with these tests, they are temporarily moved here to the mustache
 * module's tests, but that is likely not where they belong. Please
 * help by cleaning them up and we can remove this package!
 *
 * <ul>
 * <li>If the test is testing templating integration with another core subsystem,
 * fix it to use a mock instead, so it can be in the core tests again</li>
 * <li>If the test is just being lazy, and does not really need templating to test
 * something, clean it up!</li>
 * </ul>
 */

package org.elasticsearch.messy.tests;

@@ -1,26 +0,0 @@
{
  "query": {
    "bool": {
      "must" : [
        {
          "match": {
            "event_type": {
              "query": "a",
              "type": "boolean"
            }
          }
        },
        {
          "range": {
            "_timestamp": {
              "from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
              "to": "{{ctx.trigger.scheduled_time}}",
              "include_lower": true,
              "include_upper": true
            }
          }
        }
      ]
    }
  }
}
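
This is the on-disk copy of the same TEMPLATE_QUERY used by the inline and indexed tests. With the parameters those tests pass, the two placeholders in the range clause render as below; the values are taken directly from the assertions in SearchInputIT:

    // seconds_param = "30s", trigger scheduled_time = 1970-01-01T00:01:00.000Z
    String from = "1970-01-01T00:01:00.000Z||-30s";   // {{ctx.trigger.scheduled_time}}||-{{seconds_param}}
    String to   = "1970-01-01T00:01:00.000Z";         // {{ctx.trigger.scheduled_time}}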
|
|
@ -1,26 +0,0 @@
|
|||
{
|
||||
"query": {
|
||||
"bool": {
|
||||
"must" : [
|
||||
{
|
||||
"match": {
|
||||
"event_type": {
|
||||
"query": "a",
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"range": {
|
||||
"_timestamp": {
|
||||
"from": "{{ctx.trigger.scheduled_time}}||-{{seconds_param}}",
|
||||
"to": "{{ctx.trigger.scheduled_time}}",
|
||||
"include_lower": true,
|
||||
"include_upper": true
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
|
@ -8,6 +8,8 @@ integTest {
|
|||
cluster {
|
||||
setting 'script.inline', 'true'
|
||||
plugin ':x-plugins:elasticsearch:x-pack'
|
||||
// Whitelist reindexing from the local node so we can test it.
|
||||
setting 'reindex.remote.whitelist', 'myself'
|
||||
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
|
||||
[
|
||||
test_admin: 'superuser',
|
||||
|
|
|
@ -10,6 +10,8 @@ admin:
|
|||
# Search and write on both source and destination indices. It should work if you could just search on the source and
|
||||
# write to the destination but that isn't how security works.
|
||||
minimal:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
@ -26,18 +28,24 @@ minimal:
|
|||
|
||||
# Read only operations on indices
|
||||
readonly:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: '*'
|
||||
privileges: [ read ]
|
||||
|
||||
# Write operations on destination index, none on source index
|
||||
dest_only:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: dest
|
||||
privileges: [ write ]
|
||||
|
||||
# Search and write on both source and destination indices with document level security filtering out some docs.
|
||||
can_not_see_hidden_docs:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
@ -59,6 +67,8 @@ can_not_see_hidden_docs:
|
|||
|
||||
# Search and write on both source and destination indices with field level security.
|
||||
can_not_see_hidden_fields:
|
||||
cluster:
|
||||
- cluster:monitor/main
|
||||
indices:
|
||||
- names: source
|
||||
privileges:
|
||||
|
|
|
@@ -7,28 +7,29 @@ package org.elasticsearch.xpack.security;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import java.io.IOException;

import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

public class RestIT extends ESRestTestCase {
public class ReindexWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
    private static final String USER = "test_admin";
    private static final String PASS = "changeme";

    public RestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public ReindexWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

    /**
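
The same mechanical rename recurs in every REST suite touched by this commit: the old ESRestTestCase/RestTestCandidate types give way to the client-yaml suite classes. A minimal sketch of the resulting shape; the class name here is made up, while the constructor, annotations, and factory method are copied from the hunks:

    import java.io.IOException;

    import com.carrotsearch.randomizedtesting.annotations.Name;
    import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
    import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
    import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
    import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

    public class MyFeatureClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

        public MyFeatureClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
            super(testCandidate);              // the candidate identifies one YAML test to run
        }

        @ParametersFactory
        public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
            return ESClientYamlSuiteTestCase.createParameters(0, 1);   // one shard of the YAML suite
        }
    }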
@ -147,10 +147,9 @@
|
|||
indices.refresh: {}
|
||||
|
||||
- do:
|
||||
headers: {es-security-runas-user: dest_only_user}
|
||||
headers: {es-security-runas-user: minimal_user}
|
||||
catch: forbidden
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
index: source
|
||||
|
|
|
@ -0,0 +1,418 @@
|
|||
---
|
||||
"Reindex from remote as superuser works":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
---
|
||||
"Reindex from remote searching as user with minimal privileges works":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: minimal_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
---
|
||||
"Reindex from remote reading as readonly user works when the indexing user is allowed to index":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: readonly_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
---
|
||||
"Reindex from remote as user that can't read from the source is forbidden":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: forbidden
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: dest_only_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Using a script to write to an index to which you don't have access is forbidden even if you read as a superuser":
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: tweet
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: tweet
|
||||
id: 2
|
||||
body: { "user": "another" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
headers: {es-security-runas-user: minimal_user}
|
||||
catch: forbidden
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
script:
|
||||
inline: if (ctx._source.user == "kimchy") {ctx._index = 'other_dest'}
|
||||
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# The index to which the user tried the unauthorized write didn't even get created
|
||||
- do:
|
||||
catch: missing
|
||||
search:
|
||||
index: other_dest
|
||||
|
||||
# Even the authorized index won't have made it because it was in the same batch as the unauthorized one.
|
||||
# If there had been lots of documents being copied then some might have made it into the authorized index.
|
||||
- do:
|
||||
catch: missing
|
||||
search:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Reindex from remote misses hidden docs":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 2
|
||||
body: { "text": "test", "hidden": true }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: can_not_see_hidden_docs_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
# We copied just one doc, presumably the one without the hidden field
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
# We didn't copy the doc with the hidden field
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
hidden: true
|
||||
- match: { hits.total: 0 }
|
||||
|
||||
---
|
||||
"Reindex misses hidden fields":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test", "foo": "z", "bar": "z" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
reindex:
|
||||
refresh: true
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: can_not_see_hidden_fields_user
|
||||
password: changeme
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
- match: {created: 1}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
foo: z
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
bar: z
|
||||
- match: { hits.total: 1 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: dest
|
||||
body:
|
||||
query:
|
||||
match:
|
||||
text: test
|
||||
- match: { hits.total: 0 }
|
||||
|
||||
|
||||
---
|
||||
"Reindex from remote with bad password is unauthorized":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: unauthorized
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
username: test_admin
|
||||
password: badpass
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
||||
|
||||
---
|
||||
"Reindex from remote with no username or password is unauthorized":
|
||||
- skip:
|
||||
features: catch_unauthorized
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Fetch the http host. We use the host of the master because we know there will always be a master.
|
||||
- do:
|
||||
cluster.state: {}
|
||||
- set: { master_node: master }
|
||||
- do:
|
||||
nodes.info:
|
||||
metric: [ http ]
|
||||
- is_true: nodes.$master.http.publish_address
|
||||
- set: {nodes.$master.http.publish_address: host}
|
||||
- do:
|
||||
catch: unauthorized
|
||||
reindex:
|
||||
body:
|
||||
source:
|
||||
remote:
|
||||
host: http://${host}
|
||||
index: source
|
||||
dest:
|
||||
index: dest
|
|
@ -10,10 +10,11 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
|||
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.transport.MockTransportClient;
|
||||
import org.elasticsearch.xpack.XPackTransportClient;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
@ -39,6 +40,7 @@ public class SecurityTransportClientIT extends ESIntegTestCase {
|
|||
protected Settings externalClusterClientSettings() {
|
||||
return Settings.builder()
|
||||
.put(Security.USER_SETTING.getKey(), ADMIN_USER_PW)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
@ -116,6 +118,6 @@ public class SecurityTransportClientIT extends ESIntegTestCase {
|
|||
.put("cluster.name", clusterName)
|
||||
.build();
|
||||
|
||||
return new MockTransportClient(settings, XPackPlugin.class).addTransportAddress(publishAddress);
|
||||
return new XPackTransportClient(settings).addTransportAddress(publishAddress);
|
||||
}
|
||||
}
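
Two changes meet in this file: the transport client is now built through XPackTransportClient instead of MockTransportClient, and the transport type is picked at random between the Netty 3 and Netty 4 security transports. A condensed sketch of the combined settings, using only calls that appear in the hunks above; the cluster name and publish address are placeholders from the surrounding test:

    Settings settings = Settings.builder()
            .put("cluster.name", clusterName)                                        // placeholder
            .put(Security.USER_SETTING.getKey(), ADMIN_USER_PW)
            .put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
            .build();
    TransportClient client = new XPackTransportClient(settings).addTransportAddress(publishAddress);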
|
||||
|
|
|
@ -13,6 +13,7 @@ import org.elasticsearch.client.Response;
|
|||
import org.elasticsearch.client.ResponseException;
|
||||
import org.elasticsearch.client.transport.NoNodeAvailableException;
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
|
@ -22,7 +23,6 @@ import org.elasticsearch.test.ESIntegTestCase;
|
|||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackTransportClient;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -39,6 +39,7 @@ public class CustomRealmIT extends ESIntegTestCase {
|
|||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER)
|
||||
.put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom("security3", "security4"))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -7,31 +7,31 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
|
||||
public class GraphWithSecurityIT extends ESRestTestCase {
|
||||
public class GraphWithSecurityIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
private static final String TEST_ADMIN_USERNAME = "test_admin";
|
||||
private static final String TEST_ADMIN_PASSWORD = "changeme";
|
||||
|
||||
public GraphWithSecurityIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public GraphWithSecurityIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
protected String[] getCredentials() {
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
package org.elasticsearch.smoketest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -14,7 +15,7 @@ import static org.hamcrest.Matchers.containsString;
|
|||
|
||||
public class GraphWithSecurityInsufficientRoleIT extends GraphWithSecurityIT {
|
||||
|
||||
public GraphWithSecurityInsufficientRoleIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public GraphWithSecurityInsufficientRoleIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
|
|
|
@ -7,27 +7,28 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class MonitoringWithSecurityInsufficientRoleIT extends ESRestTestCase {
|
||||
public class SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
public MonitoringWithSecurityInsufficientRoleIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
@Override
|
|
@ -7,26 +7,27 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
public class MonitoringWithSecurityIT extends ESRestTestCase {
|
||||
public class SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
public MonitoringWithSecurityIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
@Override
|
|
@ -154,12 +154,12 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each
|
|||
integTest {
|
||||
cluster {
|
||||
setting 'xpack.monitoring.collection.interval', '3s'
|
||||
setting 'xpack.monitoring.collection.exporters._http.type', 'http'
|
||||
setting 'xpack.monitoring.collection.exporters._http.enabled', 'false'
|
||||
setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.path', clientKeyStore.name
|
||||
setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.password', 'keypass'
|
||||
setting 'xpack.monitoring.collection.exporters._http.auth.username', 'monitoring_agent'
|
||||
setting 'xpack.monitoring.collection.exporters._http.auth.password', 'changeme'
|
||||
setting 'xpack.monitoring.exporters._http.type', 'http'
|
||||
setting 'xpack.monitoring.exporters._http.enabled', 'false'
|
||||
setting 'xpack.monitoring.exporters._http.ssl.truststore.path', clientKeyStore.name
|
||||
setting 'xpack.monitoring.exporters._http.ssl.truststore.password', 'keypass'
|
||||
setting 'xpack.monitoring.exporters._http.auth.username', 'monitoring_agent'
|
||||
setting 'xpack.monitoring.exporters._http.auth.password', 'changeme'
|
||||
|
||||
setting 'xpack.security.transport.ssl.enabled', 'true'
|
||||
setting 'xpack.security.http.ssl.enabled', 'true'
|
||||
|
|
|
@ -47,6 +47,15 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
|||
*/
|
||||
public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
||||
|
||||
private boolean useSecurity3;
|
||||
|
||||
@Override
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
useSecurity3 = randomBoolean();
|
||||
}
|
||||
|
||||
private static final String USER = "test_user";
|
||||
private static final String PASS = "changeme";
|
||||
private static final String KEYSTORE_PASS = "keypass";
|
||||
|
@ -59,13 +68,18 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
|
||||
@Override
|
||||
protected Settings externalClusterClientSettings() {
|
||||
return Settings.builder()
|
||||
final Settings.Builder builder =
|
||||
Settings.builder()
|
||||
.put(Security.USER_SETTING.getKey(), USER + ":" + PASS)
|
||||
.put(SecurityNetty3Transport.SSL_SETTING.getKey(), true)
|
||||
.put("xpack.security.ssl.keystore.path", clientKeyStore)
|
||||
.put("xpack.security.ssl.keystore.password", KEYSTORE_PASS)
|
||||
.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME)
|
||||
.build();
|
||||
.put("xpack.security.ssl.keystore.password", KEYSTORE_PASS);
|
||||
if (useSecurity3) {
|
||||
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME3);
|
||||
} else {
|
||||
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME4);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Before
|
||||
|
@ -74,8 +88,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
URI uri = new URI("https", null, httpAddress.getHostString(), httpAddress.getPort(), "/", null, null);
|
||||
|
||||
Settings exporterSettings = Settings.builder()
|
||||
.put("xpack.monitoring.collection.exporters._http.enabled", true)
|
||||
.put("xpack.monitoring.collection.exporters._http.host", uri.toString())
|
||||
.put("xpack.monitoring.exporters._http.enabled", true)
|
||||
.put("xpack.monitoring.exporters._http.host", uri.toString())
|
||||
.build();
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
|
||||
}
|
||||
|
@ -83,8 +97,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
|
|||
@After
|
||||
public void disableExporter() {
|
||||
Settings exporterSettings = Settings.builder()
|
||||
.putNull("xpack.monitoring.collection.exporters._http.enabled")
|
||||
.putNull("xpack.monitoring.collection.exporters._http.host")
|
||||
.putNull("xpack.monitoring.exporters._http.enabled")
|
||||
.putNull("xpack.monitoring.exporters._http.host")
|
||||
.build();
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
|
||||
}
|
||||
|
|
|
@ -7,15 +7,16 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.client.RestTestClient;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
|
@ -26,19 +27,19 @@ import java.nio.file.Path;
|
|||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
public class SmokeTestPluginsSslIT extends ESRestTestCase {
|
||||
public class SmokeTestPluginsSslClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
private static final String USER = "test_user";
|
||||
private static final String PASS = "changeme";
|
||||
private static final String KEYSTORE_PASS = "keypass";
|
||||
|
||||
public SmokeTestPluginsSslIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public SmokeTestPluginsSslClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
static Path keyStore;
|
||||
|
@ -46,7 +47,7 @@ public class SmokeTestPluginsSslIT extends ESRestTestCase {
|
|||
@BeforeClass
|
||||
public static void getKeyStore() {
|
||||
try {
|
||||
keyStore = PathUtils.get(SmokeTestPluginsSslIT.class.getResource("/test-node.jks").toURI());
|
||||
keyStore = PathUtils.get(SmokeTestPluginsSslClientYamlTestSuiteIT.class.getResource("/test-node.jks").toURI());
|
||||
} catch (URISyntaxException e) {
|
||||
throw new ElasticsearchException("exception while reading the store", e);
|
||||
}
|
||||
|
@ -65,9 +66,13 @@ public class SmokeTestPluginsSslIT extends ESRestTestCase {
|
|||
String token = basicAuthHeaderValue(USER, new SecuredString(PASS.toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.put(RestTestClient.PROTOCOL, "https")
|
||||
.put(RestTestClient.TRUSTSTORE_PATH, keyStore)
|
||||
.put(RestTestClient.TRUSTSTORE_PASSWORD, KEYSTORE_PASS)
|
||||
.put(ESRestTestCase.TRUSTSTORE_PATH, keyStore)
|
||||
.put(ESRestTestCase.TRUSTSTORE_PASSWORD, KEYSTORE_PASS)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getProtocol() {
|
||||
return "https";
|
||||
}
|
||||
}
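
After the rename the SSL suite keeps the same client-side trust material, but the truststore keys now come from ESRestTestCase and the scheme is switched by overriding getProtocol() to return "https" instead of setting a protocol key on the client. A condensed sketch of the settings chain only, since the enclosing settings method is not shown in the hunk:

    String token = basicAuthHeaderValue(USER, new SecuredString(PASS.toCharArray()));
    Settings clientSettings = Settings.builder()
            .put(ThreadContext.PREFIX + ".Authorization", token)
            .put(ESRestTestCase.TRUSTSTORE_PATH, keyStore)
            .put(ESRestTestCase.TRUSTSTORE_PASSWORD, KEYSTORE_PASS)
            .build();
    // getProtocol() is overridden to return "https", as in the hunk above.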
|
|
@@ -11,9 +11,9 @@
      metric: [ settings ]

  - is_true: nodes
  - is_true: nodes.$master.settings.xpack.monitoring.collection.exporters._http.type
  - is_true: nodes.$master.settings.xpack.monitoring.exporters._http.type

  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.username
  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.password
  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.path
  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.password
  - is_false: nodes.$master.settings.xpack.monitoring.exporters._http.auth.username
  - is_false: nodes.$master.settings.xpack.monitoring.exporters._http.auth.password
  - is_false: nodes.$master.settings.xpack.monitoring.exporters._http.ssl.truststore.path
  - is_false: nodes.$master.settings.xpack.monitoring.exporters._http.ssl.truststore.password
|
||||
|
|
|
@ -7,29 +7,30 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
public class SmokeTestPluginsIT extends ESRestTestCase {
|
||||
public class XSmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
private static final String USER = "test_user";
|
||||
private static final String PASS = "changeme";
|
||||
|
||||
public SmokeTestPluginsIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public XSmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
@Override
|
|
@ -7,28 +7,29 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.elasticsearch.xpack.security.authc.support.SecuredString;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
public class RestIT extends ESRestTestCase {
|
||||
public class SmokeTestSecurityWithMustacheClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
private static final String BASIC_AUTH_VALUE = basicAuthHeaderValue("test_admin", new SecuredString("changeme".toCharArray()));
|
||||
|
||||
public RestIT(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public SmokeTestSecurityWithMustacheClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
@Override
|
|
@ -7,9 +7,10 @@ package org.elasticsearch.smoketest;
|
|||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -18,15 +19,15 @@ import java.io.IOException;
|
|||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.emptyMap;
|
||||
|
||||
public abstract class WatcherRestTestCase extends ESRestTestCase {
|
||||
public abstract class WatcherRestTestCase extends ESClientYamlSuiteTestCase {
|
||||
|
||||
public WatcherRestTestCase(@Name("yaml") RestTestCandidate testCandidate) {
|
||||
public WatcherRestTestCase(@Name("yaml") ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return ESRestTestCase.createParameters(0, 1);
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return ESClientYamlSuiteTestCase.createParameters(0, 1);
|
||||
}
|
||||
|
||||
@Before
|
||||
|
|
|
@ -5,12 +5,12 @@
|
|||
*/
|
||||
package org.elasticsearch.smoketest;
|
||||
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
|
||||
/** Runs rest tests against external cluster */
|
||||
public class WatcherWithGroovyIT extends WatcherRestTestCase {
|
||||
|
||||
public WatcherWithGroovyIT(RestTestCandidate testCandidate) {
|
||||
public WatcherWithGroovyIT(ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,137 @@
---
"Test the execution of a Groovy closure in script condition":

  - do:
      bulk:
        refresh: true
        body: |
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "green", "@timestamp": "2005-01-01T00:00:00" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "yellow", "@timestamp": "2005-01-01T00:00:05" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "green", "@timestamp": "2005-01-01T00:00:55" }

  - do:
      xpack.watcher.put_watch:
        id: "watch_with_groovy_closure"
        body: >
          {
            "trigger" : {
              "schedule" : { "cron" : "0 0 0 1 * ? 2099" }
            },
            "input" : {
              "search" : {
                "request" : {
                  "indices" : [ ".monitoring" ],
                  "body" : {
                    "query" : {
                      "match_all" : {
                      }
                    },
                    "aggregations" : {
                      "minutes" : {
                        "date_histogram" : {
                          "field" : "@timestamp",
                          "interval" : "5s",
                          "order" : {
                            "_count" : "desc"
                          }
                        },
                        "aggregations": {
                          "status" : {
                            "terms" : {
                              "field" : "status.keyword",
                              "size" : 3
                            }
                          }
                        }
                      }
                    }
                  }
                }
              }
            },
            "condition" : {
              "script" : {
                "inline" : "if (ctx.payload.hits.total < 1) return false; def rows = ctx.payload.hits.hits; if (ctx.payload.aggregations.minutes.buckets.size() < 12) return false; def last60Seconds = ctx.payload.aggregations.minutes.buckets[-12..-1]; return last60Seconds.every { it.status.buckets.every { s -> s.key == 'red' } }",
                "lang": "groovy"
              }
            },
            "actions" : {
              "log" : {
                "logging" : {
                  "text" : "executed at {{ctx.execution_time}}"
                }
              }
            }
          }

  - match: { _id: "watch_with_groovy_closure" }

  - do:
      xpack.watcher.execute_watch:
        id: "watch_with_groovy_closure"
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-05-05T20:58:02.443Z",
              "triggered_time" : "2015-05-05T20:58:02.443Z"
            },
            "ignore_condition" : false,
            "action_modes" : {
              "_all" : "force_simulate"
            },
            "record_execution" : false
          }

  - match: { "watch_record.state": "execution_not_needed" }
  - match: { "watch_record.result.condition.met": false }

  - do:
      bulk:
        refresh: true
        body: |
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:00" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:05" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:10" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:15" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:20" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:25" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:30" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:35" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:40" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:45" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:50" }
          {"index": {"_index": ".monitoring", "_type": "cluster_stats"}}
          { "status": "red", "@timestamp": "2005-01-01T00:01:55" }

  - do:
      xpack.watcher.execute_watch:
        id: "watch_with_groovy_closure"
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-05-05T20:58:02.443Z",
              "triggered_time" : "2015-05-05T20:58:02.443Z"
            },
            "ignore_condition" : false,
            "action_modes" : {
              "_all" : "force_simulate"
            },
            "record_execution" : false
          }

  - match: { "watch_record.state": "executed" }
  - match: { "watch_record.result.condition.met": true }
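The interesting part of this watch is the inline Groovy condition: it slices the last twelve 5-second histogram buckets with a negative range and only fires when every one of them contains nothing but "red" status terms. A minimal, standalone Groovy sketch of that evaluation, using hypothetical sample data (the real payload is produced by the search input above):

    // Hypothetical payload shaped like the search input's aggregation response.
    def redBucket = [status: [buckets: [[key: 'red', doc_count: 1]]]]
    def payload = [
            hits        : [total: 12, hits: []],
            aggregations: [minutes: [buckets: (1..13).collect { redBucket }]]
    ]

    def conditionMet = { p ->
        if (p.hits.total < 1) return false
        if (p.aggregations.minutes.buckets.size() < 12) return false
        def last60Seconds = p.aggregations.minutes.buckets[-12..-1]   // negative range: last 12 buckets
        last60Seconds.every { it.status.buckets.every { s -> s.key == 'red' } }
    }

    assert conditionMet(payload)                                      // all buckets red -> condition met
    payload.aggregations.minutes.buckets[-1] = [status: [buckets: [[key: 'green', doc_count: 1]]]]
    assert !conditionMet(payload)                                     // one non-red bucket -> not met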
@ -7,9 +7,10 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.junit.After;
import org.junit.Before;
@ -18,15 +19,15 @@ import java.io.IOException;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

public abstract class WatcherRestTestCase extends ESRestTestCase {
public abstract class SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase extends ESClientYamlSuiteTestCase {

    public WatcherRestTestCase(@Name("yaml") RestTestCandidate testCandidate) {
    public SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

    @Before
@ -17,9 +17,8 @@ import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.common.text.DefaultTextTemplateEngine;
import org.elasticsearch.xpack.common.text.TextTemplate;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.support.WatcherScript;
import org.junit.Before;
import org.mockito.Mockito;
@ -35,7 +34,7 @@ import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class WatcherTemplateTests extends ESTestCase {
public class WatcherTemplateIT extends ESTestCase {

    private TextTemplateEngine engine;
@ -52,7 +51,7 @@ public class WatcherTemplateTests extends ESTestCase {
        ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, registry);
        ScriptService scriptService = new ScriptService(setting, environment, resourceWatcherService, scriptEngineRegistry,
                registry, scriptSettings);
        engine = new DefaultTextTemplateEngine(Settings.EMPTY, scriptService);
        engine = new TextTemplateEngine(Settings.EMPTY, scriptService);
    }

    public void testEscaping() throws Exception {
@ -7,22 +7,23 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

/** Runs rest tests against external cluster */
public class WatcherWithMustacheIT extends WatcherRestTestCase {
public class WatcherWithMustacheIT extends SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase {

    public WatcherWithMustacheIT(@Name("yaml") RestTestCandidate testCandidate) {
    public WatcherWithMustacheIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

}
@ -0,0 +1,157 @@
---
setup:
  - do:
      cluster.health:
        wait_for_status: yellow
  - do: {xpack.watcher.stats:{}}
  - do:
      index:
        index: idx
        type: type
        id: 1
        body: >
          {
            "date" : "2015-01-01T00:00:00",
            "value" : "val_1"
          }
  - do:
      index:
        index: idx
        type: type
        id: 2
        body: >
          {
            "date" : "2015-01-02T00:00:00",
            "value" : "val_2"
          }
  - do:
      index:
        index: idx
        type: type
        id: 3
        body: >
          {
            "date" : "2015-01-03T00:00:00",
            "value" : "val_3"
          }
  - do:
      index:
        index: idx
        type: type
        id: 4
        body: >
          {
            "date" : "2015-01-04T00:00:00",
            "value" : "val_4"
          }
  - do:
      indices.refresh:
        index: idx

---
"Test input mustache integration":
  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-01-04T00:00:00"
            },
            "watch" : {
              "trigger" : { "schedule" : { "interval" : "10s" } },
              "actions" : {
                "dummy" : {
                  "logging" : {
                    "text" : "executed!"
                  }
                }
              },
              "input" : {
                "search" : {
                  "request" : {
                    "indices" : "idx",
                    "body" : {
                      "query" : {
                        "bool" : {
                          "filter" : [
                            {
                              "range" : {
                                "date" : {
                                  "lte" : "{{ctx.trigger.scheduled_time}}",
                                  "gte" : "{{ctx.trigger.scheduled_time}}||-3d"
                                }
                              }
                            }
                          ]
                        }
                      }
                    }
                  }
                }
              }
            }
          }
  - match: { "watch_record.result.input.type": "search" }
  - match: { "watch_record.result.input.status": "success" }
  - match: { "watch_record.result.input.payload.hits.total": 4 }
  # makes sure that the mustache template snippets have been resolved correctly:
  - match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-3d" }
  - match: { "watch_record.result.input.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }

---
"Test transform mustache integration":
  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "trigger_data" : {
              "scheduled_time" : "2015-01-04T00:00:00"
            },
            "watch" : {
              "trigger" : { "schedule" : { "interval" : "10s" } },
              "input" : { "simple" : { "value" : "val_3" } },
              "actions" : {
                "dummy" : {
                  "logging" : {
                    "text" : "executed!"
                  }
                }
              },
              "transform" : {
                "search" : {
                  "request" : {
                    "indices" : "idx",
                    "body" : {
                      "query" : {
                        "bool" : {
                          "filter" : [
                            {
                              "range" : {
                                "date" : {
                                  "lte" : "{{ctx.trigger.scheduled_time}}",
                                  "gte" : "{{ctx.trigger.scheduled_time}}||-1d"
                                }
                              }
                            },
                            {
                              "term" : {
                                "value" : "{{ctx.payload.value}}"
                              }
                            }
                          ]
                        }
                      }
                    }
                  }
                }
              }
            }
          }
  - match: { "watch_record.result.transform.type": "search" }
  - match: { "watch_record.result.transform.status": "success" }
  - match: { "watch_record.result.transform.payload.hits.total": 1 }
  - match: { "watch_record.result.transform.payload.hits.hits.0._id": "3" }
  # makes sure that the mustache template snippets have been resolved correctly:
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.from": "2015-01-04T00:00:00.000Z||-1d" }
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.0.range.date.to": "2015-01-04T00:00:00.000Z" }
  - match: { "watch_record.result.transform.search.request.body.query.bool.filter.1.term.value.value": "val_3" }
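The two "makes sure ..." assertions in each test pin down how the mustache snippets in the range filter are rendered against the trigger context before the search runs. A tiny, illustrative Groovy sketch of that substitution (the context map and render helper are hypothetical; the ".000Z" form of the timestamp is taken straight from the assertions above):

    // Hypothetical trigger context, in the normalized form the assertions above expect.
    def ctx = [trigger: [scheduled_time: '2015-01-04T00:00:00.000Z']]

    // Minimal stand-in for the mustache rendering of the two range bounds.
    def render = { String template ->
        template.replace('{{ctx.trigger.scheduled_time}}', ctx.trigger.scheduled_time)
    }

    assert render('{{ctx.trigger.scheduled_time}}||-3d') == '2015-01-04T00:00:00.000Z||-3d'   // resolved "gte" / from
    assert render('{{ctx.trigger.scheduled_time}}') == '2015-01-04T00:00:00.000Z'             // resolved "lte" / to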
@ -7,9 +7,10 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.junit.After;
import org.junit.Before;
@ -18,15 +19,15 @@ import java.io.IOException;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

public abstract class WatcherRestTestCase extends ESRestTestCase {
public abstract class WatcherRestTestCase extends ESClientYamlSuiteTestCase {

    public WatcherRestTestCase(@Name("yaml") RestTestCandidate testCandidate) {
    public WatcherRestTestCase(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

    @Before
@ -5,12 +5,12 @@
 */
package org.elasticsearch.smoketest;

import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;

/** Runs rest tests against external cluster */
public class WatcherWithPainlessIT extends WatcherRestTestCase {

    public WatcherWithPainlessIT(RestTestCandidate testCandidate) {
    public WatcherWithPainlessIT(ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }
@ -7,36 +7,35 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

public class WatcherWithSecurityIT extends ESRestTestCase {
public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    private static final String TEST_ADMIN_USERNAME = "test_admin";
    private static final String TEST_ADMIN_PASSWORD = "changeme";

    public WatcherWithSecurityIT(@Name("yaml") RestTestCandidate testCandidate) {
    public SmokeTestWatcherWithSecurityClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

    @Before
@ -7,9 +7,10 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.junit.After;
import org.junit.Before;
@ -19,15 +20,15 @@ import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

/** Runs rest tests against external cluster */
public class WatcherGettingStartedIT extends ESRestTestCase {
public class SmokeTestWatcherClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public WatcherGettingStartedIT(@Name("yaml") RestTestCandidate testCandidate) {
    public SmokeTestWatcherClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESRestTestCase.createParameters(0, 1);
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }

    @Before
@ -1,440 +0,0 @@
|
|||
SOFTWARE END USER LICENSE AGREEMENT
|
||||
|
||||
READ THIS AGREEMENT CAREFULLY, WHICH CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS YOUR USE OF
|
||||
ELASTICSEARCH’S PROPRIETARY SOFTWARE. BY INSTALLING AND/OR USING SUCH SOFTWARE, YOU ARE INDICATING THAT YOU AGREE TO THE
|
||||
TERMS AND CONDITIONS SET FORTH IN THIS AGREEMENT. IF YOU DO NOT AGREE WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT
|
||||
INSTALL OR USE ANY OF THE SOFTWARE.
|
||||
|
||||
This END USER LICENSE AGREEMENT (this “Agreement") is entered into by and between the applicable Elasticsearch
|
||||
entity referenced in Attachment 1 hereto (“Elasticsearch”) and the person or entity (“You”) that has downloaded any of
|
||||
Elasticsearch’s proprietary software to which this Agreement is attached or in connection with which this Agreement is
|
||||
presented to You (collectively, the “Software”). This Agreement is effective upon the earlier of the date on the
|
||||
commencement of any License granted pursuant to Section 1.1. below (as applicable, the “Effective Date”).
|
||||
|
||||
1. SOFTWARE LICENSE AND RESTRICTIONS
|
||||
1.1 License Grants.
|
||||
(a) Trial Version License. Subject to the terms and conditions of this Agreement, Elasticsearch agrees to
|
||||
grant, and does hereby grant to You, for a period of thirty (30) days from the date on which You first install the
|
||||
Software (the “Trial Term”), a License to the to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Trial Version of the Software. You understand and agree that upon the expiration of a Trial Term,
|
||||
You will no longer be able to use the Software, unless you either (i) purchase a Subscription, in which case You will
|
||||
receive a License under Section 1.1(b) below to use the Eligible Features and Functions of the Software that are
|
||||
applicable to the Subscription that You purchase, (ii) undertake the Registration of Your use of the Software with
|
||||
Elasticsearch, in which case You will receive a License under Section 1.1(c) below to the Basic Version of the Software
|
||||
or (iii) obtain from Elasticsearch written consent (e-mail sufficient) to extend the Trial Term, which may be granted by
|
||||
Elasticsearch in its sole and absolute discretion.
|
||||
(b) Subscription License. Subject to the terms and conditions of this Agreement and complete payment of any and
|
||||
all applicable Subscription fees, Elasticsearch agrees to grant, and does hereby grant to You during the Subscription
|
||||
Term, and for the restricted scope of this Agreement, a License (i) to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Subscription level that You have purchased, (ii) for the number of Nodes (as defined
|
||||
in the Elasticsearch Support Services Policy) and (iii) for the specific project for which you have purchased a
|
||||
Subscription. The level of Subscription, the number of Nodes and specific project for which you have purchased such
|
||||
Subscription, are set forth on the applicable ordering document entered into by Elasticsearch and You for the purchase
|
||||
of the applicable Subscription (“Order Form”).
|
||||
(c) Basic Version License. Subject to the terms and conditions of this Agreement, and in consideration of the
|
||||
Registration of Your use the Software, Elasticsearch agrees to grant, and does hereby grant to You, for a period of one
|
||||
(1) year from the date of Registration (“Basic Term”), a License to use the Eligible Features and Functions of the
|
||||
Software that are applicable to the Basic Version of the Software.
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication, estoppel or otherwise. You agree not to: (i)
|
||||
reverse engineer or decompile, decrypt, disassemble or otherwise reduce any Software or any portion thereof to
|
||||
human-readable form, except and only to the extent any such restriction is prohibited by applicable law, (ii) deploy the
|
||||
Software on more Nodes (as defined in Elasticsearch’s Support Services Policy) than are permitted under the applicable
|
||||
License grant in Section 1.1 above (iii) where You have purchased a Subscription, use the Software in connection with
|
||||
any project other than the project for which you have purchased such Subscription, as identified on the applicable Order
|
||||
Form, (iv) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted
|
||||
in this Agreement; (v) except as expressly permitted in Section 1.1 above, transfer, sell, rent, lease, distribute,
|
||||
sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (vi) except as may be
|
||||
expressly permitted on an applicable Order Form, use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (vii) circumvent the limitations on use of the Software that are imposed or preserved by any License
|
||||
Key, (viii) alter or remove any proprietary notices in the Software; or (ix) make available to any third party any
|
||||
analysis of the results of operation of the Software, including benchmarking results, without the prior written consent
|
||||
of Elasticsearch. The Software may contain or be provided with open source libraries, components, utilities and other
|
||||
open source software (collectively, “Open Source Software”), which Open Source Software may have applicable license
|
||||
terms as identified on a website designated by Elasticsearch or otherwise provided with the Software or Documentation.
|
||||
Notwithstanding anything to the contrary herein, use of the Open Source Software shall be subject to the license terms
|
||||
and conditions applicable to such Open Source Software, to the extent required by the applicable licensor (which terms
|
||||
shall not restrict the license rights granted to You hereunder, but may contain additional rights).
|
||||
1.3 Audit Rights. You agree that, unless such right is waived in writing by Elasticsearch, Elasticsearch shall have the
|
||||
right, upon fifteen (15) days’ notice to You, to audit Your use of the Software for compliance with any quantitative
|
||||
limitations on Your use of the Software that are set forth in the applicable Order Form. You agree to provide
|
||||
Elasticsearch with the necessary access to the Software to conduct such an audit either (i) remotely, or (ii) if remote
|
||||
performance is not possible, at Your facilities, during normal business hours and no more than one (1) time in any
|
||||
twelve (12) month period. In the event any such audit reveals that You have used the Software in excess of the
|
||||
applicable quantitative limitations, You agree to solely for Your internal business operations, a limited,
|
||||
non-exclusive, non-transferable, fully paid up, right and license (without the right to grant or authorize sublicenses)
|
||||
promptly pay to Elasticsearch an amount equal to the difference between the fees actually paid and the fees that You
|
||||
should have paid to remain in compliance with such quantitative limitations. This Section 1.3 shall survive for a
|
||||
period of two (2) years from the termination or expiration of this Agreement.
|
||||
1.4 Cluster Metadata. You understand and agree that once deployed, and on a daily basis, the Software may provide
|
||||
metadata to Elasticsearch about Your cluster statistics and associates that metadata with Your IP address. However, no
|
||||
other information is provided to Elasticsearch by the Software, including any information about the data You process or
|
||||
store in connection with your use of the Software. Instructions for disabling this feature are contained in the
|
||||
Software, however leaving this feature active enables Elasticsearch to gather cluster statistics and provide an improved
|
||||
level of support to You.
|
||||
|
||||
2. TERM AND TERMINATION
|
||||
2.1 Term. Unless earlier terminated under Section 2.2 below, this Agreement shall commence on the Effective Date, and
|
||||
shall continue in force for the term of the last to expire applicable license set forth in Section 1.1 above.
|
||||
2.2 Termination. Either party may, upon written notice to the other party, terminate this Agreement for material breach
|
||||
by the other party automatically and without any other formality, if such party has failed to cure such material breach
|
||||
within thirty (30) days of receiving written notice of such material breach from the non-breaching party.
|
||||
Notwithstanding the foregoing, this Agreement shall automatically terminate in the event that You intentionally breach
|
||||
the scope of the license granted in Section 1.1 of this Agreement, provided that Elasticsearch reserves the right to
|
||||
retroactively waive such automatic termination upon written notice to You.
|
||||
2.3 Post Termination or Expiration. Upon termination or expiration of this Agreement, for any reason, You shall
|
||||
promptly cease the use of the Software and Documentation and destroy (and certify to Elasticsearch in writing the fact
|
||||
of such destruction), or return to Elasticsearch, all copies of the Software and Documentation then in Your possession
|
||||
or under Your control.
|
||||
2.4 Survival. Sections 2.3, 2.4, 3, 4 and 5 (as any such Sections may be modified by Attachment 1, if applicable) shall
|
||||
survive any termination or expiration of this Agreement.
|
||||
3. LIMITED WARRANTY AND DISCLAIMER OF WARRANTIES
|
||||
3.1 Limited Performance Warranty. Subject to You purchasing a Subscription, Elasticsearch warrants that during the
|
||||
applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation.
|
||||
In the event of a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be
|
||||
for Elasticsearch to (i) correct any failure(s) of the Software to perform in all material respects in accordance with
|
||||
the Documentation or (ii) if Elasticsearch is unable to provide such a correction within thirty (30) days of receipt of
|
||||
notice of the applicable non-conformity, promptly refund to Customer any pre-paid, unused fees paid by You to
|
||||
Elasticsearch for the applicable Subscription. The warranty set forth in this Section 3.1 does not apply if the
|
||||
applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elasticsearch; (b) has not been
|
||||
used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has
|
||||
been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment,
|
||||
products, or systems not meeting specifications identified by Elasticsearch in the Documentation. Additionally, the
|
||||
warranties set forth herein only apply when notice of a warranty claim is provided to Elasticsearch within the
|
||||
applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to
|
||||
software or hardware not supplied by Elasticsearch.
|
||||
3.2 Malicious Code. Elasticsearch represents and warrants that prior to making it available for delivery to You,
|
||||
Elasticsearch will use standard industry practices including, without limitation, the use of an updated commercial
|
||||
anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of
|
||||
a breach of the foregoing warranty, Elasticsearch’s sole obligation, and Your exclusive remedy shall be for
|
||||
Elasticsearch to replace the Software with Software that does not contain any Malicious Code.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT
|
||||
WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY
|
||||
REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4. LIMITATION OF LIABILITY
|
||||
The provisions of this Section 4 apply if You have not purchased a Subscription. If you have purchased a Subscription,
|
||||
then the limitations of liability set forth in the applicable Subscription Agreement will apply in lieu of those set
|
||||
forth in this Section 4.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT,
|
||||
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO
|
||||
USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR
|
||||
TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE
|
||||
OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT
|
||||
CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
4.2 Damages Cap. IN NO EVENT SHALL ELASTICSEARCH’S OR ITS LICENSORS’ AGGREGATE, CUMULATIVE LIABILITY UNDER THIS
|
||||
AGREEMENT EXCEED ONE THOUSAND DOLLARS ($1,000).
|
||||
4.3 YOU AGREE THAT THE FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS ARE A REASONABLE ALLOCATION OF THE RISK BETWEEN
|
||||
THE PARTIES AND WILL APPLY TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL
|
||||
PURPOSE.
|
||||
5. DEFINITIONS
|
||||
The following terms have the meanings ascribed:
|
||||
5.1 “License” means a limited, non-exclusive, non-transferable, fully paid up, right and license (without the right to
|
||||
grant or authorize sublicenses) solely for Your internal business operations to (i) install and use, in object code
|
||||
format, the applicable Eligible Features and Functions of the Software, (ii) use, and distribute internally a reasonable
|
||||
number of copies of the Documentation, provided that You must include on such copies all Marks and Notices; (iii) permit
|
||||
Contractors to use the Software and Documentation as set forth in (i) and (ii) above, provided that such use must be
|
||||
solely for Your benefit, and You shall be responsible for all acts and omissions of such Contractors in connection with
|
||||
their use of the Software that are contrary to the terms and conditions of this Agreement..
|
||||
5.2 “License Key” means an alphanumeric code that enables the Eligible Features and Functions of the Software.
|
||||
5.3 “Basic Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription, but which does require Registration.
|
||||
5.4 “Contractor” means third party contractors performing services on Your behalf.
|
||||
5.5 “Documentation” means the published end user documentation provided by Elasticsearch with the Software.
|
||||
5.6 “Eligible Features and Functions” means those features and functions of the Software that are eligible for use with
|
||||
respect to a particular version of the Software or level of the Subscription. A list of the Eligible Features and
|
||||
Functions that correspond to each version of the Software and Subscription levels may be found at
|
||||
https://www.elastic.co/subscriptions.
|
||||
5.7 “Malicious Code” means any code that is designed to harm, or otherwise disrupt in any unauthorized manner, the
|
||||
operation of a recipient’s computer programs or computer systems or destroy or damage recipient’s data. For clarity,
|
||||
Malicious Code shall not include any software bugs or errors handled through Support Services, or any standard features
|
||||
of functions of the Software and/or any License Key that are intended to enforce the temporal and other limitations on
|
||||
the scope of the use of the Software to the scope of the license purchased by You.
|
||||
5.8 “Marks and Notices” means all Elasticsearch trademarks, trade names, logos and notices present on the Documentation
|
||||
as originally provided by Elasticsearch.
|
||||
5.9 “Registration” means Elasticsearch’s then-current process under which You may register Your use of the Software with
|
||||
Elasticsearch by providing certain information to Elasticsearch regarding your use of the Software.
|
||||
5.10 “Subscription” means the right to receive Support Services and a License to the Software.
|
||||
5.11 “Subscription Term” means the period of time for which You have purchased a Subscription.
|
||||
5.12 “Trial Version” means that version of the Software available for use without the purchase of a Qualifying
|
||||
Subscription and without Registration.
|
||||
6. MISCELLANEOUS
|
||||
This Agreement, including Attachment 1 hereto, which is hereby incorporated herein by this reference, completely and
|
||||
exclusively states the entire agreement of the parties regarding the subject matter herein, and it supersedes, and its
|
||||
terms govern, all prior proposals, agreements, or other communications between the parties, oral or written, regarding
|
||||
such subject matter. For the avoidance of doubt, the parties hereby expressly acknowledge and agree that if You issue
|
||||
any purchase order or similar document in connection with its purchase of a license to the Software, You will do so only
|
||||
for Your internal, administrative purposes and not with the intent to provide any contractual terms. This Agreement may
|
||||
not be modified except by a subsequently dated, written amendment that expressly amends this Agreement and which is
|
||||
signed on behalf of Elasticsearch and You, by duly authorized representatives. If any provision hereof is held
|
||||
unenforceable, this Agreement will continue without said provision and be interpreted to reflect the original intent of
|
||||
the parties.
|
||||
|
||||
|
||||
ATTACHMENT 1
|
||||
ADDITIONAL TERMS AND CONDITIONS
|
||||
|
||||
A. The following additional terms and conditions apply to all Customers with principal offices in the United States
|
||||
of America:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch, Inc., a Delaware corporation.
|
||||
|
||||
(2) Government Rights. The Software product is "Commercial Computer Software," as that term is defined in 48 C.F.R.
|
||||
2.101, and as the term is used in 48 C.F.R. Part 12, and is a Commercial Item comprised of "commercial computer
|
||||
software" and "commercial computer software documentation". If acquired by or on behalf of a civilian agency, the U.S.
|
||||
Government acquires this commercial computer software and/or commercial computer software documentation subject to the
|
||||
terms of this Agreement, as specified in 48 C.F.R. 12.212 (Computer Software) and 12.211 (Technical Data) of the Federal
|
||||
Acquisition Regulation ("FAR") and its successors. If acquired by or on behalf of any agency within the Department of
|
||||
Defense ("DOD"), the U.S. Government acquires this commercial computer software and/or commercial computer software
|
||||
documentation subject to the terms of the Elasticsearch Software License Agreement as specified in 48 C.F.R. 227.7202-3
|
||||
and 48 C.F.R. 227.7202-4 of the DOD FAR Supplement ("DFARS") and its successors, and consistent with 48 C.F.R. 227.7202.
|
||||
This U.S. Government Rights clause, consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202 is in lieu of, and
|
||||
supersedes, any other FAR, DFARS, or other clause or provision that addresses Government rights in computer software,
|
||||
computer software documentation or technical data related to the Software under this Agreement and in any Subcontract
|
||||
under which this commercial computer software and commercial computer software documentation is acquired or licensed.
|
||||
|
||||
(3) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to U.S. export control laws and regulations, including but not limited to the International Traffic In Arms Regulations
|
||||
(“ITAR”) (22 C.F.R. Parts 120-130 (2010)); the Export Administration Regulations ("EAR") (15 C.F.R. Parts 730-774
|
||||
(2010)); the U.S. antiboycott regulations in the EAR and U.S. Department of the Treasury regulations; the economic
|
||||
sanctions regulations and guidelines of the U.S. Department of the Treasury, Office of Foreign Assets Control, and the
|
||||
USA Patriot Act (Title III of Pub. L. 107-56, signed into law October 26, 2001), as amended. You are now and will
|
||||
remain in the future compliant with all such export control laws and regulations, and will not export, re-export,
|
||||
otherwise transfer any Elasticsearch goods, software or technology or disclose any Elasticsearch software or technology
|
||||
to any person contrary to such laws or regulations. You acknowledge that remote access to the Software may in certain
|
||||
circumstances be considered a re-export of Software, and accordingly, may not be granted in contravention of U.S. export
|
||||
control laws and regulations.
|
||||
(4) Governing Law, Jurisdiction and Venue.
|
||||
(a) Customers in California. If Customer is located in California (as determined by the Customer address on the
|
||||
applicable Order Form, or for a trial license under 1.1(a), the location of person who installed the Software), this
|
||||
Agreement will be governed by the laws of the State of California, without regard to its conflict of laws principles,
|
||||
and all suits hereunder will be brought solely in Federal Court for the Northern District of California, or if that
|
||||
court lacks subject matter jurisdiction, in any California State Court located in Santa Clara County.
|
||||
(b) Customers Outside of California. If Customer is located anywhere other than California (as determined by the
|
||||
Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of person who installed
|
||||
the Software), this Agreement will be governed by the laws of the State of Delaware, without regard to its conflict of
|
||||
laws principles, and all suits hereunder will be brought solely in Federal Court for the District of Delaware, or if
|
||||
that court lacks subject matter jurisdiction, in any Delaware State Court located in Wilmington, Delaware.
|
||||
(c) All Customers. This Agreement shall not be governed by the 1980 UN Convention on Contracts for the International
|
||||
Sale of Goods. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any
|
||||
action or proceeding in any of the applicable courts set forth in (a) or (b) above, based upon any alleged lack of
|
||||
personal jurisdiction, improper venue, forum non conveniens, or any similar claim or defense.
|
||||
(d) Equitable Relief. A breach or threatened breach, by either party of Section 4 may cause irreparable harm for
|
||||
which the non-breaching party shall be entitled to seek injunctive relief without being required to post a bond.
|
||||
|
||||
B. The following additional terms and conditions apply to all Customers with principal offices in Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch B.C. Ltd., a corporation
|
||||
incorporated under laws of the Province of British Columbia.
|
||||
|
||||
(2) Export Control. You acknowledge that the goods, software and technology acquired from Elasticsearch are subject
|
||||
to the restrictions and controls set out in Section A(3) above as well as those imposed by the Export and Import Permits
|
||||
Act (Canada) and the regulations thereunder and that you will comply with all applicable laws and regulations. Without
|
||||
limitation, You acknowledge that the Marvel Software, or any portion thereof, will not be exported: (a) to any country
|
||||
on Canada's Area Control List; (b) to any country subject to UN Security Council embargo or action; or (c) contrary to
|
||||
Canada's Export Control List Item 5505. You are now and will remain in the future compliant with all such export control
|
||||
laws and regulations, and will not export, re-export, otherwise transfer any Elasticsearch goods, software or technology
|
||||
or disclose any Elasticsearch software or technology to any person contrary to such laws or regulations. You will not
|
||||
export or re-export the Marvel Software, or any portion thereof, directly or indirectly, in violation of the Canadian
|
||||
export administration laws and regulations to any country or end user, or to any end user who you know or have reason to
|
||||
know will utilize them in the design, development or production of nuclear, chemical or biological weapons. You further
|
||||
acknowledge that the Marvel Software product may include technical data subject to such Canadian export regulations.
|
||||
Elasticsearch does not represent that the Marvel Software is appropriate or available for use in all countries.
|
||||
Elasticsearch prohibits accessing materials from countries or states where contents are illegal. You are using the
|
||||
Marvel Software on your own initiative and you are responsible for compliance with all applicable laws. You hereby agree
|
||||
to indemnify Elasticsearch and its affiliates from any claims, actions, liability or expenses (including reasonable
|
||||
lawyers' fees) resulting from Your failure to act in accordance with the acknowledgements, agreements, and
|
||||
representations in this Section B(2).
|
||||
(3) Governing Law and Dispute Resolution. This Agreement shall be governed by the Province of Ontario and the
|
||||
federal laws of Canada applicable therein without regard to conflict of laws provisions. The parties hereby irrevocably
|
||||
waive any and all claims and defenses either might otherwise have in any such action or proceeding in any of such courts
|
||||
based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens or any similar claim or
|
||||
defense. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach,
|
||||
termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability
|
||||
of this agreement to arbitrate, (each, a “Dispute”), which the parties are unable to resolve after good faith
|
||||
negotiations, shall be submitted first to the upper management level of the parties. The parties, through their upper
|
||||
management level representatives shall meet within thirty (30) days of the Dispute being referred to them and if the
|
||||
parties are unable to resolve such Dispute within thirty (30) days of meeting, the parties agree to seek to resolve the
|
||||
Dispute through mediation with ADR Chambers in the City of Toronto, Ontario, Canada before pursuing any other
|
||||
proceedings. The costs of the mediator shall be shared equally by the parties. If the Dispute has not been resolved
|
||||
within thirty (30) days of the notice to desire to mediate, any party may terminate the mediation and proceed to
|
||||
arbitration and the matter shall be referred to and finally resolved by arbitration at ADR Chambers pursuant to the
|
||||
general ADR Chambers Rules for Arbitration in the City of Toronto, Ontario, Canada. The arbitration shall proceed in
|
||||
accordance with the provisions of the Arbitration Act (Ontario). The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two (2) party-appointed arbitrators are unable to agree on the chairman, the
|
||||
chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be
|
||||
independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to
|
||||
allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees
|
||||
related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any
|
||||
arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith.
|
||||
Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such
|
||||
court for judicial acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the
|
||||
foregoing, Elasticsearch shall have the right to institute an action in a court of proper jurisdiction for preliminary
|
||||
injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall
|
||||
only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
(4) Language. Any translation of this Agreement is done for local requirements and in the event of a dispute
|
||||
between the English and any non-English version, the English version of this Agreement shall govern. At the request of
|
||||
the parties, the official language of this Agreement and all communications and documents relating hereto is the English
|
||||
language, and the English-language version shall govern all interpretation of the Agreement. À la demande des parties,
|
||||
la langue officielle de la présente convention ainsi que toutes communications et tous documents s'y rapportant est la
|
||||
langue anglaise, et la version anglaise est celle qui régit toute interprétation de la présente convention.
|
||||
(5) Warranty Disclaimer. For Customers with principal offices in the Province of Québec, the following new sentence
|
||||
is to be added to the end of Section 3.3: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF
|
||||
DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL
|
||||
NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND
|
||||
DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
(6) Limitation of Liability. For Customers with principal offices in the Province of Québec, the following new
|
||||
sentence is to be added to the end of Section 4.1: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN
|
||||
TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS
|
||||
AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES
|
||||
BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.”
|
||||
|
||||
C. The following additional terms and conditions apply to all Customers with principal offices outside of the United
|
||||
States of America and Canada:
|
||||
|
||||
(1) Applicable Elasticsearch Entity. The entity providing the license in Germany is Elasticsearch Gmbh; in France is
|
||||
Elasticsearch SARL, in the United Kingdom is Elasticsearch Ltd, in Australia is Elasticsearch Pty Ltd., in Japan is
|
||||
Elasticsearch KK, in Sweden is Elasticsearch AB, in Norway is Elasticsearch AS and in all other countries is
|
||||
Elasticsearch BV.
|
||||
|
||||
(2) Choice of Law. This Agreement shall be governed by and construed in accordance with the laws of the State of New
|
||||
York, without reference to or application of choice of law rules or principles. Notwithstanding any choice of law
|
||||
provision or otherwise, the Uniform Computer Information Transactions Act (UCITA) and the United Nations Convention on
|
||||
the International Sale of Goods shall not apply.
|
||||
|
||||
(3) Arbitration. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence,
|
||||
breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or
|
||||
applicability of this agreement to arbitrate, (each, a “Dispute”) shall be referred to and finally resolved by
|
||||
arbitration under the rules and at the location identified below. The arbitral panel shall consist of three (3)
|
||||
arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall
|
||||
discuss and select a chairman. If the two party-appointed arbitrators are unable to agree on the chairman, the chairman
|
||||
shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent
|
||||
of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between
|
||||
the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the
|
||||
arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall
|
||||
be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the
|
||||
award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial
|
||||
acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the foregoing, Elasticsearch
|
||||
shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending
|
||||
a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the
|
||||
arbitrator. The language to be used in the arbitral proceedings shall be English.
|
||||
|
||||
In addition, the following terms only apply to Customers with principal offices within Europe, the Middle East or Africa
|
||||
(EMEA):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the London
|
||||
Court of International Arbitration (“LCIA”) Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be London, England.
|
||||
|
||||
(b) In addition, the following terms only apply to Customers with principal offices within Asia Pacific, Australia &
|
||||
New Zealand:
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the Rules of
|
||||
Conciliation and Arbitration of the International Chamber of Commerce (“ICC”) in force on the date when the notice of
|
||||
arbitration is submitted in accordance with such Rules (which Rules are deemed to be incorporated by reference into this
|
||||
clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of
|
||||
arbitration shall be Singapore.
|
||||
|
||||
(c) In addition, the following terms only apply to Customers with principal offices within the Americas (excluding
|
||||
North America):
|
||||
|
||||
Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under
|
||||
International Dispute Resolution Procedures of the American Arbitration Association (“AAA”) in force on the date when
|
||||
the notice of arbitration is submitted in accordance with such Procedures (which Procedures are deemed to be
|
||||
incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York,
|
||||
USA. The seat, or legal place, of arbitration shall be New York, New York, USA.
|
||||
|
||||
(4) In addition, for Customers with principal offices within the UK, the following new sentence is added to the end
|
||||
of Section 4.1:
|
||||
|
||||
Nothing in this Agreement shall have effect so as to limit or exclude a party’s liability for death or personal injury
|
||||
caused by negligence or for fraud including fraudulent misrepresentation and this Section 4.1 shall take effect subject
|
||||
to this provision.
|
||||
|
||||
(5) In addition, for Customers with principal offices within France, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. Elasticsearch owns all right title and interest in and to the Software and
|
||||
any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software
|
||||
is granted to You by implication, or otherwise. You agree not to prepare derivative works from, modify, copy or use the
|
||||
Software in any manner except as expressly permitted in this Agreement; provided that You may copy the Software for
|
||||
archival purposes, only where such software is provided on a non-durable medium; and You may decompile the Software,
|
||||
where necessary for interoperability purposes and where necessary for the correction of errors making the software unfit
|
||||
for its intended purpose, if such right is not reserved by Elasticsearch as editor of the Software. Pursuant to article
|
||||
L122-6-1 of the French intellectual property code, Elasticsearch reserves the right to correct any bugs as necessary for
|
||||
the Software to serve its intended purpose. You agree not to: (i) transfer, sell, rent, lease, distribute, sublicense,
|
||||
loan or otherwise transfer the Software in whole or in part to any third party; (ii) use the Software for providing
|
||||
time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application
|
||||
services provider or other service offering; (iii) alter or remove any proprietary notices in the Software; or (iv) make
|
||||
available to any third party any analysis of the results of operation of the Software, including benchmarking results,
|
||||
without the prior written consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE WITH
|
||||
RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTICSEARCH DOES
|
||||
NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE
|
||||
UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT OR
|
||||
UNFORESEEABLE DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE
|
||||
PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT,
|
||||
INCLUDING NEGLIGENCE. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH, THROUGH
|
||||
GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU, OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1, OR IN CASE OF
|
||||
DEATH OR PERSONAL INJURY.
|
||||
(6) In addition, for Customers with principal offices within Australia, Sections 4.1, 4.2 and 4.3 of the Agreement
|
||||
are deleted and replaced with the following new Sections 4.1, 4.2 and 4.3:
|
||||
4.1 Disclaimer of Certain Damages. Subject to clause 4.3, a party is not liable for Consequential Loss however
|
||||
caused (including by the negligence of that party) suffered or incurred by the other party in connection with this
|
||||
agreement. “Consequential Loss” means loss of revenues, loss of reputation, indirect loss, loss of profits,
|
||||
consequential loss, loss of actual or anticipated savings, lost opportunities, including opportunities to
|
||||
enter into arrangements with third parties, loss or damage in connection with claims against it by third parties, or loss
|
||||
or corruption of data.
|
||||
4.2 Damages Cap. SUBJECT TO CLAUSES 4.1 AND 4.3, ANY LIABILITY OF ELASTICSEARCH FOR ANY LOSS OR DAMAGE, HOWEVER
|
||||
CAUSED (INCLUDING BY THE NEGLIGENCE OF ELASTICSEARCH), SUFFERED BY YOU IN CONNECTION WITH THIS AGREEMENT IS LIMITED TO
|
||||
THE AMOUNT YOU PAID, IN THE TWELVE (12) MONTHS IMMEDIATELY PRIOR TO THE EVENT GIVING RISE TO LIABILITY, UNDER THE
|
||||
ELASTICSEARCH SUPPORT SERVICES AGREEMENT IN CONNECTION WITH WHICH YOU OBTAINED THE LICENSE TO USE THE SOFTWARE. THE
|
||||
LIMITATION SET OUT IN THIS SECTION 4.2 IS AN AGGREGATE LIMIT FOR ALL CLAIMS, WHENEVER MADE.
|
||||
4.3 Limitation and Disclaimer Exceptions. If the Competition and Consumer Act 2010 (Cth) or any other legislation
|
||||
states that there is a guarantee in relation to any good or service supplied by Elasticsearch in
|
||||
connection with this agreement, and Elasticsearch’s liability for failing to comply with that guarantee cannot be
|
||||
excluded but may be limited, Sections 4.1 and 4.2 do not apply to that liability and instead Elasticsearch’s liability
|
||||
for such failure is limited (at Elasticsearch’s election) to, in the case of a supply of goods, Elasticsearch
|
||||
replacing the goods or supplying equivalent goods or repairing the goods, or in the case of a supply of services,
|
||||
Elasticsearch supplying the services again or paying the cost of having the services supplied again.
|
||||
(7) In addition, for Customers with principal offices within Japan, Sections 1.2, 3 and 4.1 of the Agreement are
|
||||
deleted and replaced with the following new Sections 1.2, 3.3 and 4.1:
|
||||
1.2 Reservation of Rights; Restrictions. As between Elasticsearch and You, Elasticsearch owns all right title and
|
||||
interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1
|
||||
above, no other license to the Software is granted to You by implication or otherwise. You agree not to: (i) prepare
|
||||
derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement or
|
||||
applicable law; (ii) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in
|
||||
whole or in part to any third party; (iii) use the Software for providing time-sharing services, any
|
||||
software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other
|
||||
service offering; (iv) alter or remove any proprietary notices in the Software; or (v) make available to any third party
|
||||
any analysis of the results of operation of the Software, including benchmarking results, without the prior written
|
||||
consent of Elasticsearch.
|
||||
3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS”
|
||||
WITHOUT WARRANTY OF ANY KIND, AND ELASTICSEARCH AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
|
||||
STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW,
|
||||
ELASTICSEARCH AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF
|
||||
THE FOREGOING. FURTHER, ELASTICSEARCH DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT
|
||||
THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED.
|
||||
4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTICSEARCH OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF
|
||||
PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY
|
||||
INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE
|
||||
OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A
|
||||
BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH
|
||||
THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY
|
||||
OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW.
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import org.elasticsearch.gradle.MavenFilteringHack
|
||||
import org.elasticsearch.gradle.test.NodeInfo
|
||||
|
||||
import java.nio.charset.StandardCharsets
|
||||
|
||||
group 'org.elasticsearch.plugin'
|
||||
|
@ -21,13 +20,15 @@ ext.compactProfile = 'full'
|
|||
|
||||
dependencyLicenses.enabled = false
|
||||
|
||||
dependencies {
|
||||
// license deps
|
||||
compile project(':x-plugins:elasticsearch:license:base')
|
||||
testCompile project(':x-plugins:elasticsearch:license:licensor')
|
||||
licenseHeaders {
|
||||
approvedLicenses << 'BCrypt (BSD-like)'
|
||||
additionalLicense 'BCRYP', 'BCrypt (BSD-like)', 'Copyright (c) 2006 Damien Miller <djm@mindrot.org>'
|
||||
}
|
||||
|
||||
dependencies {
|
||||
// security deps
|
||||
compile project(path: ':modules:transport-netty3', configuration: 'runtime')
|
||||
compile project(path: ':modules:transport-netty4', configuration: 'runtime')
|
||||
compile 'dk.brics.automaton:automaton:1.11-8'
|
||||
compile 'com.unboundid:unboundid-ldapsdk:3.1.1'
|
||||
compile 'org.bouncycastle:bcprov-jdk15on:1.54'
|
||||
|
@ -73,6 +74,11 @@ for (String module : ['', 'license-plugin/', 'security/', 'watcher/', 'monitorin
|
|||
}
|
||||
}
|
||||
|
||||
// make LicenseSigner available for testing signed licenses
|
||||
sourceSets.test.java {
|
||||
srcDir '../license-tools/src/main/java'
|
||||
}
|
||||
|
||||
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
|
||||
|
||||
|
@ -112,8 +118,10 @@ forbiddenPatterns {
|
|||
|
||||
// TODO: standardize packaging config for plugins
|
||||
bundlePlugin {
|
||||
from(projectDir) {
|
||||
from(project(':x-plugins').projectDir) {
|
||||
include 'LICENSE.txt'
|
||||
}
|
||||
from(projectDir) {
|
||||
include 'NOTICE.txt'
|
||||
}
|
||||
from('bin/x-pack') {
|
||||
|
@ -231,32 +239,3 @@ thirdPartyAudit.excludes = [
|
|||
'javax.activation.URLDataSource',
|
||||
'javax.activation.UnsupportedDataTypeException'
|
||||
]
|
||||
|
||||
// someone figure out what the x-plugins logic should be
|
||||
licenseHeaders.enabled = false
|
||||
|
||||
modifyPom { MavenPom pom ->
|
||||
pom.withXml { XmlProvider xml ->
|
||||
// first find if we have dependencies at all, and grab the node
|
||||
NodeList depsNodes = xml.asNode().get('dependencies')
|
||||
if (depsNodes.isEmpty()) {
|
||||
return
|
||||
}
|
||||
|
||||
// find the 'base' dependency and replace it with the correct name because the project name is
|
||||
// always used even when the pom of the other project is correct
|
||||
Iterator<Node> childNodeIter = depsNodes.get(0).children().iterator()
|
||||
while (childNodeIter.hasNext()) {
|
||||
Node depNode = childNodeIter.next()
|
||||
String groupId = depNode.get('groupId').get(0).text()
|
||||
Node artifactIdNode = depNode.get('artifactId').get(0)
|
||||
String artifactId = artifactIdNode.text()
|
||||
String scope = depNode.get("scope").get(0).text()
|
||||
if (groupId.equals('org.elasticsearch') && artifactId.equals('base')) {
|
||||
artifactIdNode.replaceNode(new Node(null, 'artifactId', 'license-core'))
|
||||
} else if ('test'.equals(scope)) {
|
||||
childNodeIter.remove()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,15 +7,14 @@ package org.elasticsearch.xpack.graph;
|
|||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.plugins.ActionPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.rest.RestHandler;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreAction;
|
||||
import org.elasticsearch.xpack.graph.action.TransportGraphExploreAction;
|
||||
import org.elasticsearch.xpack.graph.rest.action.RestGraphAction;
|
||||
|
@ -30,25 +29,16 @@ import static java.util.Collections.singletonList;
|
|||
public class Graph extends Plugin implements ActionPlugin {
|
||||
|
||||
public static final String NAME = "graph";
|
||||
private final boolean transportClientMode;
|
||||
protected final boolean enabled;
|
||||
|
||||
|
||||
public Graph(Settings settings) {
|
||||
this.transportClientMode = XPackPlugin.transportClientMode(settings);
|
||||
enabled = enabled(settings);
|
||||
}
|
||||
|
||||
public static boolean enabled(Settings settings) {
|
||||
return XPackPlugin.featureEnabled(settings, NAME, true);
|
||||
this.enabled = XPackSettings.GRAPH_ENABLED.get(settings);
|
||||
}
|
||||
|
||||
public Collection<Module> createGuiceModules() {
|
||||
return Collections.singletonList(b -> {
|
||||
XPackPlugin.bindFeatureSet(b, GraphFeatureSet.class);
|
||||
if (transportClientMode) {
|
||||
b.bind(GraphLicensee.class).toProvider(Providers.of(null));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -67,10 +57,4 @@ public class Graph extends Plugin implements ActionPlugin {
|
|||
}
|
||||
return singletonList(RestGraphAction.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Collections.singletonList(Setting.boolSetting(XPackPlugin.featureEnabledSetting(NAME), true, Setting.Property.NodeScope));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,14 +5,16 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.graph;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.xpack.XPackFeatureSet;
|
||||
|
||||
import java.io.IOException;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -20,18 +22,17 @@ import java.io.IOException;
|
|||
public class GraphFeatureSet implements XPackFeatureSet {
|
||||
|
||||
private final boolean enabled;
|
||||
private final GraphLicensee licensee;
|
||||
private final XPackLicenseState licenseState;
|
||||
|
||||
@Inject
|
||||
public GraphFeatureSet(Settings settings, @Nullable GraphLicensee licensee, NamedWriteableRegistry namedWriteableRegistry) {
|
||||
this.enabled = Graph.enabled(settings);
|
||||
this.licensee = licensee;
|
||||
namedWriteableRegistry.register(Usage.class, Usage.writeableName(Graph.NAME), Usage::new);
|
||||
public GraphFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) {
|
||||
this.enabled = XPackSettings.GRAPH_ENABLED.get(settings);
|
||||
this.licenseState = licenseState;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return Graph.NAME;
|
||||
return XPackPlugin.GRAPH;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -41,7 +42,7 @@ public class GraphFeatureSet implements XPackFeatureSet {
|
|||
|
||||
@Override
|
||||
public boolean available() {
|
||||
return licensee != null && licensee.isAvailable();
|
||||
return licenseState != null && licenseState.isGraphAllowed();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -54,14 +55,14 @@ public class GraphFeatureSet implements XPackFeatureSet {
|
|||
return new Usage(available(), enabled());
|
||||
}
|
||||
|
||||
static class Usage extends XPackFeatureSet.Usage {
|
||||
public static class Usage extends XPackFeatureSet.Usage {
|
||||
|
||||
public Usage(StreamInput input) throws IOException {
|
||||
super(input);
|
||||
}
|
||||
|
||||
public Usage(boolean available, boolean enabled) {
|
||||
super(Graph.NAME, available, enabled);
|
||||
super(XPackPlugin.GRAPH, available, enabled);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,64 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.graph;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.core.License.OperationMode;
|
||||
import org.elasticsearch.license.plugin.core.AbstractLicenseeComponent;
|
||||
|
||||
public class GraphLicensee extends AbstractLicenseeComponent {
|
||||
|
||||
public static final String ID = Graph.NAME;
|
||||
|
||||
public GraphLicensee(Settings settings) {
|
||||
super(settings, ID);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] expirationMessages() {
|
||||
return new String[] {
|
||||
"Graph explore APIs are disabled"
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] acknowledgmentMessages(OperationMode currentMode, OperationMode newMode) {
|
||||
switch (newMode) {
|
||||
case BASIC:
|
||||
case STANDARD:
|
||||
case GOLD:
|
||||
switch (currentMode) {
|
||||
case TRIAL:
|
||||
case PLATINUM:
|
||||
return new String[] { "Graph will be disabled" };
|
||||
}
|
||||
break;
|
||||
}
|
||||
return Strings.EMPTY_ARRAY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if Graph Exploration should be enabled.
|
||||
* <p>
|
||||
* Exploration is only disabled when the license has expired or if the mode is not:
|
||||
* <ul>
|
||||
* <li>{@link OperationMode#PLATINUM}</li>
|
||||
* <li>{@link OperationMode#TRIAL}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @return {@code true} as long as the license is valid. Otherwise {@code false}.
|
||||
*/
|
||||
public boolean isAvailable() {
|
||||
// status is volatile
|
||||
Status localStatus = status;
|
||||
OperationMode operationMode = localStatus.getMode();
|
||||
|
||||
boolean licensed = operationMode == OperationMode.TRIAL || operationMode == OperationMode.PLATINUM;
|
||||
|
||||
return licensed && localStatus.isActive();
|
||||
}
|
||||
}
|
|
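The deleted GraphLicensee above gated graph exploration on an active TRIAL or PLATINUM license; the hunks that follow move that decision behind XPackLicenseState.isGraphAllowed(). A minimal sketch of the same rule, assuming the renamed org.elasticsearch.license.License.OperationMode enum (the helper class below is hypothetical and not part of this commit):

import org.elasticsearch.license.License.OperationMode;

final class GraphAvailabilitySketch {
    // Mirrors the removed GraphLicensee#isAvailable(): a licensed mode AND a still-active license.
    static boolean isGraphAllowed(OperationMode operationMode, boolean active) {
        boolean licensed = operationMode == OperationMode.TRIAL || operationMode == OperationMode.PLATINUM;
        return licensed && active;
    }
}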
@ -23,7 +23,8 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.util.CollectionUtils;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.license.plugin.core.LicenseUtils;
|
||||
import org.elasticsearch.license.LicenseUtils;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilders;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
|
||||
|
@ -37,7 +38,8 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
|
|||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.graph.GraphLicensee;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.graph.Graph;
|
||||
import org.elasticsearch.xpack.graph.action.Connection.ConnectionId;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest.TermBoost;
|
||||
import org.elasticsearch.xpack.graph.action.Vertex.VertexId;
|
||||
|
@ -58,7 +60,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
public class TransportGraphExploreAction extends HandledTransportAction<GraphExploreRequest, GraphExploreResponse> {
|
||||
|
||||
private final TransportSearchAction searchAction;
|
||||
protected final GraphLicensee licensee;
|
||||
protected final XPackLicenseState licenseState;
|
||||
|
||||
static class VertexPriorityQueue extends PriorityQueue<Vertex> {
|
||||
|
||||
|
@ -76,19 +78,19 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
|
|||
@Inject
|
||||
public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, TransportSearchAction transportSearchAction,
|
||||
TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
GraphLicensee licensee) {
|
||||
XPackLicenseState licenseState) {
|
||||
super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
|
||||
GraphExploreRequest::new);
|
||||
this.searchAction = transportSearchAction;
|
||||
this.licensee = licensee;
|
||||
this.licenseState = licenseState;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(GraphExploreRequest request, ActionListener<GraphExploreResponse> listener) {
|
||||
if (licensee.isAvailable()) {
|
||||
if (licenseState.isGraphAllowed()) {
|
||||
new AsyncGraphAction(request, listener).start();
|
||||
} else {
|
||||
listener.onFailure(LicenseUtils.newComplianceException(GraphLicensee.ID));
|
||||
listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.GRAPH));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ import java.util.Map;
|
|||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -27,23 +26,24 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.support.RestActions;
|
||||
import org.elasticsearch.rest.action.support.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.XPackClient;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreResponse;
|
||||
import org.elasticsearch.xpack.graph.action.Hop;
|
||||
import org.elasticsearch.xpack.graph.action.VertexRequest;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest.TermBoost;
|
||||
import org.elasticsearch.xpack.rest.XPackRestHandler;
|
||||
|
||||
|
||||
/**
|
||||
* @see GraphExploreRequest
|
||||
*/
|
||||
public class RestGraphAction extends BaseRestHandler {
|
||||
public class RestGraphAction extends XPackRestHandler {
|
||||
|
||||
private IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
public static final ParseField TIMEOUT_FIELD = new ParseField("timeout");
|
||||
|
@ -68,21 +68,23 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
@Inject
|
||||
public RestGraphAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) {
|
||||
super(settings);
|
||||
// @deprecated TODO need to add deprecation support as per https://github.com/elastic/x-plugins/issues/1760#issuecomment-217507517
|
||||
controller.registerHandler(GET, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_graph/explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_graph/explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_graph/explore", this);
|
||||
// new REST endpoint
|
||||
controller.registerHandler(GET, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
controller.registerHandler(POST, "/{index}/{type}/_xpack/graph/_explore", this);
|
||||
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
|
||||
// @deprecated Remove in 6.0
|
||||
// NOTE: Old versions did not end with "/_explore"; they were just "/explore"
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(GET, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
GET, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
controller.registerWithDeprecatedHandler(POST, "/{index}/{type}" + URI_BASE + "/_graph/_explore", this,
|
||||
POST, "/{index}/{type}/_graph/explore", deprecationLogger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) throws IOException {
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final XPackClient client) throws IOException {
|
||||
GraphExploreRequest graphRequest = new GraphExploreRequest(Strings.splitStringByCommaToArray(request.param("index")));
|
||||
graphRequest.indicesOptions(IndicesOptions.fromRequest(request, graphRequest.indicesOptions()));
|
||||
graphRequest.routing(request.param("routing"));
|
||||
|
@ -109,7 +111,7 @@ public class RestGraphAction extends BaseRestHandler {
|
|||
}
|
||||
|
||||
graphRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
|
||||
client.execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
client.es().execute(INSTANCE, graphRequest, new RestToXContentListener<GraphExploreResponse>(channel));
|
||||
}
|
||||
|
||||
private void parseHop(XContentParser parser, QueryParseContext context, Hop currentHop,
|
||||
|
|
|
@ -5,18 +5,13 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.graph;
|
||||
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.graph.GraphFeatureSet;
|
||||
import org.elasticsearch.xpack.graph.GraphLicensee;
|
||||
import org.junit.Before;
|
||||
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Matchers.eq;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
|
@ -24,24 +19,17 @@ import static org.mockito.Mockito.when;
|
|||
*/
|
||||
public class GraphFeatureSetTests extends ESTestCase {
|
||||
|
||||
private GraphLicensee licensee;
|
||||
private NamedWriteableRegistry namedWriteableRegistry;
|
||||
private XPackLicenseState licenseState;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
licensee = mock(GraphLicensee.class);
|
||||
namedWriteableRegistry = mock(NamedWriteableRegistry.class);
|
||||
}
|
||||
|
||||
public void testWritableRegistration() throws Exception {
|
||||
new GraphFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry);
|
||||
verify(namedWriteableRegistry).register(eq(GraphFeatureSet.Usage.class), eq("xpack.usage.graph"), anyObject());
|
||||
licenseState = mock(XPackLicenseState.class);
|
||||
}
|
||||
|
||||
public void testAvailable() throws Exception {
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry);
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(Settings.EMPTY, licenseState);
|
||||
boolean available = randomBoolean();
|
||||
when(licensee.isAvailable()).thenReturn(available);
|
||||
when(licenseState.isGraphAllowed()).thenReturn(available);
|
||||
assertThat(featureSet.available(), is(available));
|
||||
}
|
||||
|
||||
|
@ -55,7 +43,7 @@ public class GraphFeatureSetTests extends ESTestCase {
|
|||
} else {
|
||||
settings.put("xpack.graph.enabled", enabled);
|
||||
}
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(settings.build(), licensee, namedWriteableRegistry);
|
||||
GraphFeatureSet featureSet = new GraphFeatureSet(settings.build(), licenseState);
|
||||
assertThat(featureSet.enabled(), is(enabled));
|
||||
}
|
||||
|
||||
|
|
|
@ -1,102 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.graph.license;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.core.License.OperationMode;
|
||||
import org.elasticsearch.license.plugin.core.AbstractLicenseeTestCase;
|
||||
import org.elasticsearch.xpack.graph.GraphLicensee;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class LicenseTests extends AbstractLicenseeTestCase {
|
||||
|
||||
GraphLicensee graphLicensee = new GraphLicensee(Settings.EMPTY);
|
||||
|
||||
public void testPlatinumTrialLicenseCanDoEverything() throws Exception {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testBasicLicenseIsDisabled() throws Exception {
|
||||
setOperationMode(graphLicensee, OperationMode.BASIC);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testStandardLicenseIsDisabled() throws Exception {
|
||||
setOperationMode(graphLicensee, OperationMode.STANDARD);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testNoLicenseDoesNotWork() {
|
||||
setOperationMode(graphLicensee, OperationMode.BASIC);
|
||||
disable(graphLicensee);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testExpiredPlatinumTrialLicenseIsRestricted() throws Exception {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
disable(graphLicensee);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testUpgradingFromBasicLicenseWorks() {
|
||||
setOperationMode(graphLicensee, OperationMode.BASIC);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testDowngradingToBasicLicenseWorks() {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, OperationMode.BASIC);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testUpgradingFromStandardLicenseWorks() {
|
||||
setOperationMode(graphLicensee, OperationMode.STANDARD);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testDowngradingToStandardLicenseWorks() {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, OperationMode.STANDARD);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testDowngradingToGoldLicenseWorks() {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, OperationMode.GOLD);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
public void testUpgradingExpiredLicenseWorks() {
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
disable(graphLicensee);
|
||||
assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(graphLicensee);
|
||||
|
||||
setOperationMode(graphLicensee, randomTrialOrPlatinumMode());
|
||||
assertLicensePlatinumTrialBehaviour(graphLicensee);
|
||||
}
|
||||
|
||||
private void assertLicensePlatinumTrialBehaviour(GraphLicensee graphLicensee) {
|
||||
assertThat("Expected graph exploration to be allowed", graphLicensee.isAvailable(), is(true));
|
||||
}
|
||||
|
||||
private void assertLicenseBasicOrStandardGoldOrNoneOrExpiredBehaviour(GraphLicensee graphLicensee) {
|
||||
assertThat("Expected graph exploration not to be allowed", graphLicensee.isAvailable(), is(false));
|
||||
}
|
||||
}
|
|
@ -7,13 +7,12 @@ package org.elasticsearch.xpack.graph.test;
|
|||
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.search.ShardSearchFailure;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.Settings.Builder;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.ScriptQueryBuilder;
|
||||
import org.elasticsearch.xpack.monitoring.Monitoring;
|
||||
import org.elasticsearch.xpack.XPackSettings;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.script.AbstractSearchScript;
|
||||
|
@ -21,9 +20,7 @@ import org.elasticsearch.script.ExecutableScript;
|
|||
import org.elasticsearch.script.NativeScriptFactory;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.xpack.watcher.Watcher;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreAction;
|
||||
import org.elasticsearch.xpack.graph.action.GraphExploreRequest;
|
||||
|
@ -128,9 +125,9 @@ public class GraphTests extends ESSingleNodeTestCase {
|
|||
public Settings nodeSettings() {
|
||||
// Disable security otherwise authentication failures happen creating indices.
|
||||
Builder newSettings = Settings.builder();
|
||||
newSettings.put(XPackPlugin.featureEnabledSetting(Security.NAME), false);
|
||||
newSettings.put(XPackPlugin.featureEnabledSetting(Monitoring.NAME), false);
|
||||
newSettings.put(XPackPlugin.featureEnabledSetting(Watcher.NAME), false);
|
||||
newSettings.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
|
||||
newSettings.put(XPackSettings.MONITORING_ENABLED.getKey(), false);
|
||||
newSettings.put(XPackSettings.WATCHER_ENABLED.getKey(), false);
|
||||
return newSettings.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
"documentation": "https://www.elastic.co/guide/en/graph/current/explore.html",
|
||||
"methods": ["GET", "POST"],
|
||||
"url": {
|
||||
"path": "/{index}/_xpack/graph/_explore",
|
||||
"paths": ["/{index}/_xpack/graph/_explore", "/{index}/{type}/_xpack/graph/_explore"],
|
||||
"path": "/{index}/_xpack/_graph/_explore",
|
||||
"paths": ["/{index}/_xpack/_graph/_explore", "/{index}/{type}/_xpack/_graph/_explore"],
|
||||
"parts" : {
|
||||
"index": {
|
||||
"type" : "list",
|
||||
|
@ -30,4 +30,4 @@
|
|||
"description" : "Graph Query DSL"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.delete;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.delete;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedRequest;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.delete;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.delete;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
|
@ -3,16 +3,15 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
public abstract class ExpirationCallback {
|
||||
abstract class ExpirationCallback {
|
||||
|
||||
static final String EXPIRATION_JOB_PREFIX = ".license_expiration_job_";
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.get;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.get;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.get;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
|
@ -3,12 +3,11 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.action.get;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.license.core.License;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
|
@ -3,7 +3,7 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.license.plugin.core;
|
||||
package org.elasticsearch.license;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
|
@ -25,12 +25,6 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.gateway.GatewayService;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.core.LicenseVerifier;
|
||||
import org.elasticsearch.license.core.OperationModeFileWatcher;
|
||||
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
|
||||
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
|
||||
import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
|
||||
|
@ -44,18 +38,13 @@ import java.util.Locale;
|
|||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Service responsible for managing {@link LicensesMetaData}
|
||||
* Interfaces through which this is exposed are:
|
||||
* - LicensesClientService - responsible for listener registration of consumer plugin(s)
|
||||
* Service responsible for managing {@link LicensesMetaData}.
|
||||
* <p>
|
||||
* Notification Scheme:
|
||||
* <p>
|
||||
* All registered listeners are notified of the current license upon registration or when a new license is installed in the cluster state.
|
||||
* When a new license is notified as enabled to the registered listener, a notification is scheduled at the time of license expiry.
|
||||
* Registered listeners are notified using {@link #onUpdate(LicensesMetaData)}
|
||||
* On the master node, the service handles updating the cluster state when a new license is registered.
|
||||
* It also listens on all nodes for cluster state updates, and updates {@link XPackLicenseState} when
|
||||
* the license changes are detected in the cluster state.
|
||||
*/
|
||||
public class LicenseService extends AbstractLifecycleComponent implements ClusterStateListener, SchedulerEngine.Listener {
|
||||
|
||||
|
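The rewritten LicenseService javadoc above describes pushing license changes into XPackLicenseState rather than notifying per-feature licensees. The "active" window it applies is the one used by updateLicenseState further down; a standalone sketch of that rule (helper name and signature are illustrative, not part of this commit):

    // A license counts as active from its issue date until expiry plus the grace period.
    static boolean isActive(License license, long nowMillis, long gracePeriodMillis) {
        return nowMillis >= license.issueDate()
                && nowMillis < license.expiryDate() + gracePeriodMillis;
    }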
@ -65,14 +54,12 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
|
|||
/**
|
||||
* Duration of grace period after a license has expired
|
||||
*/
|
||||
public static final TimeValue GRACE_PERIOD_DURATION = days(7);
|
||||
static final TimeValue GRACE_PERIOD_DURATION = days(7);
|
||||
|
||||
private final ClusterService clusterService;
|
||||
|
||||
/**
|
||||
* Currently active consumers to notify to
|
||||
*/
|
||||
private final List<InternalLicensee> registeredLicensees;
|
||||
/** The xpack feature state to update when license changes are made. */
|
||||
private final XPackLicenseState licenseState;
|
||||
|
||||
/**
|
||||
* Currently active license
|
||||
|
@ -104,115 +91,70 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
|
|||
"please read the following messages and update the license again, this time with the \"acknowledge=true\" parameter:";
|
||||
|
||||
public LicenseService(Settings settings, ClusterService clusterService, Clock clock, Environment env,
|
||||
ResourceWatcherService resourceWatcherService, List<Licensee> registeredLicensees) {
|
||||
ResourceWatcherService resourceWatcherService, XPackLicenseState licenseState) {
|
||||
super(settings);
|
||||
this.clusterService = clusterService;
|
||||
this.clock = clock;
|
||||
this.scheduler = new SchedulerEngine(clock);
|
||||
this.registeredLicensees = registeredLicensees.stream().map(InternalLicensee::new).collect(Collectors.toList());
|
||||
this.licenseState = licenseState;
|
||||
this.operationModeFileWatcher = new OperationModeFileWatcher(resourceWatcherService,
|
||||
XPackPlugin.resolveConfigFile(env, "license_mode"), logger, () -> notifyLicensees(getLicense()));
|
||||
XPackPlugin.resolveConfigFile(env, "license_mode"), logger, () -> updateLicenseState(getLicense()));
|
||||
this.scheduler.register(this);
|
||||
populateExpirationCallbacks();
|
||||
}
|
||||
|
||||
private void logExpirationWarning(long expirationMillis, boolean expired) {
|
||||
String expiredMsg = expired ? "expired" : "will expire";
|
||||
String general = LoggerMessageFormat.format(null, "\n" +
|
||||
"#\n" +
|
||||
"# License [{}] on [{}]. If you have a new license, please update it.\n" +
|
||||
"# Otherwise, please reach out to your support contact.\n" +
|
||||
"# ", expiredMsg, DATE_FORMATTER.printer().print(expirationMillis));
|
||||
if (expired) {
|
||||
general = general.toUpperCase(Locale.ROOT);
|
||||
}
|
||||
StringBuilder builder = new StringBuilder(general);
|
||||
builder.append(System.lineSeparator());
|
||||
if (expired) {
|
||||
builder.append("# COMMERCIAL PLUGINS OPERATING WITH REDUCED FUNCTIONALITY");
|
||||
} else {
|
||||
builder.append("# Commercial plugins operate with reduced functionality on license expiration:");
|
||||
}
|
||||
XPackLicenseState.EXPIRATION_MESSAGES.forEach((feature, messages) -> {
|
||||
if (messages.length > 0) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(feature);
|
||||
for (String message : messages) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(message);
|
||||
}
|
||||
}
|
||||
});
|
||||
logger.warn("{}", builder);
|
||||
}
|
||||
|
||||
private void populateExpirationCallbacks() {
|
||||
expirationCallbacks.add(new ExpirationCallback.Pre(days(7), days(25), days(1)) {
|
||||
@Override
|
||||
public void on(License license) {
|
||||
String general = LoggerMessageFormat.format(null, "\n" +
|
||||
"#\n" +
|
||||
"# License will expire on [{}]. If you have a new license, please update it.\n" +
|
||||
"# Otherwise, please reach out to your support contact.\n" +
|
||||
"# ", DATE_FORMATTER.printer().print(license.expiryDate()));
|
||||
if (!registeredLicensees.isEmpty()) {
|
||||
StringBuilder builder = new StringBuilder(general);
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# Commercial plugins operate with reduced functionality on license " +
|
||||
"expiration:");
|
||||
for (InternalLicensee licensee : registeredLicensees) {
|
||||
if (licensee.expirationMessages().length > 0) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(licensee.id());
|
||||
for (String message : licensee.expirationMessages()) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.warn("{}", builder);
|
||||
} else {
|
||||
logger.warn("{}", general);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
@Override
|
||||
public void on(License license) {
|
||||
logExpirationWarning(license.expiryDate(), false);
|
||||
}
|
||||
});
|
||||
expirationCallbacks.add(new ExpirationCallback.Pre(days(0), days(7), TimeValue.timeValueMinutes(10)) {
|
||||
@Override
|
||||
public void on(License license) {
|
||||
String general = LoggerMessageFormat.format(null, "\n" +
|
||||
"#\n" +
|
||||
"# License will expire on [{}]. If you have a new license, please update it.\n" +
|
||||
"# Otherwise, please reach out to your support contact.\n" +
|
||||
"# ", DATE_FORMATTER.printer().print(license.expiryDate()));
|
||||
if (!registeredLicensees.isEmpty()) {
|
||||
StringBuilder builder = new StringBuilder(general);
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# Commercial plugins operate with reduced functionality on license " +
|
||||
"expiration:");
|
||||
for (InternalLicensee licensee : registeredLicensees) {
|
||||
if (licensee.expirationMessages().length > 0) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(licensee.id());
|
||||
for (String message : licensee.expirationMessages()) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.warn("{}", builder.toString());
|
||||
} else {
|
||||
logger.warn("{}", general);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
@Override
|
||||
public void on(License license) {
|
||||
logExpirationWarning(license.expiryDate(), false);
|
||||
}
|
||||
});
|
||||
expirationCallbacks.add(new ExpirationCallback.Post(days(0), null, TimeValue.timeValueMinutes(10)) {
|
||||
@Override
|
||||
public void on(License license) {
|
||||
// logged when grace period begins
|
||||
String general = LoggerMessageFormat.format(null, "\n" +
|
||||
"#\n" +
|
||||
"# LICENSE EXPIRED ON [{}]. IF YOU HAVE A NEW LICENSE, PLEASE\n" +
|
||||
"# UPDATE IT. OTHERWISE, PLEASE REACH OUT TO YOUR SUPPORT CONTACT.\n" +
|
||||
"# ", DATE_FORMATTER.printer().print(license.expiryDate()));
|
||||
if (!registeredLicensees.isEmpty()) {
|
||||
StringBuilder builder = new StringBuilder(general);
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# COMMERCIAL PLUGINS OPERATING WITH REDUCED FUNCTIONALITY");
|
||||
for (InternalLicensee licensee : registeredLicensees) {
|
||||
if (licensee.expirationMessages().length > 0) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(licensee.id());
|
||||
for (String message : licensee.expirationMessages()) {
|
||||
builder.append(System.lineSeparator());
|
||||
builder.append("# - ");
|
||||
builder.append(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.warn("{}", builder.toString());
|
||||
} else {
|
||||
logger.warn("{}", general);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
@Override
|
||||
public void on(License license) {
|
||||
// logged when grace period begins
|
||||
logExpirationWarning(license.expiryDate(), true);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -228,23 +170,23 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
|
|||
listener.onResponse(new PutLicenseResponse(true, LicensesStatus.EXPIRED));
|
||||
} else {
|
||||
if (!request.acknowledged()) {
|
||||
// TODO: ack messages should be generated on the master, since another node's cluster state may be behind...
|
||||
final License currentLicense = getLicense();
|
||||
if (currentLicense != null) {
|
||||
Map<String, String[]> acknowledgeMessages = new HashMap<>(registeredLicensees.size() + 1);
|
||||
Map<String, String[]> acknowledgeMessages = new HashMap<>();
|
||||
if (!License.isAutoGeneratedLicense(currentLicense.signature()) // current license is not auto-generated
|
||||
&& currentLicense.issueDate() > newLicense.issueDate()) { // and has a later issue date
|
||||
acknowledgeMessages.put("license",
|
||||
new String[]{"The new license is older than the currently installed license. Are you sure you want to " +
|
||||
"override the current license?"});
|
||||
acknowledgeMessages.put("license", new String[]{
|
||||
"The new license is older than the currently installed license. " +
|
||||
"Are you sure you want to override the current license?"});
|
||||
}
|
||||
for (InternalLicensee licensee : registeredLicensees) {
|
||||
String[] listenerAcknowledgeMessages = licensee.acknowledgmentMessages(
|
||||
currentLicense.operationMode(), newLicense.operationMode());
|
||||
if (listenerAcknowledgeMessages.length > 0) {
|
||||
acknowledgeMessages.put(licensee.id(), listenerAcknowledgeMessages);
|
||||
XPackLicenseState.ACKNOWLEDGMENT_MESSAGES.forEach((feature, ackMessages) -> {
|
||||
String[] messages = ackMessages.apply(currentLicense.operationMode(), newLicense.operationMode());
|
||||
if (messages.length > 0) {
|
||||
acknowledgeMessages.put(feature, messages);
|
||||
}
|
||||
}
|
||||
if (!acknowledgeMessages.isEmpty()) {
|
||||
});
|
||||
if (acknowledgeMessages.isEmpty() == false) {
|
||||
// needs acknowledgement
|
||||
listener.onResponse(new PutLicenseResponse(false, LicensesStatus.VALID, ACKNOWLEDGEMENT_HEADER,
|
||||
acknowledgeMessages));
|
||||
|
@ -280,7 +222,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
|
|||
if (licensesMetaData != null) {
|
||||
final License license = licensesMetaData.getLicense();
|
||||
if (event.getJobName().equals(LICENSE_JOB)) {
|
||||
notifyLicensees(license);
|
||||
updateLicenseState(license);
|
||||
} else if (event.getJobName().startsWith(ExpirationCallback.EXPIRATION_JOB_PREFIX)) {
|
||||
                expirationCallbacks.stream()
                        .filter(expirationCallback -> expirationCallback.getId().equals(event.getJobName()))
@@ -315,17 +257,6 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
        });
    }

    public Licensee.Status licenseeStatus(License license) {
        if (license == null) {
            return new Licensee.Status(License.OperationMode.MISSING, false);
        }
        long time = clock.millis();
        boolean active = time >= license.issueDate() &&
                time < license.expiryDate() + GRACE_PERIOD_DURATION.getMillis();

        return new Licensee.Status(license.operationMode(), active);
    }

    public License getLicense() {
        final License license = getLicense(clusterService.state().metaData().custom(LicensesMetaData.TYPE));
        return license == LicensesMetaData.LICENSE_TOMBSTONE ? null : license;

@@ -379,15 +310,25 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
    protected void doStart() throws ElasticsearchException {
        clusterService.add(this);
        scheduler.start(Collections.emptyList());
        registeredLicensees.forEach(x -> initLicensee(x.licensee));
        logger.debug("initializing license state");
        final ClusterState clusterState = clusterService.state();
        if (clusterService.lifecycleState() == Lifecycle.State.STARTED
                && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false
                && clusterState.nodes().getMasterNode() != null) {
            final LicensesMetaData currentMetaData = clusterState.metaData().custom(LicensesMetaData.TYPE);
            if (clusterState.getNodes().isLocalNodeElectedMaster() &&
                    (currentMetaData == null || currentMetaData.getLicense() == null)) {
                // triggers a cluster changed event
                // eventually notifying the current licensee
                registerTrialLicense();
            }
        }
    }

    @Override
    protected void doStop() throws ElasticsearchException {
        clusterService.remove(this);
        scheduler.stop();
        // clear all handlers
        registeredLicensees.clear();
        // clear current license
        currentLicense.set(null);
    }

@@ -432,23 +373,18 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
        }
    }

    private void notifyLicensees(final License license) {
    protected void updateLicenseState(final License license) {
        if (license == LicensesMetaData.LICENSE_TOMBSTONE) {
            // implies license has been explicitly deleted
            // update licensee states
            registeredLicensees.forEach(InternalLicensee::onRemove);
            licenseState.update(License.OperationMode.MISSING, false);
            return;
        }
        if (license != null) {
            logger.debug("notifying [{}] listeners", registeredLicensees.size());
            long time = clock.millis();
            boolean active = time >= license.issueDate() &&
                    time < license.expiryDate() + GRACE_PERIOD_DURATION.getMillis();
            licenseState.update(license.operationMode(), active);

            Licensee.Status status = new Licensee.Status(license.operationMode(), active);
            for (InternalLicensee licensee : registeredLicensees) {
                licensee.onChange(status);
            }
            if (active) {
                if (time < license.expiryDate()) {
                    logger.debug("license [{}] - valid", license.uid());

@@ -487,7 +423,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
                previousLicense.removeOperationModeFileWatcher();
            }
        }
        notifyLicensees(license);
        updateLicenseState(license);
    }
}

@@ -510,24 +446,6 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
        };
    }

    private void initLicensee(Licensee licensee) {
        logger.debug("initializing licensee [{}]", licensee.id());
        final ClusterState clusterState = clusterService.state();
        if (clusterService.lifecycleState() == Lifecycle.State.STARTED
                && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false
                && clusterState.nodes().getMasterNode() != null) {
            final LicensesMetaData currentMetaData = clusterState.metaData().custom(LicensesMetaData.TYPE);
            if (clusterState.getNodes().isLocalNodeElectedMaster() &&
                    (currentMetaData == null || currentMetaData.getLicense() == null)) {
                // triggers a cluster changed event
                // eventually notifying the current licensee
                registerTrialLicense();
            } else {
                notifyLicensees(currentMetaData.getLicense());
            }
        }
    }

    License getLicense(final LicensesMetaData metaData) {
        if (metaData != null) {
            License license = metaData.getLicense();

@@ -543,48 +461,4 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
        }
        return null;
    }

    /**
     * Stores acknowledgement, expiration and license notification callbacks
     * for a registered listener
     */
    private final class InternalLicensee {
        volatile Licensee.Status currentStatus = Licensee.Status.MISSING;

        private final Licensee licensee;

        private InternalLicensee(Licensee licensee) {
            this.licensee = licensee;
        }

        @Override
        public String toString() {
            return "(listener: " + licensee.id() + ", state: " + currentStatus + ")";
        }

        public String id() {
            return licensee.id();
        }

        public String[] expirationMessages() {
            return licensee.expirationMessages();
        }

        public String[] acknowledgmentMessages(License.OperationMode currentMode, License.OperationMode newMode) {
            return licensee.acknowledgmentMessages(currentMode, newMode);
        }

        public synchronized void onChange(final Licensee.Status status) {
            if (currentStatus == null // not yet initialized
                    || !currentStatus.equals(status)) { // current license has changed
                logger.debug("licensee [{}] notified", licensee.id());
                licensee.onChange(status);
                currentStatus = status;
            }
        }

        public void onRemove() {
            onChange(Licensee.Status.MISSING);
        }
    }
}
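The checks in licenseeStatus and updateLicenseState treat a license as active from its issue date until its expiry date plus GRACE_PERIOD_DURATION, so a recently expired license still reports an active status during the grace window. A minimal, self-contained sketch of that window check, using a hypothetical 7-day grace period rather than the actual constant:

import java.time.Duration;
import java.time.Instant;

public class GracePeriodCheckSketch {

    // Hypothetical grace period; LicenseService defines its own GRACE_PERIOD_DURATION constant.
    private static final long GRACE_PERIOD_MILLIS = Duration.ofDays(7).toMillis();

    // Active from the issue date up to the expiry date plus the grace period, mirroring the check above.
    static boolean isActive(long nowMillis, long issueMillis, long expiryMillis) {
        return nowMillis >= issueMillis && nowMillis < expiryMillis + GRACE_PERIOD_MILLIS;
    }

    public static void main(String[] args) {
        long issue = Instant.parse("2016-01-01T00:00:00Z").toEpochMilli();
        long expiry = Instant.parse("2016-02-01T00:00:00Z").toEpochMilli();
        long oneDayAfterExpiry = expiry + Duration.ofDays(1).toMillis();
        long tenDaysAfterExpiry = expiry + Duration.ofDays(10).toMillis();
        System.out.println(isActive(oneDayAfterExpiry, issue, expiry));  // true: still inside the grace period
        System.out.println(isActive(tenDaysAfterExpiry, issue, expiry)); // false: grace period has elapsed
    }
}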
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin.core;
package org.elasticsearch.license;

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.rest.RestStatus;

@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.core;
package org.elasticsearch.license;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
@@ -0,0 +1,144 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license;

import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.EnumSet;

/**
 * Contains metadata about registered licenses
 */
class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implements MetaData.Custom {

    public static final String TYPE = "licenses";

    /**
     * When a license is explicitly removed by a user, LICENSE_TOMBSTONE
     * is used as a placeholder in the license metadata. This enables
     * us to distinguish between the scenario when a cluster never
     * had a license (null) and when a license was removed explicitly
     * (LICENSE_TOMBSTONE).
     * We rely on this to decide whether to generate an unsigned trial
     * license or not. We should only generate a license if no license
     * ever existed in the cluster state.
     */
    public static final License LICENSE_TOMBSTONE = License.builder()
            .type("trial")
            .issuer("elasticsearch")
            .uid("TOMBSTONE")
            .issuedTo("")
            .maxNodes(0)
            .issueDate(0)
            .expiryDate(0)
            .build();

    public static final LicensesMetaData PROTO = new LicensesMetaData(null);

    private License license;

    public LicensesMetaData(License license) {
        this.license = license;
    }

    public License getLicense() {
        return license;
    }

    @Override
    public String toString() {
        if (license != null) {
            return license.toString();
        }
        return "";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        LicensesMetaData that = (LicensesMetaData) o;
        return !(license != null ? !license.equals(that.license) : that.license != null);
    }

    @Override
    public int hashCode() {
        return license != null ? license.hashCode() : 0;
    }

    @Override
    public String type() {
        return TYPE;
    }

    @Override
    public EnumSet<MetaData.XContentContext> context() {
        return EnumSet.of(MetaData.XContentContext.GATEWAY);
    }

    @Override
    public LicensesMetaData fromXContent(XContentParser parser) throws IOException {
        License license = LICENSE_TOMBSTONE;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                if (fieldName != null) {
                    if (fieldName.equals(Fields.LICENSE)) {
                        token = parser.nextToken();
                        if (token == XContentParser.Token.START_OBJECT) {
                            license = License.fromXContent(parser);
                        } else if (token == XContentParser.Token.VALUE_NULL) {
                            license = LICENSE_TOMBSTONE;
                        }
                    }
                }
            }
        }
        return new LicensesMetaData(license);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (license == LICENSE_TOMBSTONE) {
            builder.nullField(Fields.LICENSE);
        } else {
            builder.startObject(Fields.LICENSE);
            license.toInnerXContent(builder, params);
            builder.endObject();
        }
        return builder;
    }

    @Override
    public void writeTo(StreamOutput streamOutput) throws IOException {
        if (license == LICENSE_TOMBSTONE) {
            streamOutput.writeBoolean(false); // no license
        } else {
            streamOutput.writeBoolean(true); // has a license
            license.writeTo(streamOutput);
        }
    }

    @Override
    public LicensesMetaData readFrom(StreamInput streamInput) throws IOException {
        License license = LICENSE_TOMBSTONE;
        if (streamInput.readBoolean()) {
            license = License.readLicense(streamInput);
        }
        return new LicensesMetaData(license);
    }

    private static final class Fields {
        private static final String LICENSE = "license";
    }
}
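LICENSE_TOMBSTONE is a sentinel value: a null license in the cluster state means the cluster has never been licensed, while the tombstone means a license existed and was explicitly deleted, so a trial license must not be auto-generated again. A small self-contained sketch of that three-way distinction, using a hypothetical stand-in type rather than the real License class:

public class TombstoneSketch {

    // Hypothetical stand-in for the License class.
    static final class FakeLicense {
        final String uid;
        FakeLicense(String uid) { this.uid = uid; }
    }

    // Sentinel meaning "a license existed but was explicitly removed".
    static final FakeLicense TOMBSTONE = new FakeLicense("TOMBSTONE");

    // Only self-generate a trial license when no license has ever existed;
    // a tombstone (explicit delete) or an installed license must not trigger it.
    static boolean shouldGenerateTrial(FakeLicense current) {
        return current == null;
    }

    public static void main(String[] args) {
        System.out.println(shouldGenerateTrial(null));                      // true: never licensed
        System.out.println(shouldGenerateTrial(TOMBSTONE));                 // false: explicitly deleted
        System.out.println(shouldGenerateTrial(new FakeLicense("abc123"))); // false: license installed
    }
}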
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin.core;
package org.elasticsearch.license;

public enum LicensesStatus {
    VALID((byte) 0),
@@ -3,7 +3,12 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin;
package org.elasticsearch.license;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;

@@ -13,31 +18,10 @@ import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseAction;
import org.elasticsearch.license.plugin.action.delete.TransportDeleteLicenseAction;
import org.elasticsearch.license.plugin.action.get.GetLicenseAction;
import org.elasticsearch.license.plugin.action.get.TransportGetLicenseAction;
import org.elasticsearch.license.plugin.action.put.PutLicenseAction;
import org.elasticsearch.license.plugin.action.put.TransportPutLicenseAction;
import org.elasticsearch.license.plugin.core.LicensesMetaData;
import org.elasticsearch.license.plugin.core.LicenseService;
import org.elasticsearch.license.plugin.rest.RestDeleteLicenseAction;
import org.elasticsearch.license.plugin.rest.RestGetLicenseAction;
import org.elasticsearch.license.plugin.rest.RestPutLicenseAction;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.graph.GraphLicensee;
import org.elasticsearch.xpack.monitoring.MonitoringLicensee;
import org.elasticsearch.xpack.security.SecurityLicenseState;
import org.elasticsearch.xpack.security.SecurityLicensee;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.WatcherLicensee;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.XPackPlugin.isTribeNode;

@@ -61,10 +45,6 @@ public class Licensing implements ActionPlugin {
        isTribeNode = isTribeNode(settings);
    }

    public Collection<Module> nodeModules() {
        return Collections.emptyList();
    }

    @Override
    public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
        if (isTribeNode) {

@@ -85,20 +65,6 @@ public class Licensing implements ActionPlugin {
                RestDeleteLicenseAction.class);
    }

    public Collection<Object> createComponents(ClusterService clusterService, Clock clock, Environment environment,
                                               ResourceWatcherService resourceWatcherService,
                                               SecurityLicenseState securityLicenseState) {
        SecurityLicensee securityLicensee = new SecurityLicensee(settings, securityLicenseState);
        WatcherLicensee watcherLicensee = new WatcherLicensee(settings);
        MonitoringLicensee monitoringLicensee = new MonitoringLicensee(settings);
        GraphLicensee graphLicensee = new GraphLicensee(settings);
        LicenseService licenseService = new LicenseService(settings, clusterService, clock,
                environment, resourceWatcherService,
                Arrays.asList(securityLicensee, watcherLicensee, monitoringLicensee, graphLicensee));

        return Arrays.asList(licenseService, securityLicenseState, securityLicensee, watcherLicensee, monitoringLicensee, graphLicensee);
    }

    public List<Setting<?>> getSettings() {
        // TODO convert this wildcard to a real setting
        return Collections.singletonList(Setting.groupSetting("license.", Setting.Property.NodeScope));
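createComponents above builds one LicenseService over all feature licensees (security, watcher, monitoring, graph), which then pushes every license status change to each of them. A rough, self-contained sketch of that observer wiring, using hypothetical interfaces in place of the real x-pack types:

import java.util.Arrays;
import java.util.List;

public class LicenseeWiringSketch {

    enum Status { ACTIVE, EXPIRED, MISSING }

    // Hypothetical stand-in for the Licensee interface.
    interface Licensee {
        String id();
        void onChange(Status status);
    }

    // Hypothetical stand-in for LicenseService: fans one status out to every registered licensee.
    static final class SimpleLicenseService {
        private final List<Licensee> licensees;
        SimpleLicenseService(List<Licensee> licensees) { this.licensees = licensees; }
        void updateLicenseState(Status status) {
            for (Licensee licensee : licensees) {
                licensee.onChange(status);
            }
        }
    }

    static Licensee named(String id) {
        return new Licensee() {
            @Override public String id() { return id; }
            @Override public void onChange(Status status) { System.out.println(id + " -> " + status); }
        };
    }

    public static void main(String[] args) {
        SimpleLicenseService service = new SimpleLicenseService(
                Arrays.asList(named("security"), named("watcher"), named("monitoring"), named("graph")));
        service.updateLicenseState(Status.ACTIVE);
    }
}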
@@ -3,23 +3,10 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin;
package org.elasticsearch.license;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseAction;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequestBuilder;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseResponse;
import org.elasticsearch.license.plugin.action.get.GetLicenseAction;
import org.elasticsearch.license.plugin.action.get.GetLicenseRequest;
import org.elasticsearch.license.plugin.action.get.GetLicenseRequestBuilder;
import org.elasticsearch.license.plugin.action.get.GetLicenseResponse;
import org.elasticsearch.license.plugin.action.put.PutLicenseAction;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequestBuilder;
import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;

/**
 *
@@ -3,11 +3,11 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.core;
package org.elasticsearch.license;


import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.license.core.License.OperationMode;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;

@@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 * In case of failure to read a valid operation mode from <code>licenseModePath</code>,
 * the operation mode will default to PLATINUM
 */
public final class OperationModeFileWatcher extends FileChangesListener {
public final class OperationModeFileWatcher implements FileChangesListener {
    private final ResourceWatcherService resourceWatcherService;
    private final Path licenseModePath;
    private final AtomicBoolean initialized = new AtomicBoolean();
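The javadoc above specifies a fail-safe default: when the operation mode file cannot be read or parsed, the watcher falls back to PLATINUM. A self-contained sketch of that fallback using plain java.nio and a hypothetical enum (not the real OperationModeFileWatcher):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;

public class OperationModeFallbackSketch {

    // Hypothetical subset of License.OperationMode.
    enum OperationMode { BASIC, GOLD, PLATINUM }

    // Read the mode from the file; on any failure (missing file, unreadable
    // content, unknown value) default to PLATINUM, as the javadoc describes.
    static OperationMode readOperationMode(Path licenseModePath) {
        try {
            String content = new String(Files.readAllBytes(licenseModePath), StandardCharsets.UTF_8).trim();
            return OperationMode.valueOf(content.toUpperCase(Locale.ROOT));
        } catch (IOException | IllegalArgumentException e) {
            return OperationMode.PLATINUM;
        }
    }

    public static void main(String[] args) {
        // A path that presumably does not exist, so the PLATINUM fallback applies.
        System.out.println(readOperationMode(Paths.get("does-not-exist.mode")));
    }
}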
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin.action.put;
package org.elasticsearch.license;

import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
@@ -3,14 +3,13 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.license.plugin.action.put;
package org.elasticsearch.license;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.license.core.License;

import java.io.IOException;

@@ -29,7 +28,7 @@ public class PutLicenseRequest extends AcknowledgedRequest<PutLicenseRequest> {
    }

    /**
     * Parses license from json format to an instance of {@link org.elasticsearch.license.core.License}
     * Parses license from json format to an instance of {@link License}
     *
     * @param licenseDefinition licenses definition
     */