Merge branch 'master' of github.com:elastic/x-plugins into chart-swap
Original commit: elastic/x-pack-elasticsearch@530df0c21b
commit 82a595e696
@@ -8,5 +8,5 @@ A set of Elastic's commercial plugins:
 - Marvel

 = Setup
-You must checkout x-plugins within an elasticsearch checkout. It must be
-called x-plugins, and must be inside the extra-plugins directory.
+You must checkout x-plugins and elasticsearch in the same directory as siblings. This
+elasticsearch checkout will be used when building x-plugins.

@@ -0,0 +1,220 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import com.squareup.okhttp.mockwebserver.MockResponse;
|
||||
import com.squareup.okhttp.mockwebserver.MockWebServer;
|
||||
import com.squareup.okhttp.mockwebserver.QueueDispatcher;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.script.mustache.MustachePlugin;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.DataAttachment;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachments;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.HttpRequestAttachment;
|
||||
import org.elasticsearch.watcher.actions.email.service.support.EmailServer;
|
||||
import org.elasticsearch.watcher.client.WatchSourceBuilder;
|
||||
import org.elasticsearch.watcher.client.WatcherClient;
|
||||
import org.elasticsearch.watcher.condition.compare.CompareCondition;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
import org.elasticsearch.watcher.support.http.Scheme;
|
||||
import org.elasticsearch.watcher.test.AbstractWatcherIntegrationTestCase;
|
||||
import org.elasticsearch.watcher.trigger.schedule.IntervalSchedule;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
import javax.mail.BodyPart;
|
||||
import javax.mail.Multipart;
|
||||
import javax.mail.Part;
|
||||
import javax.mail.internet.MimeMessage;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
import static org.elasticsearch.watcher.actions.ActionBuilders.emailAction;
|
||||
import static org.elasticsearch.watcher.actions.email.DataAttachment.JSON;
|
||||
import static org.elasticsearch.watcher.actions.email.DataAttachment.YAML;
|
||||
import static org.elasticsearch.watcher.client.WatchSourceBuilders.watchBuilder;
|
||||
import static org.elasticsearch.watcher.condition.ConditionBuilders.compareCondition;
|
||||
import static org.elasticsearch.watcher.input.InputBuilders.searchInput;
|
||||
import static org.elasticsearch.watcher.test.WatcherTestUtils.newInputSearchRequest;
|
||||
import static org.elasticsearch.watcher.trigger.TriggerBuilders.schedule;
|
||||
import static org.elasticsearch.watcher.trigger.schedule.Schedules.interval;
|
||||
import static org.hamcrest.Matchers.allOf;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
|
||||
public class EmailAttachmentTests extends AbstractWatcherIntegrationTestCase {
|
||||
|
||||
static final String USERNAME = "_user";
|
||||
static final String PASSWORD = "_passwd";
|
||||
|
||||
    private MockWebServer webServer = new MockWebServer();
|
||||
private EmailServer server;
|
||||
|
||||
@Before
|
||||
public void startWebservice() throws Exception {
|
||||
QueueDispatcher dispatcher = new QueueDispatcher();
|
||||
dispatcher.setFailFast(true);
|
||||
webServer.setDispatcher(dispatcher);
|
||||
webServer.start(0);
|
||||
MockResponse mockResponse = new MockResponse().setResponseCode(200)
|
||||
.addHeader("Content-Type", "application/foo").setBody("This is the content");
|
||||
webServer.enqueue(mockResponse);
|
||||
}
|
||||
|
||||
@After
|
||||
public void cleanup() throws Exception {
|
||||
server.stop();
|
||||
webServer.shutdown();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Class<? extends Plugin>> pluginTypes() {
|
||||
List<Class<? extends Plugin>> types = new ArrayList<>();
|
||||
types.addAll(super.pluginTypes());
|
||||
types.add(MustachePlugin.class);
|
||||
return types;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
if(server == null) {
|
||||
//Need to construct the Email Server here as this happens before init()
|
||||
server = EmailServer.localhost("2500-2600", USERNAME, PASSWORD, logger);
|
||||
}
|
||||
return Settings.builder()
|
||||
.put(super.nodeSettings(nodeOrdinal))
|
||||
.put("watcher.actions.email.service.account.test.smtp.auth", true)
|
||||
.put("watcher.actions.email.service.account.test.smtp.user", USERNAME)
|
||||
.put("watcher.actions.email.service.account.test.smtp.password", PASSWORD)
|
||||
.put("watcher.actions.email.service.account.test.smtp.port", server.port())
|
||||
.put("watcher.actions.email.service.account.test.smtp.host", "localhost")
|
||||
.build();
|
||||
}
|
||||
|
||||
public List<String> getAttachments(MimeMessage message) throws Exception {
|
||||
Object content = message.getContent();
|
||||
if (content instanceof String)
|
||||
return null;
|
||||
|
||||
if (content instanceof Multipart) {
|
||||
Multipart multipart = (Multipart) content;
|
||||
List<String> result = new ArrayList<>();
|
||||
|
||||
for (int i = 0; i < multipart.getCount(); i++) {
|
||||
result.addAll(getAttachments(multipart.getBodyPart(i)));
|
||||
}
|
||||
return result;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<String> getAttachments(BodyPart part) throws Exception {
|
||||
List<String> result = new ArrayList<>();
|
||||
Object content = part.getContent();
|
||||
if (content instanceof InputStream || content instanceof String) {
|
||||
if (Part.ATTACHMENT.equalsIgnoreCase(part.getDisposition()) || Strings.hasLength(part.getFileName())) {
|
||||
result.add(Streams.copyToString(new InputStreamReader(part.getInputStream(), StandardCharsets.UTF_8)));
|
||||
return result;
|
||||
} else {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
}
|
||||
|
||||
if (content instanceof Multipart) {
|
||||
Multipart multipart = (Multipart) content;
|
||||
for (int i = 0; i < multipart.getCount(); i++) {
|
||||
BodyPart bodyPart = multipart.getBodyPart(i);
|
||||
result.addAll(getAttachments(bodyPart));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void testThatEmailAttachmentsAreSent() throws Exception {
|
||||
org.elasticsearch.watcher.actions.email.DataAttachment dataFormat = randomFrom(JSON, YAML);
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
server.addListener(new EmailServer.Listener() {
|
||||
@Override
|
||||
public void on(MimeMessage message) throws Exception {
|
||||
assertThat(message.getSubject(), equalTo("Subject"));
|
||||
List<String> attachments = getAttachments(message);
|
||||
if (dataFormat == YAML) {
|
||||
assertThat(attachments, hasItem(allOf(startsWith("---"), containsString("_test_id"))));
|
||||
} else {
|
||||
assertThat(attachments, hasItem(allOf(startsWith("{"), containsString("_test_id"))));
|
||||
}
|
||||
assertThat(attachments, hasItem(containsString("This is the content")));
|
||||
latch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
WatcherClient watcherClient = watcherClient();
|
||||
createIndex("idx");
|
||||
// Have a sample document in the index, the watch is going to evaluate
|
||||
client().prepareIndex("idx", "type").setSource("field", "value").get();
|
||||
refresh();
|
||||
SearchRequest searchRequest = newInputSearchRequest("idx").source(searchSource().query(matchAllQuery()));
|
||||
|
||||
List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
|
||||
|
||||
DataAttachment dataAttachment = DataAttachment.builder("my-id").dataAttachment(dataFormat).build();
|
||||
attachments.add(dataAttachment);
|
||||
|
||||
HttpRequestTemplate requestTemplate = HttpRequestTemplate.builder("localhost", webServer.getPort()).path("/").scheme(Scheme.HTTP).build();
|
||||
HttpRequestAttachment httpRequestAttachment = HttpRequestAttachment.builder("other-id").httpRequestTemplate(requestTemplate).build();
|
||||
|
||||
attachments.add(httpRequestAttachment);
|
||||
EmailAttachments emailAttachments = new EmailAttachments(attachments);
|
||||
XContentBuilder tmpBuilder = jsonBuilder();
|
||||
emailAttachments.toXContent(tmpBuilder, ToXContent.EMPTY_PARAMS);
|
||||
logger.info("TMP BUILDER {}", tmpBuilder.string());
|
||||
|
||||
EmailTemplate.Builder emailBuilder = EmailTemplate.builder().from("_from").to("_to").subject("Subject");
|
||||
WatchSourceBuilder watchSourceBuilder = watchBuilder()
|
||||
.trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS)))
|
||||
.input(searchInput(searchRequest))
|
||||
                .condition(compareCondition("ctx.payload.hits.total", CompareCondition.Op.GT, 0L))
|
||||
.addAction("_email", emailAction(emailBuilder).setAuthentication(USERNAME, PASSWORD.toCharArray())
|
||||
.setAttachments(emailAttachments));
|
||||
logger.info("TMP WATCHSOURCE {}", watchSourceBuilder.build().getBytes().toUtf8());
|
||||
|
||||
watcherClient.preparePutWatch("_test_id")
|
||||
.setSource(watchSourceBuilder)
|
||||
.get();
|
||||
|
||||
if (timeWarped()) {
|
||||
timeWarp().scheduler().trigger("_test_id");
|
||||
refresh();
|
||||
}
|
||||
|
||||
assertWatchWithMinimumPerformedActionsCount("_test_id", 1);
|
||||
|
||||
if (!latch.await(5, TimeUnit.SECONDS)) {
|
||||
fail("waited too long for email to be received");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
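
The test's getAttachments() helpers above walk a MimeMessage recursively and collect the text of every part flagged as an attachment. For illustration only (not part of the commit), here is a minimal, self-contained JavaMail sketch that builds such a multipart message and extracts the attachment body the same way; the class and variable names below are hypothetical.

import javax.mail.BodyPart;
import javax.mail.Multipart;
import javax.mail.Part;
import javax.mail.Session;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import java.util.Properties;

public class MultipartSketch {
    public static void main(String[] args) throws Exception {
        Session session = Session.getInstance(new Properties());
        MimeMessage message = new MimeMessage(session);
        message.setSubject("Subject");

        // plain text body part
        MimeBodyPart body = new MimeBodyPart();
        body.setText("body text");

        // attachment part, flagged via disposition and file name
        MimeBodyPart attachment = new MimeBodyPart();
        attachment.setText("{\"hits\":{\"total\":1}}");
        attachment.setFileName("data.json");
        attachment.setDisposition(Part.ATTACHMENT);

        Multipart multipart = new MimeMultipart();
        multipart.addBodyPart(body);
        multipart.addBodyPart(attachment);
        message.setContent(multipart);
        message.saveChanges();

        // walk the parts the same way the test's getAttachments() does
        Multipart content = (Multipart) message.getContent();
        for (int i = 0; i < content.getCount(); i++) {
            BodyPart part = content.getBodyPart(i);
            if (Part.ATTACHMENT.equalsIgnoreCase(part.getDisposition())) {
                System.out.println("attachment: " + part.getContent());
            }
        }
    }
}
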
|
|
@@ -19,6 +19,7 @@ import org.elasticsearch.marvel.agent.exporter.Exporters;
|
|||
import org.elasticsearch.marvel.agent.renderer.RendererModule;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelModule;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.cleaner.CleanerService;
|
||||
import org.elasticsearch.marvel.license.LicenseModule;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.marvel.shield.InternalMarvelUser;
|
||||
|
@@ -90,7 +91,9 @@ public class MarvelPlugin extends Plugin {
|
|||
if (!enabled) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
-        return Arrays.<Class<? extends LifecycleComponent>>asList(MarvelLicensee.class, AgentService.class);
+        return Arrays.<Class<? extends LifecycleComponent>>asList(MarvelLicensee.class,
+                AgentService.class,
+                CleanerService.class);
|
||||
}
|
||||
|
||||
public static boolean marvelEnabled(Settings settings) {
|
||||
|
@@ -140,5 +143,6 @@ public class MarvelPlugin extends Plugin {
|
|||
module.registerSetting(MarvelSettings.COLLECTORS_SETTING);
|
||||
module.registerSetting(MarvelSettings.CLUSTER_STATE_TIMEOUT_SETTING);
|
||||
module.registerSetting(MarvelSettings.CLUSTER_STATS_TIMEOUT_SETTING);
|
||||
module.registerSetting(CleanerService.HISTORY_SETTING);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -76,7 +76,7 @@ public final class MarvelTemplateUtils {
|
|||
if (Strings.hasLength(version)) {
|
||||
return Integer.parseInt(version);
|
||||
}
|
||||
-            return null;
+            throw new IllegalArgumentException("no marvel template version found");
|
||||
} catch (NumberFormatException e) {
|
||||
throw new IllegalArgumentException("failed to parse marvel template version");
|
||||
} catch (IOException e) {
|
||||
|
|
|
@@ -6,7 +6,10 @@
|
|||
package org.elasticsearch.marvel.agent.exporter.local;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
|
||||
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
|
@@ -15,26 +18,37 @@ import org.elasticsearch.cluster.ClusterService;
|
|||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterStateListener;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.gateway.GatewayService;
|
||||
import org.elasticsearch.marvel.agent.exporter.ExportBulk;
|
||||
import org.elasticsearch.marvel.agent.exporter.Exporter;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
|
||||
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.cleaner.CleanerService;
|
||||
import org.elasticsearch.marvel.shield.SecuredClient;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
-public class LocalExporter extends Exporter implements ClusterStateListener {
+public class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener {
|
||||
|
||||
public static final String TYPE = "local";
|
||||
|
||||
private final Client client;
|
||||
private final ClusterService clusterService;
|
||||
private final RendererRegistry renderers;
|
||||
private final CleanerService cleanerService;
|
||||
|
||||
private volatile LocalBulk bulk;
|
||||
private volatile boolean active = true;
|
||||
|
@@ -42,11 +56,12 @@ public class LocalExporter extends Exporter implements ClusterStateListener {
|
|||
/** Version number of built-in templates **/
|
||||
private final Integer templateVersion;
|
||||
|
||||
-    public LocalExporter(Exporter.Config config, Client client, ClusterService clusterService, RendererRegistry renderers) {
+    public LocalExporter(Exporter.Config config, Client client, ClusterService clusterService, RendererRegistry renderers, CleanerService cleanerService) {
|
||||
super(TYPE, config);
|
||||
this.client = client;
|
||||
this.clusterService = clusterService;
|
||||
this.renderers = renderers;
|
||||
this.cleanerService = cleanerService;
|
||||
|
||||
// Loads the current version number of built-in templates
|
||||
templateVersion = MarvelTemplateUtils.TEMPLATE_VERSION;
|
||||
|
@@ -56,6 +71,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener {
|
|||
|
||||
bulk = resolveBulk(clusterService.state(), bulk);
|
||||
clusterService.add(this);
|
||||
cleanerService.add(this);
|
||||
}
|
||||
|
||||
LocalBulk getBulk() {
|
||||
|
@@ -95,6 +111,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener {
|
|||
public synchronized void close() {
|
||||
active = false;
|
||||
clusterService.remove(this);
|
||||
cleanerService.remove(this);
|
||||
if (bulk != null) {
|
||||
try {
|
||||
bulk.terminate();
|
||||
|
@@ -217,23 +234,108 @@ public class LocalExporter extends Exporter implements ClusterStateListener {
|
|||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCleanUpIndices(TimeValue retention) {
|
||||
if (bulk == null) {
|
||||
logger.debug("local exporter [{}] - not ready yet", name());
|
||||
return;
|
||||
}
|
||||
|
||||
if (clusterService.localNode().masterNode()) {
|
||||
|
||||
// Retention duration can be overridden at exporter level
|
||||
TimeValue exporterRetention = config.settings().getAsTime(CleanerService.HISTORY_DURATION, null);
|
||||
if (exporterRetention != null) {
|
||||
try {
|
||||
cleanerService.validateRetention(exporterRetention);
|
||||
retention = exporterRetention;
|
||||
} catch (IllegalArgumentException e) {
|
||||
logger.warn("local exporter [{}] - unable to use custom history duration [{}]: {}", name(), exporterRetention, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// Reference date time will be compared to index.creation_date settings,
|
||||
// that's why it must be in UTC
|
||||
DateTime expiration = new DateTime(DateTimeZone.UTC).minus(retention.millis());
|
||||
logger.debug("local exporter [{}] - cleaning indices [expiration={}, retention={}]", name(), expiration, retention);
|
||||
|
||||
ClusterState clusterState = clusterService.state();
|
||||
if (clusterState != null) {
|
||||
long expirationTime = expiration.getMillis();
|
||||
Set<String> indices = new HashSet<>();
|
||||
|
||||
for (ObjectObjectCursor<String, IndexMetaData> index : clusterState.getMetaData().indices()) {
|
||||
String indexName = index.key;
|
||||
if (Regex.simpleMatch(MarvelSettings.MARVEL_INDICES_PREFIX + "*", indexName)) {
|
||||
// Never delete the data indices
|
||||
if (indexName.startsWith(MarvelSettings.MARVEL_DATA_INDEX_PREFIX)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Never delete the current timestamped index
|
||||
if (indexName.equals(indexNameResolver().resolve(System.currentTimeMillis()))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
long creationDate = index.value.getCreationDate();
|
||||
if (creationDate <= expirationTime) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("local exporter [{}] - detected expired index [name={}, created={}, expired={}]", name(),
|
||||
indexName, new DateTime(creationDate, DateTimeZone.UTC), expiration);
|
||||
}
|
||||
indices.add(indexName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!indices.isEmpty()) {
|
||||
logger.info("local exporter [{}] - cleaning up [{}] old indices", name(), indices.size());
|
||||
deleteIndices(indices);
|
||||
} else {
|
||||
logger.debug("local exporter [{}] - no old indices found for clean up", name());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteIndices(Set<String> indices) {
|
||||
logger.trace("local exporter [{}] - deleting {} indices: {}", name(), indices.size(), Strings.collectionToCommaDelimitedString(indices));
|
||||
client.admin().indices().delete(new DeleteIndexRequest(indices.toArray(new String[indices.size()])), new ActionListener<DeleteIndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteIndexResponse response) {
|
||||
if (response.isAcknowledged()) {
|
||||
logger.debug("local exporter [{}] - indices deleted", name());
|
||||
} else {
|
||||
logger.warn("local exporter [{}] - unable to delete {} indices", name(), indices.size());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
logger.error("local exporter [{}] - failed to delete indices", e, name());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static class Factory extends Exporter.Factory<LocalExporter> {
|
||||
|
||||
private final SecuredClient client;
|
||||
private final RendererRegistry registry;
|
||||
private final ClusterService clusterService;
|
||||
private final CleanerService cleanerService;
|
||||
|
||||
@Inject
|
||||
-        public Factory(SecuredClient client, ClusterService clusterService, RendererRegistry registry) {
+        public Factory(SecuredClient client, ClusterService clusterService, RendererRegistry registry, CleanerService cleanerService) {
|
||||
super(TYPE, true);
|
||||
this.client = client;
|
||||
this.clusterService = clusterService;
|
||||
this.registry = registry;
|
||||
this.cleanerService = cleanerService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LocalExporter create(Config config) {
|
||||
-            return new LocalExporter(config, client, clusterService, registry);
+            return new LocalExporter(config, client, clusterService, registry, cleanerService);
|
||||
}
|
||||
}
|
||||
}
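
onCleanUpIndices() above keeps only the marvel indices whose creation date is newer than now minus the retention period, and always skips the data index and the current timestamped index. The following standalone sketch (not part of the commit, plain JDK only, with made-up index names) shows the same creation-date comparison in isolation.

import java.time.Duration;
import java.time.Instant;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

public class ExpirationSketch {
    /** Returns the names of indices whose creation date lies at or before now - retention. */
    static Set<String> expiredIndices(Map<String, Instant> creationDates, Duration retention, Instant now) {
        Instant expiration = now.minus(retention);
        Set<String> expired = new TreeSet<>();
        for (Map.Entry<String, Instant> index : creationDates.entrySet()) {
            // same comparison as the exporter: creationDate <= expiration means "too old"
            if (!index.getValue().isAfter(expiration)) {
                expired.add(index.getKey());
            }
        }
        return expired;
    }

    public static void main(String[] args) {
        Instant now = Instant.parse("2015-12-01T00:00:00Z");
        Map<String, Instant> indices = new LinkedHashMap<>();
        indices.put(".marvel-es-2015.11.20", Instant.parse("2015-11-20T00:00:00Z"));
        indices.put(".marvel-es-2015.11.30", Instant.parse("2015-11-30T00:00:00Z"));
        // 7 day retention: only the index created on 2015.11.20 is expired
        System.out.println(expiredIndices(indices, Duration.ofDays(7), now));
    }
}
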
|
||||
|
|
|
@@ -7,6 +7,7 @@ package org.elasticsearch.marvel.agent.settings;
|
|||
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.marvel.agent.AgentService;
|
||||
import org.elasticsearch.marvel.cleaner.CleanerService;
|
||||
|
||||
public class MarvelModule extends AbstractModule {
|
||||
|
||||
|
@@ -14,5 +15,6 @@ public class MarvelModule extends AbstractModule {
|
|||
protected void configure() {
|
||||
bind(MarvelSettings.class).asEagerSingleton();
|
||||
bind(AgentService.class).asEagerSingleton();
|
||||
bind(CleanerService.class).asEagerSingleton();
|
||||
}
|
||||
}
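
MarvelModule binds CleanerService as an eager singleton in Elasticsearch's bundled Guice fork, so the service is constructed as soon as the injector is created rather than on first use. A minimal sketch of that behaviour with vanilla Guice (an assumption: the real code uses org.elasticsearch.common.inject, and CleanerServiceStub below is a placeholder, not the real class):

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class EagerSingletonSketch {
    static class CleanerServiceStub {
        CleanerServiceStub() {
            System.out.println("constructed eagerly at injector creation");
        }
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                // mirrors bind(CleanerService.class).asEagerSingleton() in MarvelModule
                bind(CleanerServiceStub.class).asEagerSingleton();
            }
        });
        // the instance already exists; this lookup returns the same object
        System.out.println(injector.getInstance(CleanerServiceStub.class));
    }
}
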
|
||||
|
|
|
@@ -0,0 +1,204 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.marvel.cleaner;
|
||||
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.chrono.ISOChronology;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
|
||||
/**
|
||||
* CleanerService takes care of deleting old marvel indices.
|
||||
*/
|
||||
public class CleanerService extends AbstractLifecycleComponent<CleanerService> {
|
||||
|
||||
public static final String HISTORY_DURATION = "history.duration";
|
||||
public static final Setting<TimeValue> HISTORY_SETTING = Setting.timeSetting("marvel." + HISTORY_DURATION, TimeValue.timeValueHours(7 * 24), true, Setting.Scope.CLUSTER);
|
||||
|
||||
private final MarvelLicensee licensee;
|
||||
private final ThreadPool threadPool;
|
||||
private final ExecutionScheduler executionScheduler;
|
||||
private final List<Listener> listeners = new CopyOnWriteArrayList<>();
|
||||
|
||||
private volatile IndicesCleaner runnable;
|
||||
private volatile TimeValue retention;
|
||||
|
||||
CleanerService(Settings settings, ClusterSettings clusterSettings, MarvelLicensee licensee, ThreadPool threadPool, ExecutionScheduler executionScheduler) {
|
||||
super(settings);
|
||||
this.licensee = licensee;
|
||||
this.threadPool = threadPool;
|
||||
this.executionScheduler = executionScheduler;
|
||||
clusterSettings.addSettingsUpdateConsumer(HISTORY_SETTING, this::setRetention, this::validateRetention);
|
||||
}
|
||||
|
||||
@Inject
|
||||
public CleanerService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, MarvelLicensee licensee) {
|
||||
        this(settings, clusterSettings, licensee, threadPool, new DefaultExecutionScheduler());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doStart() {
|
||||
logger.debug("starting cleaning service");
|
||||
this.runnable = new IndicesCleaner();
|
||||
threadPool.schedule(executionScheduler.nextExecutionDelay(new DateTime(ISOChronology.getInstance())), executorName(), runnable);
|
||||
logger.debug("cleaning service started");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doStop() {
|
||||
logger.debug("stopping cleaning service");
|
||||
listeners.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doClose() {
|
||||
logger.debug("closing cleaning service");
|
||||
runnable.cancel();
|
||||
logger.debug("cleaning service closed");
|
||||
}
|
||||
|
||||
private String executorName() {
|
||||
return ThreadPool.Names.GENERIC;
|
||||
}
|
||||
|
||||
TimeValue getRetention() {
|
||||
return retention;
|
||||
}
|
||||
|
||||
public void setRetention(TimeValue retention) {
|
||||
validateRetention(retention);
|
||||
this.retention = retention;
|
||||
}
|
||||
|
||||
public void validateRetention(TimeValue retention) {
|
||||
if (retention == null) {
|
||||
throw new IllegalArgumentException("history duration setting cannot be null");
|
||||
}
|
||||
if ((retention.getMillis() <= 0) && (retention.getMillis() != -1)) {
|
||||
throw new IllegalArgumentException("invalid history duration setting value");
|
||||
}
|
||||
if (!licensee.allowUpdateRetention()) {
|
||||
throw new IllegalArgumentException("license does not allow the history duration setting to be updated to value [" + retention + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public void add(Listener listener) {
|
||||
listeners.add(listener);
|
||||
}
|
||||
|
||||
public void remove(Listener listener) {
|
||||
listeners.remove(listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Listener that gets called when indices must be cleaned
|
||||
*/
|
||||
public interface Listener {
|
||||
|
||||
/**
|
||||
* This method is called on listeners so that they can
|
||||
* clean indices.
|
||||
*
|
||||
* @param retention global retention value, it can be overridden at exporter level
|
||||
*/
|
||||
void onCleanUpIndices(TimeValue retention);
|
||||
}
|
||||
|
||||
class IndicesCleaner extends AbstractRunnable {
|
||||
|
||||
private volatile ScheduledFuture<?> future;
|
||||
|
||||
@Override
|
||||
protected void doRun() throws Exception {
|
||||
if (lifecycle.stoppedOrClosed()) {
|
||||
logger.trace("cleaning service is stopping, exiting");
|
||||
return;
|
||||
}
|
||||
if (!licensee.cleaningEnabled()) {
|
||||
logger.debug("cleaning service is disabled due to invalid license");
|
||||
return;
|
||||
}
|
||||
|
||||
TimeValue globalRetention = retention;
|
||||
if (globalRetention == null) {
|
||||
try {
|
||||
globalRetention = HISTORY_SETTING.get(settings);
|
||||
validateRetention(globalRetention);
|
||||
} catch (IllegalArgumentException e) {
|
||||
globalRetention = HISTORY_SETTING.get(Settings.EMPTY);
|
||||
}
|
||||
}
|
||||
|
||||
DateTime start = new DateTime(ISOChronology.getInstance());
|
||||
if (globalRetention.millis() > 0) {
|
||||
logger.trace("cleaning up indices with retention [{}]", globalRetention);
|
||||
|
||||
for (Listener listener : listeners) {
|
||||
try {
|
||||
listener.onCleanUpIndices(globalRetention);
|
||||
} catch (Throwable t) {
|
||||
logger.error("listener failed to clean indices", t);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!lifecycle.stoppedOrClosed()) {
|
||||
TimeValue delay = executionScheduler.nextExecutionDelay(start);
|
||||
logger.debug("scheduling next execution in [{}] seconds", delay.seconds());
|
||||
future = threadPool.schedule(delay, executorName(), this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
logger.error("failed to clean indices", t);
|
||||
}
|
||||
|
||||
public void cancel() {
|
||||
FutureUtils.cancel(future);
|
||||
}
|
||||
}
|
||||
|
||||
interface ExecutionScheduler {
|
||||
|
||||
/**
|
||||
* Calculates the delay in millis between "now" and
|
||||
* the next execution.
|
||||
*
|
||||
* @param now the current time
|
||||
* @return the delay in millis
|
||||
*/
|
||||
TimeValue nextExecutionDelay(DateTime now);
|
||||
}
|
||||
|
||||
/**
|
||||
* Schedules the task so that it is executed every day at 01:00 AM
|
||||
*/
|
||||
static class DefaultExecutionScheduler implements ExecutionScheduler {
|
||||
|
||||
@Override
|
||||
public TimeValue nextExecutionDelay(DateTime now) {
|
||||
// Runs at 01:00 AM today or the next day if it's too late
|
||||
DateTime next = now.withTimeAtStartOfDay().plusHours(1);
|
||||
if (next.isBefore(now) || next.equals(now)) {
|
||||
next = next.plusDays(1);
|
||||
}
|
||||
return TimeValue.timeValueMillis(next.getMillis() - now.getMillis());
|
||||
}
|
||||
}
|
||||
}
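
DefaultExecutionScheduler#nextExecutionDelay computes the time until the next 01:00 run: start of the current day plus one hour, pushed to the next day if that moment has already passed. A small java.time sketch of the same calculation (illustrative only; the real class uses Joda-Time and TimeValue):

import java.time.Duration;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class NextRunSketch {
    /** Delay until the next 01:00, mirroring DefaultExecutionScheduler#nextExecutionDelay. */
    static Duration delayUntilNextRun(ZonedDateTime now) {
        ZonedDateTime next = now.toLocalDate().atStartOfDay(now.getZone()).plusHours(1);
        if (!next.isAfter(now)) {          // already at or past 01:00 today -> run tomorrow
            next = next.plusDays(1);
        }
        return Duration.between(now, next);
    }

    public static void main(String[] args) {
        System.out.println(delayUntilNextRun(ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));   // PT1H
        System.out.println(delayUntilNextRun(ZonedDateTime.of(2015, 1, 1, 0, 59, 0, 0, ZoneOffset.UTC)));  // PT1M
        System.out.println(delayUntilNextRun(ZonedDateTime.of(2015, 1, 1, 1, 0, 0, 0, ZoneOffset.UTC)));   // PT24H
    }
}
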
|
|
@@ -27,7 +27,8 @@ public class MarvelLicensee extends AbstractLicenseeComponent<MarvelLicensee> im
|
|||
@Override
|
||||
public String[] expirationMessages() {
|
||||
return new String[] {
|
||||
"The agent will stop collecting cluster and indices metrics"
|
||||
"The agent will stop collecting cluster and indices metrics",
|
||||
"The agent will stop to automatically clean up indices older than [marvel.history.duration]",
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -46,7 +47,10 @@ public class MarvelLicensee extends AbstractLicenseeComponent<MarvelLicensee> im
|
|||
"running multiple clusters, users won't be able to access the clusters with\n" +
|
||||
"[{}] licenses from within a single Marvel instance. You will have to deploy a\n" +
|
||||
"separate and dedicated Marvel instance for each [{}] cluster you wish to monitor.",
|
||||
-                        newLicense.type(), newLicense.type(), newLicense.type())
+                        newLicense.type(), newLicense.type(), newLicense.type()),
+                LoggerMessageFormat.format(
+                        "Automatic index cleanup is disabled for clusters with [{}] license.", newLicense.type())
|
||||
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -62,4 +66,14 @@ public class MarvelLicensee extends AbstractLicenseeComponent<MarvelLicensee> im
|
|||
status.getLicenseState() != LicenseState.DISABLED;
|
||||
}
|
||||
|
||||
public boolean cleaningEnabled() {
|
||||
Status status = this.status;
|
||||
return status.getMode() != License.OperationMode.NONE &&
|
||||
status.getLicenseState() != LicenseState.DISABLED;
|
||||
}
|
||||
|
||||
public boolean allowUpdateRetention() {
|
||||
Status status = this.status;
|
||||
return status.getMode() == License.OperationMode.PLATINUM || status.getMode() == License.OperationMode.GOLD;
|
||||
}
|
||||
}
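
allowUpdateRetention() gates the retention setting on the license mode: only GOLD and PLATINUM licenses may change it, while cleaningEnabled() only requires a non-disabled license. A tiny standalone sketch of that kind of mode check (the enum below is a simplified stand-in, not the real License.OperationMode):

import java.util.EnumSet;
import java.util.Set;

public class LicenseGateSketch {
    enum OperationMode { NONE, BASIC, GOLD, PLATINUM, TRIAL }

    // mirrors the idea of MarvelLicensee#allowUpdateRetention: only paid-for modes may change the retention
    private static final Set<OperationMode> RETENTION_UPDATE_ALLOWED =
            EnumSet.of(OperationMode.GOLD, OperationMode.PLATINUM);

    static boolean allowUpdateRetention(OperationMode mode) {
        return RETENTION_UPDATE_ALLOWED.contains(mode);
    }

    public static void main(String[] args) {
        System.out.println(allowUpdateRetention(OperationMode.GOLD));   // true
        System.out.println(allowUpdateRetention(OperationMode.BASIC));  // false
    }
}
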
|
||||
|
|
|
@@ -31,7 +31,7 @@ public class InternalMarvelUser extends User.Simple {
|
|||
// we need all monitoring access
|
||||
.add(Privilege.Index.MONITOR, "*")
|
||||
|
||||
-            // and full access to .marvel-* and .marvel-data indices
+            // and full access to .marvel-es-* and .marvel-es-data indices
|
||||
.add(Privilege.Index.ALL, MarvelSettings.MARVEL_INDICES_PREFIX + "*")
|
||||
|
||||
// note, we don't need _license permission as we're taking the licenses
|
||||
|
|
|
@@ -109,7 +109,7 @@ public class IndexStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
assertThat(indexStats.getTotal().getStore().getSizeInBytes(), greaterThan(0L));
|
||||
assertThat(indexStats.getTotal().getStore().getThrottleTime().millis(), equalTo(0L));
|
||||
assertNotNull(indexStats.getTotal().getIndexing());
|
||||
-        assertThat(indexStats.getTotal().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0L));
+        assertThat(indexStats.getTotal().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0L));
|
||||
}
|
||||
|
||||
public void testIndexStatsCollectorMultipleIndices() throws Exception {
|
||||
|
@@ -168,7 +168,7 @@ public class IndexStatsCollectorTests extends AbstractCollectorTestCase {
|
|||
assertThat(indexStats.getTotal().getStore().getSizeInBytes(), greaterThanOrEqualTo(0L));
|
||||
assertThat(indexStats.getTotal().getStore().getThrottleTime().millis(), equalTo(0L));
|
||||
assertNotNull(indexStats.getTotal().getIndexing());
|
||||
-                assertThat(indexStats.getTotal().getIndexing().getTotal().getThrottleTimeInMillis(), equalTo(0L));
+                assertThat(indexStats.getTotal().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0L));
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.SettingsException;
|
|||
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
|
||||
import org.elasticsearch.marvel.agent.renderer.RendererRegistry;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.cleaner.CleanerService;
|
||||
import org.elasticsearch.marvel.shield.MarvelSettingsFilter;
|
||||
import org.elasticsearch.marvel.shield.MarvelShieldIntegration;
|
||||
import org.elasticsearch.marvel.shield.SecuredClient;
|
||||
|
@@ -59,7 +60,7 @@ public class ExportersTests extends ESTestCase {
|
|||
clusterService = mock(ClusterService.class);
|
||||
|
||||
// we always need to have the local exporter as it serves as the default one
|
||||
-        factories.put(LocalExporter.TYPE, new LocalExporter.Factory(new SecuredClient(client, mock(MarvelShieldIntegration.class)), clusterService, mock(RendererRegistry.class)));
+        factories.put(LocalExporter.TYPE, new LocalExporter.Factory(new SecuredClient(client, mock(MarvelShieldIntegration.class)), clusterService, mock(RendererRegistry.class), mock(CleanerService.class)));
|
||||
clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(MarvelSettings.COLLECTORS_SETTING, MarvelSettings.INTERVAL_SETTING, Exporters.EXPORTERS_SETTING)));
|
||||
settingsFilter = mock(MarvelSettingsFilter.class);
|
||||
exporters = new Exporters(Settings.EMPTY, factories, settingsFilter, clusterService, clusterSettings);
|
||||
|
|
|
@@ -0,0 +1,215 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.marvel.cleaner;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.license.core.License;
|
||||
import org.elasticsearch.license.plugin.core.LicenseState;
|
||||
import org.elasticsearch.license.plugin.core.Licensee;
|
||||
import org.elasticsearch.marvel.agent.exporter.Exporter;
|
||||
import org.elasticsearch.marvel.agent.exporter.Exporters;
|
||||
import org.elasticsearch.marvel.agent.exporter.IndexNameResolver;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
|
||||
@ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0)
|
||||
public abstract class AbstractIndicesCleanerTestCase extends MarvelIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
Settings.Builder settings = Settings.builder()
|
||||
.put(super.nodeSettings(nodeOrdinal))
|
||||
.put(MarvelSettings.INTERVAL_SETTING.getKey(), "-1")
|
||||
.put(CleanerService.HISTORY_SETTING.getKey(), "-1");
|
||||
return settings.build();
|
||||
}
|
||||
|
||||
public void testNothingToDelete() throws Exception {
|
||||
internalCluster().startNode();
|
||||
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(0));
|
||||
assertIndicesCount(0);
|
||||
}
|
||||
|
||||
public void testDeleteIndex() throws Exception {
|
||||
internalCluster().startNode();
|
||||
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "test", now().minusDays(10));
|
||||
assertIndicesCount(1);
|
||||
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(10));
|
||||
assertIndicesCount(0);
|
||||
}
|
||||
|
||||
public void testIgnoreDataIndex() throws Exception {
|
||||
internalCluster().startNode();
|
||||
|
||||
createIndex(MarvelSettings.MARVEL_DATA_INDEX_PREFIX + "test", now().minusDays(10));
|
||||
assertIndicesCount(1);
|
||||
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(0));
|
||||
assertIndicesCount(1);
|
||||
}
|
||||
|
||||
public void testIgnoreCurrentTimestampedIndex() throws Exception {
|
||||
internalCluster().startNode();
|
||||
|
||||
IndexNameResolver indexNameResolver = null;
|
||||
for (Exporter exporter : internalCluster().getInstance(Exporters.class)) {
|
||||
indexNameResolver = exporter.indexNameResolver();
|
||||
}
|
||||
assertNotNull(indexNameResolver);
|
||||
|
||||
DateTime tenDaysAgo = now().minusDays(10);
|
||||
createIndex(indexNameResolver.resolve(tenDaysAgo.getMillis()), tenDaysAgo);
|
||||
|
||||
DateTime today = now();
|
||||
createIndex(indexNameResolver.resolve(today.getMillis()), today);
|
||||
assertIndicesCount(2);
|
||||
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(0));
|
||||
}
|
||||
|
||||
public void testDeleteIndices() throws Exception {
|
||||
internalCluster().startNode();
|
||||
|
||||
CleanerService.Listener listener = getListener();
|
||||
|
||||
final DateTime now = now();
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "one-year-ago", now.minusYears(1));
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "six-months-ago", now.minusMonths(6));
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "one-month-ago", now.minusMonths(1));
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "ten-days-ago", now.minusDays(10));
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + "one-day-ago", now.minusDays(1));
|
||||
assertIndicesCount(5);
|
||||
|
||||
// Clean indices that have expired two years ago
|
||||
listener.onCleanUpIndices(years(2));
|
||||
assertIndicesCount(5);
|
||||
|
||||
// Clean indices that have expired 8 months ago
|
||||
listener.onCleanUpIndices(months(8));
|
||||
assertIndicesCount(4);
|
||||
|
||||
// Clean indices that have expired 3 months ago
|
||||
listener.onCleanUpIndices(months(3));
|
||||
assertIndicesCount(3);
|
||||
|
||||
// Clean indices that have expired 15 days ago
|
||||
listener.onCleanUpIndices(days(15));
|
||||
assertIndicesCount(2);
|
||||
|
||||
// Clean indices that have expired 7 days ago
|
||||
listener.onCleanUpIndices(days(7));
|
||||
assertIndicesCount(1);
|
||||
|
||||
// Clean indices until now
|
||||
listener.onCleanUpIndices(days(0));
|
||||
assertIndicesCount(0);
|
||||
}
|
||||
|
||||
public void testRetentionAsGlobalSetting() throws Exception {
|
||||
final int max = 10;
|
||||
final int retention = randomIntBetween(1, max);
|
||||
internalCluster().startNode(Settings.builder().put(CleanerService.HISTORY_SETTING.getKey(), String.format(Locale.ROOT, "%dd", retention)));
|
||||
|
||||
final DateTime now = now();
|
||||
for (int i = 0; i < max; i++) {
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + String.valueOf(i), now.minusDays(i));
|
||||
}
|
||||
assertIndicesCount(max);
|
||||
|
||||
// Clean indices that have expired for N days, as specified in the global retention setting
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(retention));
|
||||
assertIndicesCount(retention);
|
||||
}
|
||||
|
||||
public void testRetentionAsExporterSetting() throws Exception {
|
||||
final int max = 10;
|
||||
|
||||
// Default retention is between 3 and max days
|
||||
final int defaultRetention = randomIntBetween(3, max);
|
||||
internalCluster().startNode(Settings.builder().put(CleanerService.HISTORY_SETTING.getKey(), String.format(Locale.ROOT, "%dd", defaultRetention)));
|
||||
|
||||
final DateTime now = now();
|
||||
for (int i = 0; i < max; i++) {
|
||||
createIndex(MarvelSettings.MARVEL_INDICES_PREFIX + String.valueOf(i), now.minusDays(i));
|
||||
}
|
||||
assertIndicesCount(max);
|
||||
|
||||
// Exporter retention is between 1 and the default retention
|
||||
final int exporterRetention = randomIntBetween(1, defaultRetention);
|
||||
assertThat(exporterRetention, lessThanOrEqualTo(defaultRetention));
|
||||
|
||||
// Updates the retention setting for the exporter
|
||||
Exporters exporters = internalCluster().getInstance(Exporters.class);
|
||||
for (Exporter exporter : exporters) {
|
||||
Settings transientSettings = Settings.builder().put("marvel.agent.exporters." + exporter.name() + "." + CleanerService.HISTORY_DURATION, String.format(Locale.ROOT, "%dd", exporterRetention)).build();
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(transientSettings));
|
||||
}
|
||||
|
||||
// Move to GOLD license
|
||||
for (MarvelLicensee licensee : internalCluster().getInstances(MarvelLicensee.class)) {
|
||||
licensee.onChange(new Licensee.Status(License.OperationMode.GOLD, LicenseState.ENABLED));
|
||||
}
|
||||
|
||||
// Try to clean indices using the global setting
|
||||
CleanerService.Listener listener = getListener();
|
||||
listener.onCleanUpIndices(days(defaultRetention));
|
||||
|
||||
// Checks that indices have been deleted according to
|
||||
// the retention configured at exporter level
|
||||
assertIndicesCount(exporterRetention);
|
||||
}
|
||||
|
||||
protected CleanerService.Listener getListener() {
|
||||
Exporters exporters = internalCluster().getInstance(Exporters.class);
|
||||
for (Exporter exporter : exporters) {
|
||||
if (exporter instanceof CleanerService.Listener) {
|
||||
return (CleanerService.Listener) exporter;
|
||||
}
|
||||
}
|
||||
throw new IllegalStateException("unable to find listener");
|
||||
}
|
||||
|
||||
protected abstract void createIndex(String name, DateTime creationDate);
|
||||
|
||||
protected abstract void assertIndicesCount(int count) throws Exception;
|
||||
|
||||
private static TimeValue years(int years) {
|
||||
DateTime now = now();
|
||||
return TimeValue.timeValueMillis(now.getMillis() - now.minusYears(years).getMillis());
|
||||
}
|
||||
|
||||
private static TimeValue months(int months) {
|
||||
DateTime now = now();
|
||||
return TimeValue.timeValueMillis(now.getMillis() - now.minusMonths(months).getMillis());
|
||||
}
|
||||
|
||||
private static TimeValue days(int days) {
|
||||
return TimeValue.timeValueHours(days * 24);
|
||||
}
|
||||
|
||||
private static DateTime now() {
|
||||
return new DateTime(DateTimeZone.UTC);
|
||||
}
|
||||
}
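
The days(), months() and years() helpers above turn a calendar offset into a retention TimeValue by subtracting dates, so months and years vary in length while days are a fixed 24 hours. A plain java.time sketch of the difference (illustrative only, with an arbitrary reference date):

import java.time.Duration;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class RetentionWindowSketch {
    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.of(2015, 3, 31, 0, 0, 0, 0, ZoneOffset.UTC);

        // months/years are calendar based, so their millisecond length depends on "now"
        Duration eightMonths = Duration.between(now.minusMonths(8), now);
        Duration twoYears = Duration.between(now.minusYears(2), now);

        // days are a fixed 24 hours, like TimeValue.timeValueHours(days * 24) in the test
        Duration fifteenDays = Duration.ofHours(15 * 24);

        System.out.println(eightMonths.toDays()); // 243 for this particular "now"
        System.out.println(twoYears.toDays());    // 730 (no leap day in the 2013-2015 window)
        System.out.println(fifteenDays.toDays()); // 15
    }
}
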
|
|
@@ -0,0 +1,167 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.marvel.cleaner;
|
||||
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.marvel.license.MarvelLicensee;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class CleanerServiceTests extends ESTestCase {
|
||||
|
||||
private ClusterSettings clusterSettings;
|
||||
private TimeValue defaultRetention;
|
||||
private ThreadPool threadPool;
|
||||
|
||||
@Before
|
||||
public void start() {
|
||||
clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<Setting<?>>(Arrays.asList(CleanerService.HISTORY_SETTING)));
|
||||
defaultRetention = TimeValue.parseTimeValue("7d", null, "");
|
||||
threadPool = new ThreadPool("CleanerServiceTests");
|
||||
}
|
||||
|
||||
@After
|
||||
public void stop() throws InterruptedException {
|
||||
terminate(threadPool);
|
||||
}
|
||||
|
||||
public void testRetentionDefaultValue() {
|
||||
MarvelLicensee licensee = mock(MarvelLicensee.class);
|
||||
when(licensee.allowUpdateRetention()).thenReturn(false);
|
||||
assertNull(new CleanerService(Settings.EMPTY, clusterSettings, threadPool, licensee).getRetention());
|
||||
}
|
||||
|
||||
public void testRetentionUpdateAllowed() {
|
||||
TimeValue randomRetention = TimeValue.parseTimeValue(randomTimeValue(), null, "");
|
||||
|
||||
MarvelLicensee licensee = mock(MarvelLicensee.class);
|
||||
when(licensee.allowUpdateRetention()).thenReturn(true);
|
||||
|
||||
CleanerService service = new CleanerService(Settings.EMPTY, clusterSettings, threadPool, licensee);
|
||||
service.setRetention(randomRetention);
|
||||
assertThat(service.getRetention(), equalTo(randomRetention));
|
||||
|
||||
try {
|
||||
service.validateRetention(randomRetention);
|
||||
} catch (IllegalArgumentException e) {
|
||||
fail("fail to validate new value of retention");
|
||||
}
|
||||
}
|
||||
|
||||
public void testRetentionUpdateBlocked() {
|
||||
TimeValue randomRetention = TimeValue.parseTimeValue(randomTimeValue(), null, "");
|
||||
|
||||
MarvelLicensee licensee = mock(MarvelLicensee.class);
|
||||
when(licensee.allowUpdateRetention()).thenReturn(false);
|
||||
|
||||
CleanerService service = new CleanerService(Settings.EMPTY, clusterSettings, threadPool, licensee);
|
||||
try {
|
||||
service.setRetention(randomRetention);
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("license does not allow the history duration setting to be updated to value"));
|
||||
assertNull(service.getRetention());
|
||||
}
|
||||
|
||||
try {
|
||||
service.validateRetention(randomRetention);
|
||||
fail("exception should have been thrown");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("license does not allow the history duration setting to be updated to value"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testNextExecutionDelay() {
|
||||
CleanerService.ExecutionScheduler scheduler = new CleanerService.DefaultExecutionScheduler();
|
||||
|
||||
DateTime now = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
|
||||
assertThat(scheduler.nextExecutionDelay(now).millis(), equalTo(TimeValue.timeValueHours(1).millis()));
|
||||
|
||||
now = new DateTime(2015, 1, 1, 1, 0, DateTimeZone.UTC);
|
||||
assertThat(scheduler.nextExecutionDelay(now).millis(), equalTo(TimeValue.timeValueHours(24).millis()));
|
||||
|
||||
now = new DateTime(2015, 1, 1, 0, 59, DateTimeZone.UTC);
|
||||
assertThat(scheduler.nextExecutionDelay(now).millis(), equalTo(TimeValue.timeValueMinutes(1).millis()));
|
||||
|
||||
now = new DateTime(2015, 1, 1, 23, 59, DateTimeZone.UTC);
|
||||
assertThat(scheduler.nextExecutionDelay(now).millis(), equalTo(TimeValue.timeValueMinutes(60 + 1).millis()));
|
||||
|
||||
now = new DateTime(2015, 1, 1, 12, 34, 56);
|
||||
assertThat(scheduler.nextExecutionDelay(now).millis(), equalTo(new DateTime(2015, 1, 2, 1, 0, 0).getMillis() - now.getMillis()));
|
||||
|
||||
}
|
||||
|
||||
public void testExecution() throws InterruptedException {
|
||||
final int nbExecutions = randomIntBetween(1, 3);
|
||||
CountDownLatch latch = new CountDownLatch(nbExecutions);
|
||||
|
||||
logger.debug("--> creates a cleaner service that cleans every second");
|
||||
MarvelLicensee licensee = mock(MarvelLicensee.class);
|
||||
when(licensee.cleaningEnabled()).thenReturn(true);
|
||||
CleanerService service = new CleanerService(Settings.EMPTY, clusterSettings, licensee, threadPool, new TestExecutionScheduler(1_000));
|
||||
|
||||
logger.debug("--> registers cleaning listener");
|
||||
TestListener listener = new TestListener(latch);
|
||||
service.add(listener);
|
||||
|
||||
try {
|
||||
logger.debug("--> starts cleaning service");
|
||||
service.start();
|
||||
|
||||
logger.debug("--> waits for listener to be executed");
|
||||
if (!latch.await(10, TimeUnit.SECONDS)) {
|
||||
fail("waiting too long for test to complete. Expected listener was not executed");
|
||||
}
|
||||
} finally {
|
||||
service.stop();
|
||||
}
|
||||
assertThat(latch.getCount(), equalTo(0L));
|
||||
}
|
||||
|
||||
class TestListener implements CleanerService.Listener {
|
||||
|
||||
final CountDownLatch latch;
|
||||
|
||||
TestListener(CountDownLatch latch) {
|
||||
this.latch = latch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCleanUpIndices(TimeValue retention) {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
class TestExecutionScheduler implements CleanerService.ExecutionScheduler {
|
||||
|
||||
final long offset;
|
||||
|
||||
TestExecutionScheduler(long offset) {
|
||||
this.offset = offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeValue nextExecutionDelay(DateTime now) {
|
||||
return TimeValue.timeValueMillis(offset);
|
||||
}
|
||||
}
|
||||
}
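
testExecution() drives the cleaner with a one-second TestExecutionScheduler and waits on a CountDownLatch until the listener has fired. The same pattern can be reproduced with nothing but a ScheduledExecutorService; the sketch below is illustrative and does not use the x-plugins classes.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class PeriodicListenerSketch {
    public static void main(String[] args) throws InterruptedException {
        int expectedExecutions = 3;
        CountDownLatch latch = new CountDownLatch(expectedExecutions);

        // stands in for the CleanerService + TestExecutionScheduler pair: run the "listener" every 100 ms
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleWithFixedDelay(latch::countDown, 0, 100, TimeUnit.MILLISECONDS);

        try {
            // the test passes once the listener has fired the expected number of times, or fails on timeout
            if (!latch.await(10, TimeUnit.SECONDS)) {
                throw new AssertionError("listener was not executed often enough");
            }
        } finally {
            scheduler.shutdownNow();
        }
        System.out.println("listener executed " + expectedExecutions + " times");
    }
}
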
|
|
@@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.marvel.cleaner.local;
|
||||
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.agent.exporter.local.LocalExporter;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.cleaner.AbstractIndicesCleanerTestCase;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class LocalIndicesCleanerTests extends AbstractIndicesCleanerTestCase {
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
return Settings.builder()
|
||||
.put(super.nodeSettings(nodeOrdinal))
|
||||
.put("marvel.agent.exporters._local.type", LocalExporter.TYPE)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void createIndex(String name, DateTime creationDate) {
|
||||
assertAcked(prepareCreate(name)
|
||||
.setSettings(settingsBuilder().put(IndexMetaData.SETTING_CREATION_DATE, creationDate.getMillis()).build()));
|
||||
ensureYellow(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertIndicesCount(int count) throws Exception {
|
||||
assertBusy(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
assertThat(client().admin().indices().prepareGetSettings(MarvelSettings.MARVEL_INDICES_PREFIX + "*").get().getIndexToSettings().size(),
|
||||
equalTo(count));
|
||||
} catch (IndexNotFoundException e) {
|
||||
if (shieldEnabled) {
|
||||
assertThat(0, equalTo(count));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
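
assertIndicesCount() above relies on assertBusy() to poll until the cluster state reflects the deletions. For readers unfamiliar with that helper, here is a rough, hypothetical equivalent of such deadline-based polling using only the JDK (the real assertBusy in ESTestCase differs in details):

import java.util.function.BooleanSupplier;

public class AwaitBusySketch {
    /** Polls until the condition holds or the timeout elapses, similar in spirit to assertBusy in the test. */
    static void awaitTrue(BooleanSupplier condition, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        long sleep = 10;
        while (!condition.getAsBoolean()) {
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("condition was not met within " + timeoutMillis + " ms");
            }
            Thread.sleep(sleep);
            sleep = Math.min(sleep * 2, 500);  // back off to avoid busy spinning
        }
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        awaitTrue(() -> System.currentTimeMillis() - start > 50, 1000);
        System.out.println("condition met");
    }
}
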
|
|
@@ -7,10 +7,12 @@ package org.elasticsearch.marvel.shield;
|
|||
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.action.ActionRequestBuilder;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.marvel.agent.exporter.MarvelTemplateUtils;
|
||||
import org.elasticsearch.marvel.agent.settings.MarvelSettings;
|
||||
import org.elasticsearch.marvel.test.MarvelIntegTestCase;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
|
@@ -18,6 +20,15 @@ import static org.hamcrest.Matchers.is;
|
|||
|
||||
public class SecuredClientTests extends MarvelIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
return Settings.builder()
|
||||
.put(super.nodeSettings(nodeOrdinal))
|
||||
.put(Node.HTTP_ENABLED, false)
|
||||
.put(MarvelSettings.INTERVAL_SETTING.getKey(), "-1")
|
||||
.build();
|
||||
}
|
||||
|
||||
public void testAllowedAccess() {
|
||||
SecuredClient securedClient = internalCluster().getInstance(SecuredClient.class);
|
||||
|
||||
|
|
|
@@ -26,6 +26,7 @@ import org.elasticsearch.shield.authz.AuthorizationService;
|
|||
import org.elasticsearch.shield.authz.Privilege;
|
||||
import org.elasticsearch.shield.crypto.CryptoService;
|
||||
import org.elasticsearch.shield.license.ShieldLicenseState;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@@ -64,7 +65,7 @@ public class ShieldActionFilter extends AbstractComponent implements ActionFilte
|
|||
}
|
||||
|
||||
@Override
|
||||
-    public void apply(String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) {
+    public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) {
|
||||
|
||||
/**
|
||||
A functional requirement - when the license of shield is disabled (invalid/expires), shield will continue
|
||||
|
@@ -100,9 +101,9 @@ public class ShieldActionFilter extends AbstractComponent implements ActionFilte
|
|||
interceptor.intercept(request, user);
|
||||
}
|
||||
}
|
||||
-                chain.proceed(action, request, new SigningListener(this, listener));
+                chain.proceed(task, action, request, new SigningListener(this, listener));
|
||||
} else {
|
||||
-                chain.proceed(action, request, listener);
+                chain.proceed(task, action, request, listener);
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
listener.onFailure(t);
|
||||
|
|
|
@@ -241,7 +241,6 @@ public class InternalAuthenticationService extends AbstractComponent implements
|
|||
* @throws ElasticsearchSecurityException If none of the configured realms successfully authenticated the
|
||||
* request
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
User authenticateWithRealms(String action, TransportMessage<?> message, User fallbackUser) throws ElasticsearchSecurityException {
|
||||
AuthenticationToken token;
|
||||
try {
|
||||
|
|
|
@@ -5,13 +5,10 @@
|
|||
*/
|
||||
package org.elasticsearch.shield.authc.activedirectory;
|
||||
|
||||
import com.unboundid.ldap.sdk.FailoverServerSet;
|
||||
import com.unboundid.ldap.sdk.LDAPConnection;
|
||||
import com.unboundid.ldap.sdk.LDAPConnectionOptions;
|
||||
import com.unboundid.ldap.sdk.LDAPException;
|
||||
import com.unboundid.ldap.sdk.SearchRequest;
|
||||
import com.unboundid.ldap.sdk.SearchResult;
|
||||
import com.unboundid.ldap.sdk.ServerSet;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.ShieldSettingsFilter;
|
||||
|
@@ -23,7 +20,6 @@ import org.elasticsearch.shield.authc.ldap.support.SessionFactory;
|
|||
import org.elasticsearch.shield.authc.support.SecuredString;
|
||||
import org.elasticsearch.shield.ssl.ClientSSLService;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.shield.authc.ldap.support.LdapUtils.createFilter;
|
||||
|
@@ -51,10 +47,9 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
|
|||
private final String userSearchFilter;
|
||||
private final LdapSearchScope userSearchScope;
|
||||
private final GroupsResolver groupResolver;
|
||||
private final ServerSet ldapServerSet;
|
||||
|
||||
public ActiveDirectorySessionFactory(RealmConfig config, ClientSSLService sslService) {
|
||||
super(config);
|
||||
super(config, sslService);
|
||||
Settings settings = config.settings();
|
||||
domainName = settings.get(AD_DOMAIN_NAME_SETTING);
|
||||
if (domainName == null) {
|
||||
|
@ -64,7 +59,6 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
|
|||
userSearchDN = settings.get(AD_USER_SEARCH_BASEDN_SETTING, domainDN);
|
||||
userSearchScope = LdapSearchScope.resolve(settings.get(AD_USER_SEARCH_SCOPE_SETTING), LdapSearchScope.SUB_TREE);
|
||||
userSearchFilter = settings.get(AD_USER_SEARCH_FILTER_SETTING, "(&(objectClass=user)(|(sAMAccountName={0})(userPrincipalName={0}@" + domainName + ")))");
|
||||
ldapServerSet = serverSet(config.settings(), sslService);
|
||||
groupResolver = new ActiveDirectoryGroupsResolver(settings.getAsSettings("group_search"), domainDN);
|
||||
}
|
||||
|
||||
|
@ -72,24 +66,10 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
|
|||
filter.filterOut("shield.authc.realms." + realmName + "." + HOSTNAME_VERIFICATION_SETTING);
|
||||
}
|
||||
|
||||
ServerSet serverSet(Settings settings, ClientSSLService clientSSLService) {
|
||||
@Override
|
||||
protected LDAPServers ldapServers(Settings settings) {
|
||||
String[] ldapUrls = settings.getAsArray(URLS_SETTING, new String[] { "ldap://" + domainName + ":389" });
|
||||
LDAPServers servers = new LDAPServers(ldapUrls);
|
||||
LDAPConnectionOptions options = connectionOptions(settings);
|
||||
SocketFactory socketFactory;
|
||||
if (servers.ssl()) {
|
||||
socketFactory = clientSSLService.sslSocketFactory();
|
||||
if (settings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) {
|
||||
logger.debug("using encryption for LDAP connections with hostname verification");
|
||||
} else {
|
||||
logger.debug("using encryption for LDAP connections without hostname verification");
|
||||
}
|
||||
} else {
|
||||
socketFactory = null;
|
||||
}
|
||||
FailoverServerSet serverSet = new FailoverServerSet(servers.addresses(), servers.ports(), socketFactory, options);
|
||||
serverSet.setReOrderOnFailover(true);
|
||||
return serverSet;
|
||||
return new LDAPServers(ldapUrls);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -103,7 +83,7 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
|
|||
LDAPConnection connection;
|
||||
|
||||
try {
|
||||
connection = ldapServerSet.getConnection();
|
||||
connection = serverSet.getConnection();
|
||||
} catch (LDAPException e) {
|
||||
throw new IOException("failed to connect to any active directory servers", e);
|
||||
}
|
||||
|
|
|
@ -5,11 +5,8 @@
|
|||
*/
|
||||
package org.elasticsearch.shield.authc.ldap;
|
||||
|
||||
import com.unboundid.ldap.sdk.FailoverServerSet;
|
||||
import com.unboundid.ldap.sdk.LDAPConnection;
|
||||
import com.unboundid.ldap.sdk.LDAPConnectionOptions;
|
||||
import com.unboundid.ldap.sdk.LDAPException;
|
||||
import com.unboundid.ldap.sdk.ServerSet;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.authc.RealmConfig;
|
||||
import org.elasticsearch.shield.authc.ldap.support.LdapSession;
|
||||
|
@ -19,7 +16,6 @@ import org.elasticsearch.shield.authc.support.SecuredString;
|
|||
import org.elasticsearch.shield.ssl.ClientSSLService;
|
||||
import org.elasticsearch.shield.support.Exceptions;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import java.io.IOException;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Locale;
|
||||
|
@ -38,43 +34,17 @@ public class LdapSessionFactory extends SessionFactory {
|
|||
|
||||
private final String[] userDnTemplates;
|
||||
private final GroupsResolver groupResolver;
|
||||
private final ServerSet ldapServerSet;
|
||||
|
||||
public LdapSessionFactory(RealmConfig config, ClientSSLService sslService) {
|
||||
super(config);
|
||||
super(config, sslService);
|
||||
Settings settings = config.settings();
|
||||
userDnTemplates = settings.getAsArray(USER_DN_TEMPLATES_SETTING);
|
||||
if (userDnTemplates == null) {
|
||||
throw new IllegalArgumentException("missing required LDAP setting [" + USER_DN_TEMPLATES_SETTING + "]");
|
||||
}
|
||||
this.ldapServerSet = serverSet(config.settings(), sslService);
|
||||
groupResolver = groupResolver(settings);
|
||||
}
|
||||
|
||||
ServerSet serverSet(Settings settings, ClientSSLService clientSSLService) {
|
||||
// Parse LDAP urls
|
||||
String[] ldapUrls = settings.getAsArray(URLS_SETTING);
|
||||
if (ldapUrls == null || ldapUrls.length == 0) {
|
||||
throw new IllegalArgumentException("missing required LDAP setting [" + URLS_SETTING + "]");
|
||||
}
|
||||
LDAPServers servers = new LDAPServers(ldapUrls);
|
||||
LDAPConnectionOptions options = connectionOptions(settings);
|
||||
SocketFactory socketFactory;
|
||||
if (servers.ssl()) {
|
||||
socketFactory = clientSSLService.sslSocketFactory();
|
||||
if (settings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) {
|
||||
logger.debug("using encryption for LDAP connections with hostname verification");
|
||||
} else {
|
||||
logger.debug("using encryption for LDAP connections without hostname verification");
|
||||
}
|
||||
} else {
|
||||
socketFactory = null;
|
||||
}
|
||||
FailoverServerSet serverSet = new FailoverServerSet(servers.addresses(), servers.ports(), socketFactory, options);
|
||||
serverSet.setReOrderOnFailover(true);
|
||||
return serverSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* This iterates through the configured user templates attempting to open. If all attempts fail, the last exception
|
||||
* is kept as the cause of the thrown exception
|
||||
|
@ -87,7 +57,7 @@ public class LdapSessionFactory extends SessionFactory {
|
|||
LDAPConnection connection;
|
||||
|
||||
try {
|
||||
connection = ldapServerSet.getConnection();
|
||||
connection = serverSet.getConnection();
|
||||
} catch (LDAPException e) {
|
||||
throw new IOException("failed to connect to any LDAP servers", e);
|
||||
}
|
||||
|
|
|
@ -5,10 +5,8 @@
|
|||
*/
|
||||
package org.elasticsearch.shield.authc.ldap;
|
||||
|
||||
import com.unboundid.ldap.sdk.FailoverServerSet;
|
||||
import com.unboundid.ldap.sdk.GetEntryLDAPConnectionPoolHealthCheck;
|
||||
import com.unboundid.ldap.sdk.LDAPConnection;
|
||||
import com.unboundid.ldap.sdk.LDAPConnectionOptions;
|
||||
import com.unboundid.ldap.sdk.LDAPConnectionPool;
|
||||
import com.unboundid.ldap.sdk.LDAPException;
|
||||
import com.unboundid.ldap.sdk.SearchRequest;
|
||||
|
@ -29,7 +27,6 @@ import org.elasticsearch.shield.authc.support.SecuredString;
|
|||
import org.elasticsearch.shield.ssl.ClientSSLService;
|
||||
import org.elasticsearch.shield.support.Exceptions;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
|
||||
|
@ -48,12 +45,11 @@ public class LdapUserSearchSessionFactory extends SessionFactory {
|
|||
private final String userSearchBaseDn;
|
||||
private final LdapSearchScope scope;
|
||||
private final String userAttribute;
|
||||
private final ServerSet serverSet;
|
||||
|
||||
private LDAPConnectionPool connectionPool;
|
||||
|
||||
public LdapUserSearchSessionFactory(RealmConfig config, ClientSSLService sslService) {
|
||||
super(config);
|
||||
super(config, sslService);
|
||||
Settings settings = config.settings();
|
||||
userSearchBaseDn = settings.get("user_search.base_dn");
|
||||
if (userSearchBaseDn == null) {
|
||||
|
@ -61,7 +57,6 @@ public class LdapUserSearchSessionFactory extends SessionFactory {
|
|||
}
|
||||
scope = LdapSearchScope.resolve(settings.get("user_search.scope"), LdapSearchScope.SUB_TREE);
|
||||
userAttribute = settings.get("user_search.attribute", DEFAULT_USERNAME_ATTRIBUTE);
|
||||
serverSet = serverSet(settings, sslService);
|
||||
connectionPool = createConnectionPool(config, serverSet, timeout, logger);
|
||||
groupResolver = groupResolver(settings);
|
||||
}
|
||||
|
@ -126,30 +121,6 @@ public class LdapUserSearchSessionFactory extends SessionFactory {
|
|||
return request;
|
||||
}
|
||||
|
||||
ServerSet serverSet(Settings settings, ClientSSLService clientSSLService) {
|
||||
// Parse LDAP urls
|
||||
String[] ldapUrls = settings.getAsArray(URLS_SETTING);
|
||||
if (ldapUrls == null || ldapUrls.length == 0) {
|
||||
throw new IllegalArgumentException("missing required LDAP setting [" + URLS_SETTING + "]");
|
||||
}
|
||||
LDAPServers servers = new LDAPServers(ldapUrls);
|
||||
LDAPConnectionOptions options = connectionOptions(settings);
|
||||
SocketFactory socketFactory;
|
||||
if (servers.ssl()) {
|
||||
socketFactory = clientSSLService.sslSocketFactory();
|
||||
if (settings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) {
|
||||
logger.debug("using encryption for LDAP connections with hostname verification");
|
||||
} else {
|
||||
logger.debug("using encryption for LDAP connections without hostname verification");
|
||||
}
|
||||
} else {
|
||||
socketFactory = null;
|
||||
}
|
||||
FailoverServerSet serverSet = new FailoverServerSet(servers.addresses(), servers.ports(), socketFactory, options);
|
||||
serverSet.setReOrderOnFailover(true);
|
||||
return serverSet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LdapSession session(String user, SecuredString password) throws Exception {
|
||||
try {
|
||||
|
|
|
@@ -0,0 +1,100 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authc.ldap.support;

import com.unboundid.ldap.sdk.FailoverServerSet;
import com.unboundid.ldap.sdk.LDAPConnectionOptions;
import com.unboundid.ldap.sdk.RoundRobinDNSServerSet;
import com.unboundid.ldap.sdk.RoundRobinServerSet;
import com.unboundid.ldap.sdk.ServerSet;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

import javax.net.SocketFactory;
import java.util.Arrays;
import java.util.Locale;

/**
 * Enumeration representing the various supported {@link ServerSet} types that can be used with our built-in realms.
 */
public enum LdapLoadBalancing {

    FAILOVER() {
        @Override
        ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options) {
            FailoverServerSet serverSet = new FailoverServerSet(addresses, ports, socketFactory, options);
            serverSet.setReOrderOnFailover(true);
            return serverSet;
        }
    },

    ROUND_ROBIN() {
        @Override
        ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options) {
            return new RoundRobinServerSet(addresses, ports, socketFactory, options);
        }
    },

    DNS_ROUND_ROBIN() {
        @Override
        ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options) {
            if (addresses.length != 1) {
                throw new IllegalArgumentException(toString() + " can only be used with a single url");
            }
            if (InetAddresses.isInetAddress(addresses[0])) {
                throw new IllegalArgumentException(toString() + " can only be used with a DNS name");
            }
            TimeValue dnsTtl = settings.getAsTime("cache_ttl", TimeValue.timeValueHours(1L));
            return new RoundRobinDNSServerSet(addresses[0], ports[0],
                    RoundRobinDNSServerSet.AddressSelectionMode.ROUND_ROBIN, dnsTtl.millis(), null, socketFactory, options);
        }
    },

    DNS_FAILOVER() {
        @Override
        ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options) {
            if (addresses.length != 1) {
                throw new IllegalArgumentException(toString() + " can only be used with a single url");
            }
            if (InetAddresses.isInetAddress(addresses[0])) {
                throw new IllegalArgumentException(toString() + " can only be used with a DNS name");
            }
            TimeValue dnsTtl = settings.getAsTime("cache_ttl", TimeValue.timeValueHours(1L));
            return new RoundRobinDNSServerSet(addresses[0], ports[0],
                    RoundRobinDNSServerSet.AddressSelectionMode.FAILOVER, dnsTtl.millis(), null, socketFactory, options);
        }
    };

    public static final String LOAD_BALANCE_SETTINGS = "load_balance";
    public static final String LOAD_BALANCE_TYPE_SETTING = "type";
    public static final String LOAD_BALANCE_TYPE_DEFAULT = LdapLoadBalancing.FAILOVER.toString();

    abstract ServerSet buildServerSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options);

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ENGLISH);
    }

    public static ServerSet serverSet(String[] addresses, int[] ports, Settings settings, @Nullable SocketFactory socketFactory, @Nullable LDAPConnectionOptions options) {
        Settings loadBalanceSettings = settings.getAsSettings(LOAD_BALANCE_SETTINGS);
        String type = loadBalanceSettings.get(LOAD_BALANCE_TYPE_SETTING, LOAD_BALANCE_TYPE_DEFAULT);
        switch (type.toLowerCase(Locale.ENGLISH)) {
            case "failover":
                return FAILOVER.buildServerSet(addresses, ports, loadBalanceSettings, socketFactory, options);
            case "dns_failover":
                return DNS_FAILOVER.buildServerSet(addresses, ports, loadBalanceSettings, socketFactory, options);
            case "round_robin":
                return ROUND_ROBIN.buildServerSet(addresses, ports, loadBalanceSettings, socketFactory, options);
            case "dns_round_robin":
                return DNS_ROUND_ROBIN.buildServerSet(addresses, ports, loadBalanceSettings, socketFactory, options);
            default:
                throw new IllegalArgumentException("unknown server set type [" + type + "]. value must be one of " + Arrays.toString(LdapLoadBalancing.values()));
        }
    }
}
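
For reference, a minimal hedged sketch of how a realm's load_balance settings drive this choice; the host names and ports below are made up, and a real realm would pass its socket factory and connection options instead of null:

    // Hypothetical realm settings selecting round-robin load balancing across two LDAP servers.
    Settings realmSettings = Settings.builder()
            .put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "round_robin")
            .build();
    String[] addresses = new String[] { "ldap1.example.com", "ldap2.example.com" };
    int[] ports = new int[] { 389, 389 };
    // Returns a RoundRobinServerSet here; the default type ("failover") would instead
    // return a FailoverServerSet with re-ordering on failover enabled.
    ServerSet serverSet = LdapLoadBalancing.serverSet(addresses, ports, realmSettings, null, null);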

@@ -8,6 +8,7 @@ package org.elasticsearch.shield.authc.ldap.support;
 import com.unboundid.ldap.sdk.LDAPConnectionOptions;
 import com.unboundid.ldap.sdk.LDAPException;
 import com.unboundid.ldap.sdk.LDAPURL;
+import com.unboundid.ldap.sdk.ServerSet;
 import com.unboundid.util.ssl.HostNameSSLSocketVerifier;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.ESLogger;

@@ -15,7 +16,9 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.shield.authc.RealmConfig;
 import org.elasticsearch.shield.authc.support.SecuredString;
+import org.elasticsearch.shield.ssl.ClientSSLService;

+import javax.net.SocketFactory;
 import java.util.regex.Pattern;

 import static java.util.Arrays.asList;

@@ -48,8 +51,9 @@ public abstract class SessionFactory {
     protected final ESLogger connectionLogger;
     protected final RealmConfig config;
     protected final TimeValue timeout;
+    protected final ServerSet serverSet;

-    protected SessionFactory(RealmConfig config) {
+    protected SessionFactory(RealmConfig config, ClientSSLService sslService) {
         this.config = config;
         this.logger = config.logger(getClass());
         this.connectionLogger = config.logger(getClass());

@@ -59,6 +63,7 @@ public abstract class SessionFactory {
             searchTimeout = TimeValue.timeValueSeconds(1L);
         }
         this.timeout = searchTimeout;
+        this.serverSet = serverSet(config.settings(), sslService, ldapServers(config.settings()));
     }

     /**

@@ -104,6 +109,33 @@ public abstract class SessionFactory {
         return options;
     }

+    protected LDAPServers ldapServers(Settings settings) {
+        // Parse LDAP urls
+        String[] ldapUrls = settings.getAsArray(URLS_SETTING);
+        if (ldapUrls == null || ldapUrls.length == 0) {
+            throw new IllegalArgumentException("missing required LDAP setting [" + URLS_SETTING + "]");
+        }
+        return new LDAPServers(ldapUrls);
+    }
+
+    protected ServerSet serverSet(Settings settings, ClientSSLService clientSSLService, LDAPServers ldapServers) {
+        SocketFactory socketFactory = null;
+        if (ldapServers.ssl()) {
+            socketFactory = clientSSLService.sslSocketFactory();
+            if (settings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) {
+                logger.debug("using encryption for LDAP connections with hostname verification");
+            } else {
+                logger.debug("using encryption for LDAP connections without hostname verification");
+            }
+        }
+        return LdapLoadBalancing.serverSet(ldapServers.addresses(), ldapServers.ports(), settings, socketFactory, connectionOptions(settings));
+    }
+
+    // package private to use for testing
+    ServerSet getServerSet() {
+        return serverSet;
+    }
+
     public static class LDAPServers {

         private final String[] addresses;
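
With server-set creation pulled up into SessionFactory, the concrete factories changed elsewhere in this commit no longer build their own FailoverServerSet; they just ask the shared serverSet for a connection. The pattern, roughly, with error handling trimmed to the essentials:

    // Sketch of the connection-acquisition pattern used by the session factories.
    LDAPConnection connection;
    try {
        connection = serverSet.getConnection();
    } catch (LDAPException e) {
        throw new IOException("failed to connect to any LDAP servers", e);
    }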

@@ -32,11 +32,13 @@ import org.elasticsearch.index.engine.EngineException;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+import org.elasticsearch.index.percolator.PercolatorFieldMapper;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardUtils;
+import org.elasticsearch.percolator.PercolatorService;
 import org.elasticsearch.shield.authz.InternalAuthorizationService;
 import org.elasticsearch.shield.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
 import org.elasticsearch.shield.license.ShieldLicenseState;

@@ -133,6 +135,7 @@ public class ShieldIndexSearcherWrapper extends IndexSearcherWrapper {
                 allowedFields.addAll(mapperService.simpleMatchToIndexNames(field));
             }
             resolveParentChildJoinFields(allowedFields);
+            resolvePercolatorFields(allowedFields);
             reader = FieldSubsetReader.wrap(reader, allowedFields);
         }

@@ -225,6 +228,13 @@ public class ShieldIndexSearcherWrapper extends IndexSearcherWrapper {
         }
     }

+    private void resolvePercolatorFields(Set<String> allowedFields) {
+        if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) {
+            allowedFields.add(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME);
+            allowedFields.add(PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME);
+        }
+    }
+
     static void intersectScorerAndRoleBits(Scorer scorer, SparseFixedBitSet roleBits, LeafCollector collector, Bits acceptDocs) throws IOException {
         // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should be used first:
         DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(new BitSetIterator(roleBits, roleBits.approximateCardinality()), scorer.iterator()));
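
The hunk above is cut off right after the intersected iterator is built. For orientation only, here is a plausible (not committed) continuation of that loop, following the usual Lucene pattern of advancing the conjunction and collecting documents that pass acceptDocs:

    // Hypothetical continuation sketch: collect every doc in the intersection that is accepted.
    for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
        if (acceptDocs == null || acceptDocs.get(docId)) {
            collector.collect(docId);
        }
    }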

@@ -28,7 +28,6 @@ public class ShieldAuthcClient {
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public ClearRealmCacheRequestBuilder prepareClearRealmCache() {
         return new ClearRealmCacheRequestBuilder(client);
     }

@@ -37,7 +36,6 @@ public class ShieldAuthcClient {
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public void clearRealmCache(ClearRealmCacheRequest request, ActionListener<ClearRealmCacheResponse> listener) {
         client.execute(ClearRealmCacheAction.INSTANCE, request, listener);
     }

@@ -46,7 +44,6 @@ public class ShieldAuthcClient {
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public ActionFuture<ClearRealmCacheResponse> clearRealmCache(ClearRealmCacheRequest request) {
         return client.execute(ClearRealmCacheAction.INSTANCE, request);
     }
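
For context, a hedged usage sketch of these entry points; shieldClient is a hypothetical handle to a ShieldAuthcClient, and the blocking get() call and the no-argument ClearRealmCacheRequest constructor are assumptions about the surrounding API, not code from this commit:

    // Synchronous: clear the realm caches across the cluster and wait for the response.
    ClearRealmCacheResponse response = shieldClient.prepareClearRealmCache().get();

    // Asynchronous: use the ActionListener variant shown above.
    shieldClient.clearRealmCache(new ClearRealmCacheRequest(), new ActionListener<ClearRealmCacheResponse>() {
        @Override
        public void onResponse(ClearRealmCacheResponse clearRealmCacheResponse) {
            // caches cleared on all nodes
        }

        @Override
        public void onFailure(Throwable e) {
            // handle the failure
        }
    });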

@@ -12,6 +12,7 @@ import org.elasticsearch.shield.action.ShieldActionMapper;
 import org.elasticsearch.shield.authc.AuthenticationService;
 import org.elasticsearch.shield.authc.pki.PkiRealm;
 import org.elasticsearch.shield.authz.AuthorizationService;
+import org.elasticsearch.transport.DelegatingTransportChannel;
 import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.netty.NettyTransportChannel;

@@ -71,8 +72,13 @@ public interface ServerTransportFilter {
          */
         String shieldAction = actionMapper.action(action, request);

-        if (extractClientCert && (transportChannel instanceof NettyTransportChannel)) {
-            Channel channel = ((NettyTransportChannel)transportChannel).getChannel();
+        TransportChannel unwrappedChannel = transportChannel;
+        while (unwrappedChannel instanceof DelegatingTransportChannel) {
+            unwrappedChannel = ((DelegatingTransportChannel) unwrappedChannel).getChannel();
+        }
+
+        if (extractClientCert && (unwrappedChannel instanceof NettyTransportChannel)) {
+            Channel channel = ((NettyTransportChannel)unwrappedChannel).getChannel();
             SslHandler sslHandler = channel.getPipeline().get(SslHandler.class);
             assert sslHandler != null;

@@ -14,6 +14,7 @@ import org.elasticsearch.shield.authz.AuthorizationService;
 import org.elasticsearch.shield.authz.accesscontrol.RequestContext;
 import org.elasticsearch.shield.license.ShieldLicenseState;
 import org.elasticsearch.shield.transport.netty.ShieldNettyTransport;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.TransportChannel;

@@ -143,8 +144,7 @@ public class ShieldServerTransportService extends TransportService {
         }

         @Override
-        @SuppressWarnings("unchecked")
-        public void messageReceived(T request, TransportChannel channel) throws Exception {
+        public void messageReceived(T request, TransportChannel channel, Task task) throws Exception {
             try {
                 if (licenseState.securityEnabled()) {
                     String profile = channel.getProfileName();

@@ -163,13 +163,18 @@ public class ShieldServerTransportService extends TransportService {
                     }
                     RequestContext context = new RequestContext(request);
                     RequestContext.setCurrent(context);
-                    handler.messageReceived(request, channel);
+                    handler.messageReceived(request, channel, task);
                 } catch (Throwable t) {
                     channel.sendResponse(t);
                 } finally {
                     RequestContext.removeCurrent();
                 }
             }
+
+            @Override
+            public void messageReceived(T request, TransportChannel channel) throws Exception {
+                throw new UnsupportedOperationException("task parameter is required for this operation");
+            }
         }

 }
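
The delegating handler above now implements the three-argument messageReceived and rejects the legacy two-argument form. A hedged sketch of the same shape for any wrapper handler; the class name is hypothetical, and TransportRequestHandler is assumed to expose exactly the two overloads seen in the diff:

    // Hypothetical wrapper: route everything through the Task-aware overload.
    class AuditingHandlerWrapper<T extends TransportRequest> extends TransportRequestHandler<T> {

        private final TransportRequestHandler<T> delegate;

        AuditingHandlerWrapper(TransportRequestHandler<T> delegate) {
            this.delegate = delegate;
        }

        @Override
        public void messageReceived(T request, TransportChannel channel, Task task) throws Exception {
            // cross-cutting work (auditing, logging, ...) would go here before delegating
            delegate.messageReceived(request, channel, task);
        }

        @Override
        public void messageReceived(T request, TransportChannel channel) throws Exception {
            throw new UnsupportedOperationException("task parameter is required for this operation");
        }
    }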
|
|
|
@ -18,6 +18,7 @@ import org.elasticsearch.shield.authc.AuthenticationService;
|
|||
import org.elasticsearch.shield.authz.AuthorizationService;
|
||||
import org.elasticsearch.shield.crypto.CryptoService;
|
||||
import org.elasticsearch.shield.license.ShieldLicenseState;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -62,12 +63,13 @@ public class ShieldActionFilterTests extends ESTestCase {
|
|||
ActionRequest request = mock(ActionRequest.class);
|
||||
ActionListener listener = mock(ActionListener.class);
|
||||
ActionFilterChain chain = mock(ActionFilterChain.class);
|
||||
Task task = mock(Task.class);
|
||||
User user = new User.Simple("username", new String[] { "r1", "r2" });
|
||||
when(authcService.authenticate("_action", request, User.SYSTEM)).thenReturn(user);
|
||||
doReturn(request).when(spy(filter)).unsign(user, "_action", request);
|
||||
filter.apply("_action", request, listener, chain);
|
||||
filter.apply(task, "_action", request, listener, chain);
|
||||
verify(authzService).authorize(user, "_action", request);
|
||||
verify(chain).proceed(eq("_action"), eq(request), isA(ShieldActionFilter.SigningListener.class));
|
||||
verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ShieldActionFilter.SigningListener.class));
|
||||
}
|
||||
|
||||
public void testActionProcessException() throws Exception {
|
||||
|
@ -75,10 +77,11 @@ public class ShieldActionFilterTests extends ESTestCase {
|
|||
ActionListener listener = mock(ActionListener.class);
|
||||
ActionFilterChain chain = mock(ActionFilterChain.class);
|
||||
RuntimeException exception = new RuntimeException("process-error");
|
||||
Task task = mock(Task.class);
|
||||
User user = new User.Simple("username", new String[] { "r1", "r2" });
|
||||
when(authcService.authenticate("_action", request, User.SYSTEM)).thenReturn(user);
|
||||
doThrow(exception).when(authzService).authorize(user, "_action", request);
|
||||
filter.apply("_action", request, listener, chain);
|
||||
filter.apply(task, "_action", request, listener, chain);
|
||||
verify(listener).onFailure(exception);
|
||||
verifyNoMoreInteractions(chain);
|
||||
}
|
||||
|
@ -88,13 +91,14 @@ public class ShieldActionFilterTests extends ESTestCase {
|
|||
ActionListener listener = mock(ActionListener.class);
|
||||
ActionFilterChain chain = mock(ActionFilterChain.class);
|
||||
User user = mock(User.class);
|
||||
Task task = mock(Task.class);
|
||||
when(authcService.authenticate("_action", request, User.SYSTEM)).thenReturn(user);
|
||||
when(cryptoService.signed("signed_scroll_id")).thenReturn(true);
|
||||
when(cryptoService.unsignAndVerify("signed_scroll_id")).thenReturn("scroll_id");
|
||||
filter.apply("_action", request, listener, chain);
|
||||
filter.apply(task, "_action", request, listener, chain);
|
||||
assertThat(request.scrollId(), equalTo("scroll_id"));
|
||||
verify(authzService).authorize(user, "_action", request);
|
||||
verify(chain).proceed(eq("_action"), eq(request), isA(ShieldActionFilter.SigningListener.class));
|
||||
verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ShieldActionFilter.SigningListener.class));
|
||||
}
|
||||
|
||||
public void testActionSignatureError() throws Exception {
|
||||
|
@ -103,10 +107,11 @@ public class ShieldActionFilterTests extends ESTestCase {
|
|||
ActionFilterChain chain = mock(ActionFilterChain.class);
|
||||
IllegalArgumentException sigException = new IllegalArgumentException("bad bad boy");
|
||||
User user = mock(User.class);
|
||||
Task task = mock(Task.class);
|
||||
when(authcService.authenticate("_action", request, User.SYSTEM)).thenReturn(user);
|
||||
when(cryptoService.signed("scroll_id")).thenReturn(true);
|
||||
doThrow(sigException).when(cryptoService).unsignAndVerify("scroll_id");
|
||||
filter.apply("_action", request, listener, chain);
|
||||
filter.apply(task, "_action", request, listener, chain);
|
||||
verify(listener).onFailure(isA(ElasticsearchSecurityException.class));
|
||||
verify(auditTrail).tamperedRequest(user, "_action", request);
|
||||
verifyNoMoreInteractions(chain);
|
||||
|
@ -116,11 +121,12 @@ public class ShieldActionFilterTests extends ESTestCase {
|
|||
ActionRequest request = mock(ActionRequest.class);
|
||||
ActionListener listener = mock(ActionListener.class);
|
||||
ActionFilterChain chain = mock(ActionFilterChain.class);
|
||||
Task task = mock(Task.class);
|
||||
when(shieldLicenseState.securityEnabled()).thenReturn(false);
|
||||
filter.apply("_action", request, listener, chain);
|
||||
filter.apply(task, "_action", request, listener, chain);
|
||||
verifyZeroInteractions(authcService);
|
||||
verifyZeroInteractions(authzService);
|
||||
verify(chain).proceed(eq("_action"), eq(request), eq(listener));
|
||||
verify(chain).proceed(eq(task), eq("_action"), eq(request), eq(listener));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -11,6 +11,8 @@ import com.unboundid.ldap.sdk.Attribute;
|
|||
import com.unboundid.ldap.sdk.LDAPException;
|
||||
import com.unboundid.ldap.sdk.LDAPURL;
|
||||
import com.unboundid.ldap.sdk.schema.Schema;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.User;
|
||||
import org.elasticsearch.shield.authc.RealmConfig;
|
||||
|
@ -24,6 +26,10 @@ import org.elasticsearch.threadpool.ThreadPool;
|
|||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.shield.authc.ldap.support.SessionFactory.HOSTNAME_VERIFICATION_SETTING;
|
||||
import static org.elasticsearch.shield.authc.ldap.support.SessionFactory.URLS_SETTING;
|
||||
|
@ -53,12 +59,18 @@ import static org.mockito.Mockito.verify;
|
|||
public class ActiveDirectoryRealmTests extends ESTestCase {
|
||||
private static final String PASSWORD = "password";
|
||||
|
||||
private InMemoryDirectoryServer directoryServer;
|
||||
protected static int numberOfLdapServers;
|
||||
protected InMemoryDirectoryServer[] directoryServers;
|
||||
|
||||
private ResourceWatcherService resourceWatcherService;
|
||||
private ThreadPool threadPool;
|
||||
private Settings globalSettings;
|
||||
|
||||
@BeforeClass
|
||||
public static void setNumberOfLdapServers() {
|
||||
numberOfLdapServers = randomIntBetween(1, 4);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void start() throws Exception {
|
||||
InMemoryDirectoryServerConfig config = new InMemoryDirectoryServerConfig("dc=ad,dc=test,dc=elasticsearch,dc=com");
|
||||
|
@ -69,10 +81,14 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
|||
config.addAdditionalBindCredentials("CN=ironman@ad.test.elasticsearch.com", PASSWORD);
|
||||
config.addAdditionalBindCredentials("CN=Thor@ad.test.elasticsearch.com", PASSWORD);
|
||||
|
||||
directoryServer = new InMemoryDirectoryServer(config);
|
||||
directoryServer.add("dc=ad,dc=test,dc=elasticsearch,dc=com", new Attribute("dc", "UnboundID"), new Attribute("objectClass", "top", "domain", "extensibleObject"));
|
||||
directoryServer.importFromLDIF(false, getDataPath("ad.ldif").toString());
|
||||
directoryServer.startListening();
|
||||
directoryServers = new InMemoryDirectoryServer[numberOfLdapServers];
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
InMemoryDirectoryServer directoryServer = new InMemoryDirectoryServer(config);
|
||||
directoryServer.add("dc=ad,dc=test,dc=elasticsearch,dc=com", new Attribute("dc", "UnboundID"), new Attribute("objectClass", "top", "domain", "extensibleObject"));
|
||||
directoryServer.importFromLDIF(false, getDataPath("ad.ldif").toString());
|
||||
directoryServer.startListening();
|
||||
directoryServers[i] = directoryServer;
|
||||
}
|
||||
threadPool = new ThreadPool("active directory realm tests");
|
||||
resourceWatcherService = new ResourceWatcherService(Settings.EMPTY, threadPool);
|
||||
globalSettings = Settings.builder().put("path.home", createTempDir()).build();
|
||||
|
@ -82,7 +98,9 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
|||
public void stop() throws InterruptedException {
|
||||
resourceWatcherService.stop();
|
||||
terminate(threadPool);
|
||||
directoryServer.shutDown(true);
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
directoryServers[i].shutDown(true);
|
||||
}
|
||||
}
|
||||
|
||||
public void testAuthenticateUserPrincipleName() throws Exception {
|
||||
|
@ -110,9 +128,13 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
|||
assertThat(user.roles(), arrayContaining(containsString("Avengers")));
|
||||
}
|
||||
|
||||
private String ldapUrl() throws LDAPException {
|
||||
LDAPURL url = new LDAPURL("ldap", "localhost", directoryServer.getListenPort(), null, null, null, null);
|
||||
return url.toString();
|
||||
protected String[] ldapUrls() throws LDAPException {
|
||||
List<String> urls = new ArrayList<>(numberOfLdapServers);
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
LDAPURL url = new LDAPURL("ldap", "localhost", directoryServers[i].getListenPort(), null, null, null, null);
|
||||
urls.add(url.toString());
|
||||
}
|
||||
return urls.toArray(Strings.EMPTY_ARRAY);
|
||||
}
|
||||
|
||||
public void testAuthenticateCachesSuccesfulAuthentications() throws Exception {
|
||||
|
@ -206,7 +228,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
|
|||
|
||||
private Settings settings(Settings extraSettings) throws Exception {
|
||||
return Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl())
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com")
|
||||
.put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING, true)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false)
|
||||
|
|
|
@ -248,7 +248,7 @@ public class ActiveDirectorySessionFactoryTests extends ESTestCase {
|
|||
@SuppressWarnings("unchecked")
|
||||
public void testStandardLdapWithAttributeGroups() throws Exception {
|
||||
String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
|
||||
Settings settings = LdapTestCase.buildLdapSettings(AD_LDAP_URL, userTemplate, false);
|
||||
Settings settings = LdapTestCase.buildLdapSettings(new String[] { AD_LDAP_URL }, userTemplate, false);
|
||||
RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings);
|
||||
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, clientSSLService);
|
||||
|
||||
|
|
|
@ -59,7 +59,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
public void testAuthenticateSubTreeGroupSearch() throws Exception {
|
||||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE);
|
||||
Settings settings = buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE);
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null);
|
||||
LdapRealm ldap = new LdapRealm(config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService));
|
||||
|
@ -73,7 +73,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "ou=crews,ou=groups,o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
|
||||
|
@ -89,7 +89,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
|
||||
|
@ -107,7 +107,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
|
||||
|
@ -133,7 +133,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(LdapRealm.CACHE_TTL_SETTING, -1)
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings);
|
||||
|
@ -152,7 +152,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl())
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
.put("group_search.base_dn", groupSearchBase)
|
||||
.put("group_search.scope", LdapSearchScope.SUB_TREE)
|
||||
|
@ -166,7 +166,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
public void testLdapRealmSelectsLdapUserSearchSessionFactory() throws Exception {
|
||||
String groupSearchBase = "o=sevenSeas";
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl())
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.put("user_search.base_dn", "")
|
||||
.put("bind_dn", "cn=Thomas Masterman Hardy,ou=people,o=sevenSeas")
|
||||
.put("bind_password", PASSWORD)
|
||||
|
@ -185,7 +185,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
|
||||
public void testLdapRealmThrowsExceptionForUserTemplateAndSearchSettings() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl())
|
||||
.putArray(URLS_SETTING, ldapUrls())
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, "cn=foo")
|
||||
.put("user_search.base_dn", "cn=bar")
|
||||
.put("group_search.base_dn", "")
|
||||
|
@ -205,7 +205,7 @@ public class LdapRealmTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = VALID_USER_TEMPLATE;
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(DnRoleMapper.ROLE_MAPPING_FILE_SETTING, getDataPath("/org/elasticsearch/shield/authc/support/role_mapping.yml"))
|
||||
.build();
|
||||
RealmConfig config = new RealmConfig("test-ldap-realm-userdn", settings, globalSettings);
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
*/
|
||||
package org.elasticsearch.shield.authc.ldap;
|
||||
|
||||
import com.unboundid.ldap.listener.InMemoryDirectoryServer;
|
||||
import com.unboundid.ldap.sdk.LDAPURL;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.authc.RealmConfig;
|
||||
|
@ -35,11 +37,11 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
}
|
||||
|
||||
public void testBindWithReadTimeout() throws Exception {
|
||||
String ldapUrl = ldapUrl();
|
||||
InMemoryDirectoryServer ldapServer = randomFrom(ldapServers);
|
||||
String ldapUrl = new LDAPURL("ldap", "localhost", ldapServer.getListenPort(), null, null, null, null).toString();
|
||||
String groupSearchBase = "o=sevenSeas";
|
||||
String[] userTemplates = new String[] {
|
||||
"cn={0},ou=people,o=sevenSeas",
|
||||
};
|
||||
String userTemplates = "cn={0},ou=people,o=sevenSeas";
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl, userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "1ms") //1 millisecond
|
||||
|
@ -68,9 +70,8 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
// Local sockets connect too fast...
|
||||
String ldapUrl = "ldap://54.200.235.244:389";
|
||||
String groupSearchBase = "o=sevenSeas";
|
||||
String[] userTemplates = new String[] {
|
||||
"cn={0},ou=people,o=sevenSeas",
|
||||
};
|
||||
String userTemplates = "cn={0},ou=people,o=sevenSeas";
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put(buildLdapSettings(ldapUrl, userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(SessionFactory.TIMEOUT_TCP_CONNECTION_SETTING, "1ms") //1 millisecond
|
||||
|
@ -86,7 +87,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
fail("expected connection timeout error here");
|
||||
} catch (Throwable t) {
|
||||
long time = System.currentTimeMillis() - start;
|
||||
assertThat(time, lessThan(10000l));
|
||||
assertThat(time, lessThan(10000L));
|
||||
assertThat(t, instanceOf(IOException.class));
|
||||
assertThat(t.getCause().getCause().getMessage(), containsString("within the configured timeout of"));
|
||||
}
|
||||
|
@ -99,7 +100,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
"wrongname={0},ou=people,o=sevenSeas",
|
||||
"cn={0},ou=people,o=sevenSeas", //this last one should work
|
||||
};
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrl(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
|
||||
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, null);
|
||||
|
||||
|
@ -119,7 +120,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
"wrongname={0},ou=people,o=sevenSeas",
|
||||
"asdf={0},ou=people,o=sevenSeas", //none of these should work
|
||||
};
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrl(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
|
||||
LdapSessionFactory ldapFac = new LdapSessionFactory(config, null);
|
||||
|
||||
|
@ -135,7 +136,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
public void testGroupLookupSubtree() throws Exception {
|
||||
String groupSearchBase = "o=sevenSeas";
|
||||
String userTemplate = "cn={0},ou=people,o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings);
|
||||
|
||||
LdapSessionFactory ldapFac = new LdapSessionFactory(config, null);
|
||||
|
||||
|
@ -151,7 +152,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
public void testGroupLookupOneLevel() throws Exception {
|
||||
String groupSearchBase = "ou=crews,ou=groups,o=sevenSeas";
|
||||
String userTemplate = "cn={0},ou=people,o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL), globalSettings);
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL), globalSettings);
|
||||
|
||||
LdapSessionFactory ldapFac = new LdapSessionFactory(config, null);
|
||||
|
||||
|
@ -165,7 +166,7 @@ public class LdapSessionFactoryTests extends LdapTestCase {
|
|||
public void testGroupLookupBase() throws Exception {
|
||||
String groupSearchBase = "cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas";
|
||||
String userTemplate = "cn={0},ou=people,o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrl(), userTemplate, groupSearchBase, LdapSearchScope.BASE), globalSettings);
|
||||
RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.BASE), globalSettings);
|
||||
|
||||
LdapSessionFactory ldapFac = new LdapSessionFactory(config, null);
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@ import org.junit.Before;
|
|||
|
||||
import java.nio.file.Path;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
@ -79,7 +79,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
|
||||
public void testSupportsUnauthenticatedSessions() throws Exception {
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, "", LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, "", LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", "")
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -99,7 +99,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -133,7 +133,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -170,7 +170,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "cn=William Bush,ou=people,o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -205,7 +205,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -242,7 +242,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "ou=people,o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -277,7 +277,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -313,7 +313,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String userSearchBase = "o=sevenSeas";
|
||||
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -346,7 +346,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
|
||||
String userSearchBase = "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
|
||||
Settings settings = settingsBuilder()
|
||||
.put(LdapTestCase.buildLdapSettings(ActiveDirectorySessionFactoryTests.AD_LDAP_URL, Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(LdapTestCase.buildLdapSettings(new String[] { ActiveDirectorySessionFactoryTests.AD_LDAP_URL }, Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "ironman@ad.test.elasticsearch.com")
|
||||
.put("bind_password", ActiveDirectorySessionFactoryTests.PASSWORD)
|
||||
|
@ -388,7 +388,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com";
|
||||
String userSearchBase = "ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com";
|
||||
RealmConfig config = new RealmConfig("oldap-test", settingsBuilder()
|
||||
.put(LdapTestCase.buildLdapSettings(OpenLdapTests.OPEN_LDAP_URL, Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.ONE_LEVEL))
|
||||
.put(LdapTestCase.buildLdapSettings(new String[] { OpenLdapTests.OPEN_LDAP_URL }, Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.ONE_LEVEL))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "uid=blackwidow,ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com")
|
||||
.put("bind_password", OpenLdapTests.PASSWORD)
|
||||
|
@ -419,13 +419,13 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userSearchBase = "o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
.build(), globalSettings);
|
||||
|
||||
LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new SingleServerSet("localhost", ldapServer.getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE);
|
||||
LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new SingleServerSet("localhost", randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE);
|
||||
try {
|
||||
assertThat(connectionPool.getCurrentAvailableConnections(), is(LdapUserSearchSessionFactory.DEFAULT_CONNECTION_POOL_INITIAL_SIZE));
|
||||
assertThat(connectionPool.getMaximumAvailableConnections(), is(LdapUserSearchSessionFactory.DEFAULT_CONNECTION_POOL_SIZE));
|
||||
|
@ -442,7 +442,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userSearchBase = "o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas")
|
||||
.put("bind_password", "pass")
|
||||
|
@ -451,7 +451,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
.put("user_search.pool.health_check.enabled", false)
|
||||
.build(), globalSettings);
|
||||
|
||||
LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new SingleServerSet("localhost", ldapServer.getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE);
|
||||
LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new SingleServerSet("localhost", randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE);
|
||||
try {
|
||||
assertThat(connectionPool.getCurrentAvailableConnections(), is(10));
|
||||
assertThat(connectionPool.getMaximumAvailableConnections(), is(12));
|
||||
|
@ -466,7 +466,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "o=sevenSeas";
|
||||
String userSearchBase = "o=sevenSeas";
|
||||
RealmConfig config = new RealmConfig("ldap_realm", settingsBuilder()
|
||||
.put(buildLdapSettings(ldapUrl(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_password", "pass")
|
||||
.build(), globalSettings);
|
||||
|
@ -498,7 +498,8 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
|
||||
String userSearchBase = "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com";
|
||||
Settings ldapSettings = settingsBuilder()
|
||||
.put(LdapTestCase.buildLdapSettings("ldaps://elastic.co:636", Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put(LdapTestCase.buildLdapSettings(new String[] { "ldaps://elastic.co:636" }, Strings.EMPTY_ARRAY,
|
||||
groupSearchBase, LdapSearchScope.SUB_TREE))
|
||||
.put("user_search.base_dn", userSearchBase)
|
||||
.put("bind_dn", "ironman@ad.test.elasticsearch.com")
|
||||
.put("bind_password", ActiveDirectorySessionFactoryTests.PASSWORD)
|
||||
|
@ -513,7 +514,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase {
|
|||
}
|
||||
builder.put("path.home", createTempDir());
|
||||
|
||||
try (Node node = new MockNode(builder.build(), Version.CURRENT, Arrays.asList(XPackPlugin.class, XPackPlugin.class))) {
|
||||
try (Node node = new MockNode(builder.build(), Version.CURRENT, Collections.singletonList(XPackPlugin.class))) {
|
||||
node.start();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.shield.authc.ldap.support;
|
||||
|
||||
import com.unboundid.ldap.sdk.FailoverServerSet;
|
||||
import com.unboundid.ldap.sdk.RoundRobinDNSServerSet;
|
||||
import com.unboundid.ldap.sdk.RoundRobinServerSet;
|
||||
import com.unboundid.ldap.sdk.ServerSet;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class LdapLoadBalancingTests extends ESTestCase {
|
||||
|
||||
public void testBadTypeThrowsException() {
|
||||
String badType = randomAsciiOfLengthBetween(3, 12);
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, badType).build();
|
||||
try {
|
||||
LdapLoadBalancing.serverSet(null, null, settings, null, null);
|
||||
fail("using type [" + badType + "] should have thrown an exception");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("unknown server set type"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testFailoverServerSet() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "failover").build();
|
||||
String[] address = new String[] { "localhost" };
|
||||
int[] ports = new int[] { 26000 };
|
||||
ServerSet serverSet = LdapLoadBalancing.serverSet(address, ports, settings, null, null);
|
||||
assertThat(serverSet, instanceOf(FailoverServerSet.class));
|
||||
assertThat(((FailoverServerSet)serverSet).reOrderOnFailover(), is(true));
|
||||
}
|
||||
|
||||
public void testDnsFailover() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "dns_failover").build();
|
||||
String[] address = new String[] { "foo.bar" };
|
||||
int[] ports = new int[] { 26000 };
|
||||
ServerSet serverSet = LdapLoadBalancing.serverSet(address, ports, settings, null, null);
|
||||
assertThat(serverSet, instanceOf(RoundRobinDNSServerSet.class));
|
||||
assertThat(((RoundRobinDNSServerSet)serverSet).getAddressSelectionMode(), is(RoundRobinDNSServerSet.AddressSelectionMode.FAILOVER));
|
||||
}
|
||||
|
||||
public void testDnsFailoverBadArgs() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "dns_failover").build();
|
||||
String[] addresses = new String[] { "foo.bar", "localhost" };
|
||||
int[] ports = new int[] { 26000, 389 };
|
||||
try {
|
||||
LdapLoadBalancing.serverSet(addresses, ports, settings, null, null);
|
||||
fail("dns server sets only support a single URL");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("single url"));
|
||||
}
|
||||
|
||||
try {
|
||||
LdapLoadBalancing.serverSet(new String[] { "127.0.0.1" }, new int[] { 389 }, settings, null, null);
|
||||
fail("dns server sets only support DNS names");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("DNS name"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRoundRobin() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "round_robin").build();
|
||||
String[] address = new String[] { "localhost", "foo.bar" };
|
||||
int[] ports = new int[] { 389, 389 };
|
||||
ServerSet serverSet = LdapLoadBalancing.serverSet(address, ports, settings, null, null);
|
||||
assertThat(serverSet, instanceOf(RoundRobinServerSet.class));
|
||||
}
|
||||
|
||||
public void testDnsRoundRobin() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "dns_round_robin").build();
|
||||
String[] address = new String[] { "foo.bar" };
|
||||
int[] ports = new int[] { 26000 };
|
||||
ServerSet serverSet = LdapLoadBalancing.serverSet(address, ports, settings, null, null);
|
||||
assertThat(serverSet, instanceOf(RoundRobinDNSServerSet.class));
|
||||
assertThat(((RoundRobinDNSServerSet)serverSet).getAddressSelectionMode(), is(RoundRobinDNSServerSet.AddressSelectionMode.ROUND_ROBIN));
|
||||
}
|
||||
|
||||
public void testDnsRoundRobinBadArgs() {
|
||||
Settings settings = Settings.builder().put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "dns_round_robin").build();
|
||||
String[] addresses = new String[] { "foo.bar", "localhost" };
|
||||
int[] ports = new int[] { 26000, 389 };
|
||||
try {
|
||||
LdapLoadBalancing.serverSet(addresses, ports, settings, null, null);
|
||||
fail("dns server sets only support a single URL");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("single url"));
|
||||
}
|
||||
|
||||
try {
|
||||
LdapLoadBalancing.serverSet(new String[] { "127.0.0.1" }, new int[] { 389 }, settings, null, null);
|
||||
fail("dns server sets only support DNS names");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("DNS name"));
|
||||
}
|
||||
}
|
||||
}
|
|
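The tests above pin down the call shape of the new LdapLoadBalancing.serverSet(...) factory. A minimal usage sketch under the same assumptions follows; the class name, host names and ports are placeholders, and the two trailing null arguments simply mirror what the tests pass (this diff does not show what they stand for).

import com.unboundid.ldap.sdk.RoundRobinServerSet;
import com.unboundid.ldap.sdk.ServerSet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.authc.ldap.support.LdapLoadBalancing;

public class LdapLoadBalancingUsageSketch {
    public static void main(String[] args) {
        // Pick the server-set implementation through the same setting key the tests use.
        Settings settings = Settings.builder()
                .put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, "round_robin")
                .build();
        // Placeholder hosts and ports; the trailing nulls mirror the tests above.
        ServerSet serverSet = LdapLoadBalancing.serverSet(
                new String[] { "ldap1.example.com", "ldap2.example.com" },
                new int[] { 389, 389 },
                settings, null, null);
        // With "round_robin" the tests expect a RoundRobinServerSet instance.
        System.out.println(serverSet instanceof RoundRobinServerSet);
    }
}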
@ -9,6 +9,7 @@ import com.unboundid.ldap.listener.InMemoryDirectoryServer;
|
|||
import com.unboundid.ldap.sdk.Attribute;
|
||||
import com.unboundid.ldap.sdk.LDAPException;
|
||||
import com.unboundid.ldap.sdk.LDAPURL;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.authc.RealmConfig;
|
||||
import org.elasticsearch.shield.authc.ldap.LdapRealm;
|
||||
|
@ -17,6 +18,10 @@ import org.elasticsearch.test.ESTestCase;
|
|||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.shield.authc.ldap.LdapSessionFactory.HOSTNAME_VERIFICATION_SETTING;
|
||||
import static org.elasticsearch.shield.authc.ldap.LdapSessionFactory.URLS_SETTING;
|
||||
|
@ -24,41 +29,68 @@ import static org.elasticsearch.shield.authc.ldap.LdapSessionFactory.USER_DN_TEM
|
|||
|
||||
public abstract class LdapTestCase extends ESTestCase {
|
||||
|
||||
protected InMemoryDirectoryServer ldapServer;
|
||||
protected static int numberOfLdapServers;
|
||||
protected InMemoryDirectoryServer[] ldapServers;
|
||||
|
||||
@BeforeClass
|
||||
public static void setNumberOfLdapServers() {
|
||||
numberOfLdapServers = randomIntBetween(1, 4);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void startLdap() throws Exception {
|
||||
ldapServer = new InMemoryDirectoryServer("o=sevenSeas");
|
||||
ldapServer.add("o=sevenSeas", new Attribute("dc", "UnboundID"), new Attribute("objectClass", "top", "domain", "extensibleObject"));
|
||||
ldapServer.importFromLDIF(false, getDataPath("/org/elasticsearch/shield/authc/ldap/support/seven-seas.ldif").toString());
|
||||
ldapServer.startListening();
|
||||
ldapServers = new InMemoryDirectoryServer[numberOfLdapServers];
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
InMemoryDirectoryServer ldapServer = new InMemoryDirectoryServer("o=sevenSeas");
|
||||
ldapServer.add("o=sevenSeas", new Attribute("dc", "UnboundID"), new Attribute("objectClass", "top", "domain", "extensibleObject"));
|
||||
ldapServer.importFromLDIF(false, getDataPath("/org/elasticsearch/shield/authc/ldap/support/seven-seas.ldif").toString());
|
||||
ldapServer.startListening();
|
||||
ldapServers[i] = ldapServer;
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public void stopLdap() throws Exception {
|
||||
ldapServer.shutDown(true);
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
ldapServers[i].shutDown(true);
|
||||
}
|
||||
}
|
||||
|
||||
protected String ldapUrl() throws LDAPException {
|
||||
LDAPURL url = new LDAPURL("ldap", "localhost", ldapServer.getListenPort(), null, null, null, null);
|
||||
return url.toString();
|
||||
protected String[] ldapUrls() throws LDAPException {
|
||||
List<String> urls = new ArrayList<>(numberOfLdapServers);
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
LDAPURL url = new LDAPURL("ldap", "localhost", ldapServers[i].getListenPort(), null, null, null, null);
|
||||
urls.add(url.toString());
|
||||
}
|
||||
return urls.toArray(Strings.EMPTY_ARRAY);
|
||||
}
|
||||
|
||||
public static Settings buildLdapSettings(String ldapUrl, String userTemplate, String groupSearchBase, LdapSearchScope scope) {
|
||||
return buildLdapSettings(new String[] { ldapUrl }, new String[] { userTemplate }, groupSearchBase, scope);
|
||||
}
|
||||
|
||||
public static Settings buildLdapSettings(String[] ldapUrl, String userTemplate, String groupSearchBase, LdapSearchScope scope) {
|
||||
return buildLdapSettings(ldapUrl, new String[] { userTemplate }, groupSearchBase, scope);
|
||||
}
|
||||
|
||||
public static Settings buildLdapSettings(String ldapUrl, String[] userTemplate, String groupSearchBase, LdapSearchScope scope) {
|
||||
return Settings.builder()
|
||||
public static Settings buildLdapSettings(String[] ldapUrl, String[] userTemplate, String groupSearchBase, LdapSearchScope scope) {
|
||||
return buildLdapSettings(ldapUrl, userTemplate, groupSearchBase, scope, null);
|
||||
}
|
||||
|
||||
public static Settings buildLdapSettings(String[] ldapUrl, String[] userTemplate, String groupSearchBase, LdapSearchScope scope, LdapLoadBalancing serverSetType) {
|
||||
Settings.Builder builder = Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
.put("group_search.base_dn", groupSearchBase)
|
||||
.put("group_search.scope", scope)
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false)
|
||||
.build();
|
||||
.put(HOSTNAME_VERIFICATION_SETTING, false);
|
||||
if (serverSetType != null) {
|
||||
builder.put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." + LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, serverSetType.toString());
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static Settings buildLdapSettings(String ldapUrl, String userTemplate, boolean hostnameVerification) {
|
||||
public static Settings buildLdapSettings(String[] ldapUrl, String userTemplate, boolean hostnameVerification) {
|
||||
return Settings.builder()
|
||||
.putArray(URLS_SETTING, ldapUrl)
|
||||
.putArray(USER_DN_TEMPLATES_SETTING, userTemplate)
|
||||
|
|
|
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.shield.authc.ldap.support;
|
||||
|
||||
import com.unboundid.ldap.sdk.LDAPConnection;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.shield.authc.RealmConfig;
|
||||
import org.elasticsearch.shield.authc.support.SecuredString;
|
||||
import org.elasticsearch.shield.ssl.ClientSSLService;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
|
||||
/**
 * Tests that the server sets properly load balance connections without throwing exceptions
 */
|
||||
public class SessionFactoryLoadBalancingTests extends LdapTestCase {
|
||||
|
||||
public void testRoundRobin() throws Exception {
|
||||
TestSessionFactory testSessionFactory = createSessionFactory(LdapLoadBalancing.ROUND_ROBIN);
|
||||
|
||||
final int numberOfIterations = randomIntBetween(1, 5);
|
||||
for (int iteration = 0; iteration < numberOfIterations; iteration++) {
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
LDAPConnection connection = null;
|
||||
try {
|
||||
connection = testSessionFactory.getServerSet().getConnection();
|
||||
assertThat(connection.getConnectedPort(), is(ldapServers[i].getListenPort()));
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testRoundRobinWithFailures() throws Exception {
|
||||
assumeTrue("at least two ldap servers should be present for this test", ldapServers.length > 1);
|
||||
TestSessionFactory testSessionFactory = createSessionFactory(LdapLoadBalancing.ROUND_ROBIN);
|
||||
|
||||
// create a list of ports
|
||||
List<Integer> ports = new ArrayList<>(numberOfLdapServers);
|
||||
for (int i = 0; i < ldapServers.length; i++) {
|
||||
ports.add(ldapServers[i].getListenPort());
|
||||
}
|
||||
|
||||
int numberToKill = randomIntBetween(1, numberOfLdapServers - 1);
|
||||
for (int i = 0; i < numberToKill; i++) {
|
||||
int index = randomIntBetween(0, numberOfLdapServers - 1);
|
||||
ports.remove(Integer.valueOf(ldapServers[index].getListenPort()));
|
||||
ldapServers[index].shutDown(true);
|
||||
}
|
||||
|
||||
final int numberOfIterations = randomIntBetween(1, 5);
|
||||
for (int iteration = 0; iteration < numberOfIterations; iteration++) {
|
||||
for (Integer port : ports) {
|
||||
LDAPConnection connection = null;
|
||||
try {
|
||||
connection = testSessionFactory.getServerSet().getConnection();
|
||||
assertThat(connection.getConnectedPort(), is(port));
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testFailover() throws Exception {
|
||||
assumeTrue("at least two ldap servers should be present for this test", ldapServers.length > 1);
|
||||
TestSessionFactory testSessionFactory = createSessionFactory(LdapLoadBalancing.FAILOVER);
|
||||
|
||||
// first test that there is no round robin stuff going on
|
||||
final int firstPort = ldapServers[0].getListenPort();
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
LDAPConnection connection = null;
|
||||
try {
|
||||
connection = testSessionFactory.getServerSet().getConnection();
|
||||
assertThat(connection.getConnectedPort(), is(firstPort));
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<Integer> stoppedServers = new ArrayList<>();
|
||||
// now we should kill some servers including the first one
|
||||
int numberToKill = randomIntBetween(1, numberOfLdapServers - 1);
|
||||
// always kill the first one, but don't add to the list
|
||||
ldapServers[0].shutDown(true);
|
||||
stoppedServers.add(0);
|
||||
for (int i = 0; i < numberToKill - 1; i++) {
|
||||
int index = randomIntBetween(1, numberOfLdapServers - 1);
|
||||
ldapServers[index].shutDown(true);
|
||||
stoppedServers.add(index);
|
||||
}
|
||||
|
||||
int firstNonStoppedPort = -1;
|
||||
// now we find the first that isn't stopped
|
||||
for (int i = 0; i < numberOfLdapServers; i++) {
|
||||
if (stoppedServers.contains(i) == false) {
|
||||
firstNonStoppedPort = ldapServers[i].getListenPort();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertThat(firstNonStoppedPort, not(-1));
|
||||
final int numberOfIterations = randomIntBetween(1, 5);
|
||||
for (int iteration = 0; iteration < numberOfIterations; iteration++) {
|
||||
LDAPConnection connection = null;
|
||||
try {
|
||||
connection = testSessionFactory.getServerSet().getConnection();
|
||||
assertThat(connection.getConnectedPort(), is(firstNonStoppedPort));
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private TestSessionFactory createSessionFactory(LdapLoadBalancing loadBalancing) throws Exception {
|
||||
String groupSearchBase = "cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas";
|
||||
String userTemplate = "cn={0},ou=people,o=sevenSeas";
|
||||
Settings settings = buildLdapSettings(ldapUrls(), new String[] { userTemplate }, groupSearchBase,
|
||||
LdapSearchScope.SUB_TREE, loadBalancing);
|
||||
RealmConfig config = new RealmConfig("test-session-factory", settings, Settings.builder().put("path.home", createTempDir()).build());
|
||||
return new TestSessionFactory(config, null);
|
||||
}
|
||||
|
||||
static class TestSessionFactory extends SessionFactory {
|
||||
|
||||
protected TestSessionFactory(RealmConfig config, ClientSSLService sslService) {
|
||||
super(config, sslService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LdapSession session(String user, SecuredString password) throws Exception {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -20,7 +20,6 @@ import static org.hamcrest.Matchers.is;
|
|||
|
||||
public class SessionFactoryTests extends ESTestCase {
|
||||
public void testConnectionFactoryReturnsCorrectLDAPConnectionOptionsWithDefaultSettings() {
|
||||
SessionFactory factory = createSessionFactory();
|
||||
LDAPConnectionOptions options = SessionFactory.connectionOptions(Settings.EMPTY);
|
||||
assertThat(options.followReferrals(), is(equalTo(true)));
|
||||
assertThat(options.allowConcurrentSocketFactoryUse(), is(equalTo(true)));
|
||||
|
@ -36,7 +35,6 @@ public class SessionFactoryTests extends ESTestCase {
|
|||
.put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "20ms")
|
||||
.put(SessionFactory.FOLLOW_REFERRALS_SETTING, "false")
|
||||
.build();
|
||||
SessionFactory factory = createSessionFactory();
|
||||
LDAPConnectionOptions options = SessionFactory.connectionOptions(settings);
|
||||
assertThat(options.followReferrals(), is(equalTo(false)));
|
||||
assertThat(options.allowConcurrentSocketFactoryUse(), is(equalTo(true)));
|
||||
|
@ -60,7 +58,7 @@ public class SessionFactoryTests extends ESTestCase {
|
|||
|
||||
private SessionFactory createSessionFactory() {
|
||||
Settings global = settingsBuilder().put("path.home", createTempDir()).build();
|
||||
return new SessionFactory(new RealmConfig("_name", Settings.EMPTY, global)) {
|
||||
return new SessionFactory(new RealmConfig("_name", Settings.builder().put("url", "ldap://localhost:389").build(), global), null) {
|
||||
|
||||
@Override
|
||||
public LdapSession session(String user, SecuredString password) {
|
||||
|
|
|
@ -81,7 +81,7 @@ public class ShieldIndexSearcherWrapperUnitTests extends ESTestCase {
|
|||
AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(),
|
||||
Collections.emptyMap(), Collections.emptyMap());
|
||||
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
|
||||
mapperService = new MapperService(indexSettings, analysisService, similarityService, new IndicesModule().getMapperRegistry());
|
||||
mapperService = new MapperService(indexSettings, analysisService, similarityService, new IndicesModule().getMapperRegistry(), () -> null);
|
||||
|
||||
ShardId shardId = new ShardId(index, 0);
|
||||
licenseState = mock(ShieldLicenseState.class);
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.elasticsearch.transport.TransportResponse;
|
|||
import org.elasticsearch.transport.TransportResponseHandler;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.transport.netty.NettyTransport;
|
||||
import org.elasticsearch.transport.netty.NettyTransportChannel;
|
||||
import org.mockito.InOrder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -102,11 +101,11 @@ public class TransportFilterTests extends ESIntegTestCase {
|
|||
ClientTransportFilter sourceClientFilter = internalCluster().getInstance(ClientTransportFilter.class, source);
|
||||
ClientTransportFilter targetClientFilter = internalCluster().getInstance(ClientTransportFilter.class, target);
|
||||
|
||||
InOrder inOrder = inOrder(sourceServerFilter, sourceClientFilter, targetServerFilter, targetClientFilter);
|
||||
InOrder inOrder = inOrder(sourceClientFilter, targetServerFilter, targetClientFilter, sourceServerFilter);
|
||||
inOrder.verify(sourceClientFilter).outbound("_action", new Request("src_to_trgt"));
|
||||
inOrder.verify(targetServerFilter).inbound(eq("_action"), eq(new Request("src_to_trgt")), isA(NettyTransportChannel.class));
|
||||
inOrder.verify(targetServerFilter).inbound(eq("_action"), eq(new Request("src_to_trgt")), isA(TransportChannel.class));
|
||||
inOrder.verify(targetClientFilter).outbound("_action", new Request("trgt_to_src"));
|
||||
inOrder.verify(sourceServerFilter).inbound(eq("_action"), eq(new Request("trgt_to_src")), isA(NettyTransportChannel.class));
|
||||
inOrder.verify(sourceServerFilter).inbound(eq("_action"), eq(new Request("trgt_to_src")), isA(TransportChannel.class));
|
||||
}
|
||||
|
||||
public static class InternalPlugin extends Plugin {
|
||||
|
|
|
@ -46,12 +46,13 @@ import java.util.concurrent.Callable;
|
|||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.hamcrest.Matchers.anyOf;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class HandshakeWaitingHandlerTests extends ESTestCase {
|
||||
private static final int CONCURRENT_CLIENT_REQUESTS = 20;
|
||||
|
@ -63,8 +64,7 @@ public class HandshakeWaitingHandlerTests extends ESTestCase {
|
|||
private ClientBootstrap clientBootstrap;
|
||||
private SSLContext sslContext;
|
||||
|
||||
private final AtomicBoolean failed = new AtomicBoolean(false);
|
||||
private volatile Throwable failureCause = null;
|
||||
private final AtomicReference<Throwable> failureCause = new AtomicReference<>();
|
||||
private ExecutorService threadPoolExecutor;
|
||||
|
||||
@Before
|
||||
|
@ -99,8 +99,7 @@ public class HandshakeWaitingHandlerTests extends ESTestCase {
|
|||
serverBootstrap.shutdown();
|
||||
serverBootstrap.releaseExternalResources();
|
||||
|
||||
failed.set(false);
|
||||
failureCause = null;
|
||||
failureCause.set(null);
|
||||
}
|
||||
|
||||
public void testWriteBeforeHandshakeFailsWithoutHandler() throws Exception {
|
||||
|
@ -127,13 +126,13 @@ public class HandshakeWaitingHandlerTests extends ESTestCase {
|
|||
handshakeFuture.getChannel().close();
|
||||
}
|
||||
|
||||
if (failed.get()) {
|
||||
assertThat(failureCause, anyOf(instanceOf(SSLException.class), instanceOf(AssertionError.class)));
|
||||
if (failureCause.get() != null) {
|
||||
assertThat(failureCause.get(), anyOf(instanceOf(SSLException.class), instanceOf(AssertionError.class)));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertThat("Expected this test to fail with an SSLException or AssertionError", failed.get(), is(true));
|
||||
assertThat("Expected this test to fail with an SSLException or AssertionError", failureCause.get(), notNullValue());
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch-shield/issues/533")
|
||||
|
@ -176,13 +175,12 @@ public class HandshakeWaitingHandlerTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private void assertNotFailed() {
|
||||
if (failed.get()) {
|
||||
if (failureCause.get() != null) {
|
||||
StringWriter writer = new StringWriter();
|
||||
if (failed.get()) {
|
||||
failureCause.printStackTrace(new PrintWriter(writer));
|
||||
if (failureCause.get() != null) {
|
||||
failureCause.get().printStackTrace(new PrintWriter(writer));
|
||||
}
|
||||
|
||||
assertThat("Expected this test to always pass with the HandshakeWaitingHandler in pipeline\n" + writer.toString(), failed.get(), is(false));
|
||||
assertThat("Expected this test to always pass with the HandshakeWaitingHandler in pipeline\n" + writer.toString(), failureCause.get(), nullValue());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -226,9 +224,7 @@ public class HandshakeWaitingHandlerTests extends ESTestCase {
|
|||
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
|
||||
Throwable cause = e.getCause();
|
||||
// Only save first cause
|
||||
if (failed.compareAndSet(false, true)) {
|
||||
failureCause = cause;
|
||||
}
|
||||
failureCause.compareAndSet(null, cause);
|
||||
ctx.getChannel().close();
|
||||
}
|
||||
});
|
||||
|
|
|
@ -13,6 +13,7 @@ import org.elasticsearch.test.ShieldIntegTestCase;
|
|||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
|
||||
// this class sits in org.elasticsearch.transport so that TransportService.requestHandlers is visible
|
||||
public class ShieldServerTransportServiceTests extends ShieldIntegTestCase {
|
||||
|
@ -30,8 +31,8 @@ public class ShieldServerTransportServiceTests extends ShieldIntegTestCase {
|
|||
for (Map.Entry<String, RequestHandlerRegistry> entry : transportService.requestHandlers.entrySet()) {
|
||||
assertThat(
|
||||
"handler not wrapped by " + ShieldServerTransportService.ProfileSecuredRequestHandler.class + "; do all the handler registration methods have overrides?",
|
||||
entry.getValue().getHandler(),
|
||||
instanceOf(ShieldServerTransportService.ProfileSecuredRequestHandler.class)
|
||||
entry.getValue().toString(),
|
||||
startsWith(ShieldServerTransportService.ProfileSecuredRequestHandler.class.getName() + "@")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ cluster:monitor/nodes/stats
|
|||
cluster:monitor/state
|
||||
cluster:monitor/stats
|
||||
cluster:monitor/task
|
||||
cluster:monitor/tasks/lists
|
||||
indices:admin/aliases
|
||||
indices:admin/aliases/exists
|
||||
indices:admin/aliases/get
|
||||
|
@ -43,9 +44,6 @@ indices:admin/template/put
|
|||
indices:admin/types/exists
|
||||
indices:admin/upgrade
|
||||
indices:admin/validate/query
|
||||
indices:admin/warmers/delete
|
||||
indices:admin/warmers/get
|
||||
indices:admin/warmers/put
|
||||
indices:monitor/recovery
|
||||
indices:monitor/segments
|
||||
indices:monitor/settings/get
|
||||
|
|
|
@ -6,6 +6,7 @@ cluster:monitor/nodes/info[n]
|
|||
cluster:monitor/nodes/liveness
|
||||
cluster:monitor/nodes/stats[n]
|
||||
cluster:monitor/stats[n]
|
||||
cluster:monitor/tasks/lists[n]
|
||||
cluster:admin/shield/realm/cache/clear
|
||||
cluster:admin/shield/realm/cache/clear[n]
|
||||
indices:admin/analyze[s]
|
||||
|
|
|
@ -0,0 +1,76 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.rest.action;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.rest.FakeRestRequest;
|
||||
import org.elasticsearch.watcher.client.WatcherClient;
|
||||
import org.elasticsearch.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.watcher.transport.actions.execute.ExecuteWatchRequestBuilder;
|
||||
import org.elasticsearch.watcher.trigger.TriggerService;
|
||||
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class RestExecuteWatchActionTest extends ESTestCase {
|
||||
|
||||
private RestController restController = mock(RestController.class);
|
||||
private Client client = mock(Client.class);
|
||||
private TriggerService triggerService = mock(TriggerService.class);
|
||||
private RestChannel restChannel = mock(RestChannel.class);
|
||||
private WatcherClient watcherClient = mock(WatcherClient.class);
|
||||
|
||||
public void testThatFlagsCanBeSpecifiedViaParameters() throws Exception {
|
||||
|
||||
String randomId = randomAsciiOfLength(10);
|
||||
for (String recordExecution : Lists.newArrayList("true", "false", null)) {
|
||||
for (String ignoreCondition : Lists.newArrayList("true", "false", null)) {
|
||||
for (String debugCondition : Lists.newArrayList("true", "false", null)) {
|
||||
ExecuteWatchRequestBuilder builder = new ExecuteWatchRequestBuilder(client);
|
||||
when(watcherClient.prepareExecuteWatch()).thenReturn(builder);
|
||||
|
||||
RestExecuteWatchAction restExecuteWatchAction = new RestExecuteWatchAction(Settings.EMPTY, restController, client, triggerService);
|
||||
restExecuteWatchAction.handleRequest(createFakeRestRequest(randomId, recordExecution, ignoreCondition, debugCondition), restChannel, watcherClient);
|
||||
|
||||
assertThat(builder.request().getId(), is(randomId));
|
||||
assertThat(builder.request().isRecordExecution(), is(Boolean.parseBoolean(recordExecution)));
|
||||
assertThat(builder.request().isIgnoreCondition(), is(Boolean.parseBoolean(ignoreCondition)));
|
||||
assertThat(builder.request().isDebug(), is(Boolean.parseBoolean(debugCondition)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private FakeRestRequest createFakeRestRequest(String randomId, String recordExecution, String ignoreCondition, String debugCondition) {
|
||||
FakeRestRequest restRequest = new FakeRestRequest() {
|
||||
@Override
|
||||
public boolean hasContent() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesReference content() {
|
||||
return new BytesArray("{}");
|
||||
}
|
||||
};
|
||||
|
||||
restRequest.params().put("id", randomId);
|
||||
// make sure we test true/false/no params
|
||||
if (recordExecution != null) restRequest.params().put("record_execution", recordExecution);
|
||||
if (ignoreCondition != null) restRequest.params().put("ignore_condition", ignoreCondition);
|
||||
if (debugCondition != null) restRequest.params().put("debug", debugCondition);
|
||||
|
||||
return restRequest;
|
||||
}
|
||||
}
|
|
@ -27,6 +27,8 @@ import org.elasticsearch.watcher.actions.email.service.EmailService;
|
|||
import org.elasticsearch.watcher.actions.email.service.InternalEmailService;
|
||||
import org.elasticsearch.watcher.actions.hipchat.service.HipChatService;
|
||||
import org.elasticsearch.watcher.actions.hipchat.service.InternalHipChatService;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.InternalPagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.slack.service.InternalSlackService;
|
||||
import org.elasticsearch.watcher.actions.slack.service.SlackService;
|
||||
import org.elasticsearch.watcher.client.WatcherClientModule;
|
||||
|
@ -163,6 +165,7 @@ public class WatcherPlugin extends Plugin {
|
|||
EmailService.class,
|
||||
HipChatService.class,
|
||||
SlackService.class,
|
||||
PagerDutyService.class,
|
||||
HttpClient.class,
|
||||
WatcherSettingsValidation.class);
|
||||
}
|
||||
|
@ -193,6 +196,7 @@ public class WatcherPlugin extends Plugin {
|
|||
module.registerSetting(InternalSlackService.SLACK_ACCOUNT_SETTING);
|
||||
module.registerSetting(InternalEmailService.EMAIL_ACCOUNT_SETTING);
|
||||
module.registerSetting(InternalHipChatService.HIPCHAT_ACCOUNT_SETTING);
|
||||
module.registerSetting(InternalPagerDutyService.PAGERDUTY_ACCOUNT_SETTING);
|
||||
}
|
||||
|
||||
public void onModule(NetworkModule module) {
|
||||
|
|
|
@ -10,6 +10,8 @@ import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
|
|||
import org.elasticsearch.watcher.actions.hipchat.HipChatAction;
|
||||
import org.elasticsearch.watcher.actions.index.IndexAction;
|
||||
import org.elasticsearch.watcher.actions.logging.LoggingAction;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.PagerDutyAction;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEvent;
|
||||
import org.elasticsearch.watcher.actions.slack.SlackAction;
|
||||
import org.elasticsearch.watcher.actions.slack.service.message.SlackMessage;
|
||||
import org.elasticsearch.watcher.actions.webhook.WebhookAction;
|
||||
|
@ -87,4 +89,16 @@ public final class ActionBuilders {
|
|||
public static SlackAction.Builder slackAction(String account, SlackMessage.Template message) {
|
||||
return SlackAction.builder(account, message);
|
||||
}
|
||||
|
||||
public static PagerDutyAction.Builder triggerPagerDutyAction(String account, String description) {
|
||||
return pagerDutyAction(IncidentEvent.templateBuilder(description).setAccount(account));
|
||||
}
|
||||
|
||||
public static PagerDutyAction.Builder pagerDutyAction(IncidentEvent.Template.Builder event) {
|
||||
return PagerDutyAction.builder(event.build());
|
||||
}
|
||||
|
||||
public static PagerDutyAction.Builder pagerDutyAction(IncidentEvent.Template event) {
|
||||
return PagerDutyAction.builder(event);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,10 @@ import org.elasticsearch.watcher.actions.email.EmailActionFactory;
|
|||
import org.elasticsearch.watcher.actions.email.service.EmailService;
|
||||
import org.elasticsearch.watcher.actions.email.service.HtmlSanitizer;
|
||||
import org.elasticsearch.watcher.actions.email.service.InternalEmailService;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.DataAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentsParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.HttpEmailAttachementParser;
|
||||
import org.elasticsearch.watcher.actions.hipchat.HipChatAction;
|
||||
import org.elasticsearch.watcher.actions.hipchat.HipChatActionFactory;
|
||||
import org.elasticsearch.watcher.actions.hipchat.service.HipChatService;
|
||||
|
@ -20,6 +24,10 @@ import org.elasticsearch.watcher.actions.index.IndexAction;
|
|||
import org.elasticsearch.watcher.actions.index.IndexActionFactory;
|
||||
import org.elasticsearch.watcher.actions.logging.LoggingAction;
|
||||
import org.elasticsearch.watcher.actions.logging.LoggingActionFactory;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.PagerDutyAction;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.PagerDutyActionFactory;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.InternalPagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.slack.SlackAction;
|
||||
import org.elasticsearch.watcher.actions.slack.SlackActionFactory;
|
||||
import org.elasticsearch.watcher.actions.slack.service.InternalSlackService;
|
||||
|
@ -35,6 +43,7 @@ import java.util.Map;
|
|||
public class WatcherActionModule extends AbstractModule {
|
||||
|
||||
private final Map<String, Class<? extends ActionFactory>> parsers = new HashMap<>();
|
||||
private final Map<String, Class<? extends EmailAttachmentParser>> emailAttachmentParsers = new HashMap<>();
|
||||
|
||||
public WatcherActionModule() {
|
||||
registerAction(EmailAction.TYPE, EmailActionFactory.class);
|
||||
|
@ -43,12 +52,20 @@ public class WatcherActionModule extends AbstractModule {
|
|||
registerAction(LoggingAction.TYPE, LoggingActionFactory.class);
|
||||
registerAction(HipChatAction.TYPE, HipChatActionFactory.class);
|
||||
registerAction(SlackAction.TYPE, SlackActionFactory.class);
|
||||
registerAction(PagerDutyAction.TYPE, PagerDutyActionFactory.class);
|
||||
|
||||
registerEmailAttachmentParser(HttpEmailAttachementParser.TYPE, HttpEmailAttachementParser.class);
|
||||
registerEmailAttachmentParser(DataAttachmentParser.TYPE, DataAttachmentParser.class);
|
||||
}
|
||||
|
||||
public void registerAction(String type, Class<? extends ActionFactory> parserType) {
|
||||
parsers.put(type, parserType);
|
||||
}
|
||||
|
||||
public void registerEmailAttachmentParser(String type, Class<? extends EmailAttachmentParser> parserClass) {
|
||||
emailAttachmentParsers.put(type, parserClass);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configure() {
|
||||
|
||||
|
@ -60,11 +77,27 @@ public class WatcherActionModule extends AbstractModule {
|
|||
|
||||
bind(ActionRegistry.class).asEagerSingleton();
|
||||
|
||||
// email
|
||||
bind(HtmlSanitizer.class).asEagerSingleton();
|
||||
bind(InternalEmailService.class).asEagerSingleton();
|
||||
bind(EmailService.class).to(InternalEmailService.class).asEagerSingleton();
|
||||
bind(HipChatService.class).to(InternalHipChatService.class).asEagerSingleton();
|
||||
bind(SlackService.class).to(InternalSlackService.class).asEagerSingleton();
|
||||
|
||||
MapBinder<String, EmailAttachmentParser> emailParsersBinder = MapBinder.newMapBinder(binder(), String.class, EmailAttachmentParser.class);
|
||||
for (Map.Entry<String, Class<? extends EmailAttachmentParser>> entry : emailAttachmentParsers.entrySet()) {
|
||||
emailParsersBinder.addBinding(entry.getKey()).to(entry.getValue()).asEagerSingleton();
|
||||
}
|
||||
bind(EmailAttachmentsParser.class).asEagerSingleton();
|
||||
|
||||
// hipchat
|
||||
bind(InternalHipChatService.class).asEagerSingleton();
|
||||
bind(HipChatService.class).to(InternalHipChatService.class);
|
||||
|
||||
// slack
|
||||
bind(InternalSlackService.class).asEagerSingleton();
|
||||
bind(SlackService.class).to(InternalSlackService.class);
|
||||
|
||||
// pager duty
|
||||
bind(InternalPagerDutyService.class).asEagerSingleton();
|
||||
bind(PagerDutyService.class).to(InternalPagerDutyService.class);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -16,12 +16,15 @@ import org.elasticsearch.watcher.actions.email.service.Authentication;
|
|||
import org.elasticsearch.watcher.actions.email.service.Email;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
|
||||
import org.elasticsearch.watcher.actions.email.service.Profile;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachments;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentsParser;
|
||||
import org.elasticsearch.watcher.support.secret.Secret;
|
||||
import org.elasticsearch.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.watcher.support.xcontent.WatcherXContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -35,13 +38,15 @@ public class EmailAction implements Action {
|
|||
private final @Nullable Authentication auth;
|
||||
private final @Nullable Profile profile;
|
||||
private final @Nullable DataAttachment dataAttachment;
|
||||
private final @Nullable EmailAttachments emailAttachments;
|
||||
|
||||
public EmailAction(EmailTemplate email, @Nullable String account, @Nullable Authentication auth, @Nullable Profile profile, @Nullable DataAttachment dataAttachment) {
|
||||
public EmailAction(EmailTemplate email, @Nullable String account, @Nullable Authentication auth, @Nullable Profile profile, @Nullable DataAttachment dataAttachment, @Nullable EmailAttachments emailAttachments) {
|
||||
this.email = email;
|
||||
this.account = account;
|
||||
this.auth = auth;
|
||||
this.profile = profile;
|
||||
this.dataAttachment = dataAttachment;
|
||||
this.emailAttachments = emailAttachments;
|
||||
}
|
||||
|
||||
public EmailTemplate getEmail() {
|
||||
|
@ -64,6 +69,10 @@ public class EmailAction implements Action {
|
|||
return dataAttachment;
|
||||
}
|
||||
|
||||
public EmailAttachments getAttachments() {
|
||||
return emailAttachments;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return TYPE;
|
||||
|
@ -76,21 +85,17 @@ public class EmailAction implements Action {
|
|||
|
||||
EmailAction action = (EmailAction) o;
|
||||
|
||||
if (!email.equals(action.email)) return false;
|
||||
if (account != null ? !account.equals(action.account) : action.account != null) return false;
|
||||
if (auth != null ? !auth.equals(action.auth) : action.auth != null) return false;
|
||||
if (profile != action.profile) return false;
|
||||
return !(dataAttachment != null ? !dataAttachment.equals(action.dataAttachment) : action.dataAttachment != null);
|
||||
return Objects.equals(email, action.email) &&
|
||||
Objects.equals(account, action.account) &&
|
||||
Objects.equals(auth, action.auth) &&
|
||||
Objects.equals(profile, action.profile) &&
|
||||
Objects.equals(emailAttachments, action.emailAttachments) &&
|
||||
Objects.equals(dataAttachment, action.dataAttachment);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = email.hashCode();
|
||||
result = 31 * result + (account != null ? account.hashCode() : 0);
|
||||
result = 31 * result + (auth != null ? auth.hashCode() : 0);
|
||||
result = 31 * result + (profile != null ? profile.hashCode() : 0);
|
||||
result = 31 * result + (dataAttachment != null ? dataAttachment.hashCode() : 0);
|
||||
return result;
|
||||
return Objects.hash(email, account, auth, profile, dataAttachment, emailAttachments);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -111,17 +116,21 @@ public class EmailAction implements Action {
|
|||
if (dataAttachment != null) {
|
||||
builder.field(Field.ATTACH_DATA.getPreferredName(), dataAttachment, params);
|
||||
}
|
||||
if (emailAttachments != null) {
|
||||
emailAttachments.toXContent(builder, params);
|
||||
}
|
||||
email.xContentBody(builder, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static EmailAction parse(String watchId, String actionId, XContentParser parser) throws IOException {
|
||||
public static EmailAction parse(String watchId, String actionId, XContentParser parser, EmailAttachmentsParser emailAttachmentsParser) throws IOException {
|
||||
EmailTemplate.Parser emailParser = new EmailTemplate.Parser();
|
||||
String account = null;
|
||||
String user = null;
|
||||
Secret password = null;
|
||||
Profile profile = Profile.STANDARD;
|
||||
DataAttachment dataAttachment = null;
|
||||
EmailAttachments attachments = EmailAttachments.EMPTY_ATTACHMENTS;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
|
@ -134,7 +143,9 @@ public class EmailAction implements Action {
|
|||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. failed to parse data attachment field [{}]", ioe, TYPE, watchId, actionId, currentFieldName);
|
||||
}
|
||||
}else if (!emailParser.handle(currentFieldName, parser)) {
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ATTACHMENTS)) {
|
||||
attachments = emailAttachmentsParser.parse(parser);
|
||||
} else if (!emailParser.handle(currentFieldName, parser)) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACCOUNT)) {
|
||||
account = parser.text();
|
||||
|
@ -162,7 +173,7 @@ public class EmailAction implements Action {
|
|||
auth = new Authentication(user, password);
|
||||
}
|
||||
|
||||
return new EmailAction(emailParser.parsedTemplate(), account, auth, profile, dataAttachment);
|
||||
return new EmailAction(emailParser.parsedTemplate(), account, auth, profile, dataAttachment, attachments);
|
||||
}
|
||||
|
||||
public static Builder builder(EmailTemplate email) {
|
||||
|
@ -232,6 +243,7 @@ public class EmailAction implements Action {
|
|||
@Nullable Authentication auth;
|
||||
@Nullable Profile profile;
|
||||
@Nullable DataAttachment dataAttachment;
|
||||
@Nullable EmailAttachments attachments;
|
||||
|
||||
private Builder(EmailTemplate email) {
|
||||
this.email = email;
|
||||
|
@ -252,13 +264,19 @@ public class EmailAction implements Action {
|
|||
return this;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public Builder setAttachPayload(DataAttachment dataAttachment) {
|
||||
this.dataAttachment = dataAttachment;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setAttachments(EmailAttachments attachments) {
|
||||
this.attachments = attachments;
|
||||
return this;
|
||||
}
|
||||
|
||||
public EmailAction build() {
|
||||
return new EmailAction(email, account, auth, profile, dataAttachment);
|
||||
return new EmailAction(email, account, auth, profile, dataAttachment, attachments);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -272,6 +290,7 @@ public class EmailAction implements Action {
|
|||
ParseField USER = new ParseField("user");
|
||||
ParseField PASSWORD = new ParseField("password");
|
||||
ParseField ATTACH_DATA = new ParseField("attach_data");
|
||||
ParseField ATTACHMENTS = new ParseField("attachments");
|
||||
|
||||
// result fields
|
||||
ParseField MESSAGE = new ParseField("message");
|
||||
|
|
|
@ -12,9 +12,12 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.watcher.actions.ActionFactory;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailService;
|
||||
import org.elasticsearch.watcher.actions.email.service.HtmlSanitizer;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentsParser;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -24,13 +27,18 @@ public class EmailActionFactory extends ActionFactory<EmailAction, ExecutableEma
|
|||
private final EmailService emailService;
|
||||
private final TextTemplateEngine templateEngine;
|
||||
private final HtmlSanitizer htmlSanitizer;
|
||||
private final EmailAttachmentsParser emailAttachmentsParser;
|
||||
private final Map<String, EmailAttachmentParser> emailAttachmentParsers;
|
||||
|
||||
@Inject
|
||||
public EmailActionFactory(Settings settings, EmailService emailService, TextTemplateEngine templateEngine, HtmlSanitizer htmlSanitizer) {
|
||||
public EmailActionFactory(Settings settings, EmailService emailService, TextTemplateEngine templateEngine, HtmlSanitizer htmlSanitizer,
|
||||
EmailAttachmentsParser emailAttachmentsParser, Map<String, EmailAttachmentParser> emailAttachmentParsers) {
|
||||
super(Loggers.getLogger(ExecutableEmailAction.class, settings));
|
||||
this.emailService = emailService;
|
||||
this.templateEngine = templateEngine;
|
||||
this.htmlSanitizer = htmlSanitizer;
|
||||
this.emailAttachmentsParser = emailAttachmentsParser;
|
||||
this.emailAttachmentParsers = emailAttachmentParsers;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -40,11 +48,11 @@ public class EmailActionFactory extends ActionFactory<EmailAction, ExecutableEma
|
|||
|
||||
@Override
|
||||
public EmailAction parseAction(String watchId, String actionId, XContentParser parser) throws IOException {
|
||||
return EmailAction.parse(watchId, actionId, parser);
|
||||
return EmailAction.parse(watchId, actionId, parser, emailAttachmentsParser);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableEmailAction createExecutable(EmailAction action) {
|
||||
return new ExecutableEmailAction(action, actionLogger, emailService, templateEngine, htmlSanitizer);
|
||||
return new ExecutableEmailAction(action, actionLogger, emailService, templateEngine, htmlSanitizer, emailAttachmentParsers);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ import org.elasticsearch.watcher.actions.email.service.Attachment;
|
|||
import org.elasticsearch.watcher.actions.email.service.Email;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailService;
|
||||
import org.elasticsearch.watcher.actions.email.service.HtmlSanitizer;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.support.Variables;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
|
@ -27,12 +28,15 @@ public class ExecutableEmailAction extends ExecutableAction<EmailAction> {
|
|||
final EmailService emailService;
|
||||
final TextTemplateEngine templateEngine;
|
||||
final HtmlSanitizer htmlSanitizer;
|
||||
private final Map<String, EmailAttachmentParser> emailAttachmentParsers;
|
||||
|
||||
public ExecutableEmailAction(EmailAction action, ESLogger logger, EmailService emailService, TextTemplateEngine templateEngine, HtmlSanitizer htmlSanitizer) {
|
||||
public ExecutableEmailAction(EmailAction action, ESLogger logger, EmailService emailService, TextTemplateEngine templateEngine, HtmlSanitizer htmlSanitizer,
|
||||
Map<String, EmailAttachmentParser> emailAttachmentParsers) {
|
||||
super(action, logger);
|
||||
this.emailService = emailService;
|
||||
this.templateEngine = templateEngine;
|
||||
this.htmlSanitizer = htmlSanitizer;
|
||||
this.emailAttachmentParsers = emailAttachmentParsers;
|
||||
}
|
||||
|
||||
public Action.Result execute(String actionId, WatchExecutionContext ctx, Payload payload) throws Exception {
|
||||
|
@ -45,6 +49,14 @@ public class ExecutableEmailAction extends ExecutableAction<EmailAction> {
|
|||
attachments.put(attachment.id(), attachment);
|
||||
}
|
||||
|
||||
if (action.getAttachments() != null && action.getAttachments().getAttachments().size() > 0) {
|
||||
for (EmailAttachmentParser.EmailAttachment emailAttachment : action.getAttachments().getAttachments()) {
|
||||
EmailAttachmentParser parser = emailAttachmentParsers.get(emailAttachment.type());
|
||||
Attachment attachment = parser.toAttachment(ctx, payload, emailAttachment);
|
||||
attachments.put(attachment.id(), attachment);
|
||||
}
|
||||
}
|
||||
|
||||
Email.Builder email = action.getEmail().render(templateEngine, model, htmlSanitizer, attachments);
|
||||
email.id(ctx.id().value());
|
||||
|
||||
|
|
|
@ -0,0 +1,81 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DataAttachment implements EmailAttachmentParser.EmailAttachment {
|
||||
|
||||
private final String id;
|
||||
private final org.elasticsearch.watcher.actions.email.DataAttachment dataAttachment;
|
||||
|
||||
public DataAttachment(String id, org.elasticsearch.watcher.actions.email.DataAttachment dataAttachment) {
|
||||
this.id = id;
|
||||
this.dataAttachment = dataAttachment;
|
||||
}
|
||||
|
||||
public org.elasticsearch.watcher.actions.email.DataAttachment getDataAttachment() {
|
||||
return dataAttachment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(id).startObject(DataAttachmentParser.TYPE);
|
||||
if (dataAttachment == org.elasticsearch.watcher.actions.email.DataAttachment.YAML) {
|
||||
builder.field("format", "yaml");
|
||||
} else {
|
||||
builder.field("format", "json");
|
||||
}
|
||||
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return DataAttachmentParser.TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
DataAttachment otherDataAttachment = (DataAttachment) o;
|
||||
return Objects.equals(id, otherDataAttachment.id) && Objects.equals(dataAttachment, otherDataAttachment.dataAttachment);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(id, dataAttachment);
|
||||
}
|
||||
|
||||
public static Builder builder(String id) {
|
||||
return new Builder(id);
|
||||
}
|
||||
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private String id;
|
||||
private org.elasticsearch.watcher.actions.email.DataAttachment dataAttachment;
|
||||
|
||||
private Builder(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Builder dataAttachment(org.elasticsearch.watcher.actions.email.DataAttachment dataAttachment) {
|
||||
this.dataAttachment = dataAttachment;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DataAttachment build() {
|
||||
return new DataAttachment(id, dataAttachment);
|
||||
}
|
||||
}
|
||||
}
|
|
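To show how the attachment pieces introduced above fit together, here is a hedged wiring sketch (the class and method names are illustrative, not part of this commit): it builds the new DataAttachment for the watch payload in YAML form and wraps it in an EmailAttachments instance, which is what the setAttachments(...) hook added to EmailAction.Builder earlier in this diff accepts.

import java.util.Collections;

import org.elasticsearch.watcher.actions.email.service.attachment.DataAttachment;
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachments;

public class EmailAttachmentWiringSketch {
    public static EmailAttachments payloadAsYaml() {
        // Attachment id "payload", rendered as YAML, via the builder introduced in this commit.
        DataAttachment payloadAttachment = DataAttachment.builder("payload")
                .dataAttachment(org.elasticsearch.watcher.actions.email.DataAttachment.YAML)
                .build();
        // EmailAttachments wraps the list of typed attachments; pass the result to
        // EmailAction.Builder#setAttachments(...) when assembling the email action.
        return new EmailAttachments(
                Collections.<EmailAttachmentParser.EmailAttachment>singletonList(payloadAttachment));
    }
}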
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.Attachment;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.support.Variables;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.watcher.actions.email.DataAttachment.resolve;
|
||||
|
||||
public class DataAttachmentParser implements EmailAttachmentParser<DataAttachment> {
|
||||
|
||||
interface Fields {
|
||||
ParseField FORMAT = new ParseField("format");
|
||||
}
|
||||
|
||||
public static final String TYPE = "data";
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataAttachment parse(String id, XContentParser parser) throws IOException {
|
||||
org.elasticsearch.watcher.actions.email.DataAttachment dataAttachment = org.elasticsearch.watcher.actions.email.DataAttachment.YAML;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (Strings.hasLength(currentFieldName) && ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FORMAT)) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
dataAttachment = resolve(parser.text());
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse data attachment. expected string value for [{}] field but found [{}] instead", currentFieldName, token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new DataAttachment(id, dataAttachment);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Attachment toAttachment(WatchExecutionContext ctx, Payload payload, DataAttachment attachment) {
|
||||
Map<String, Object> model = Variables.createCtxModel(ctx, payload);
|
||||
return attachment.getDataAttachment().create(model);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.watcher.actions.email.service.attachment;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.watcher.actions.email.service.Attachment;
import org.elasticsearch.watcher.execution.WatchExecutionContext;
import org.elasticsearch.watcher.watch.Payload;

import java.io.IOException;

/**
 * Marker interface for email attachments that have an additional execution step and are used by
 * EmailAttachmentParser class
 */
public interface EmailAttachmentParser<T extends EmailAttachmentParser.EmailAttachment> {

    interface EmailAttachment extends ToXContent {
        /**
         * @return A type to identify the email attachment, same as the parser identifier
         */
        String type();
    }

    /**
     * @return An identifier of this parser
     */
    String type();

    /**
     * A parser to create an EmailAttachment, that is serializable and does not execute anything
     *
     * @param id The id of this attachment, parsed from the outer content
     * @param parser The XContentParser used for parsing
     * @return A concrete EmailAttachment
     * @throws IOException in case parsing fails
     */
    T parse(String id, XContentParser parser) throws IOException;

    /**
     * Converts an email attachment to an attachment, potentially executing code like an HTTP request
     * @param context The WatchExecutionContext supplied with the whole watch execution
     * @param payload The Payload supplied with the action
     * @param attachment The typed attachment
     * @return An attachment that is ready to be used in a MimeMessage
     */
    Attachment toAttachment(WatchExecutionContext context, Payload payload, T attachment);

}
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class EmailAttachments implements ToXContent {
|
||||
|
||||
public static final EmailAttachments EMPTY_ATTACHMENTS = new EmailAttachments(Collections.<EmailAttachmentParser.EmailAttachment>emptyList());
|
||||
|
||||
public interface Fields {
|
||||
ParseField ATTACHMENTS = new ParseField("attachments");
|
||||
}
|
||||
|
||||
private final List<EmailAttachmentParser.EmailAttachment> attachments;
|
||||
|
||||
public EmailAttachments(List<EmailAttachmentParser.EmailAttachment> attachments) {
|
||||
this.attachments = attachments;
|
||||
}
|
||||
|
||||
public List<EmailAttachmentParser.EmailAttachment> getAttachments() {
|
||||
return attachments;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (attachments != null && attachments.size() > 0) {
|
||||
builder.startObject(Fields.ATTACHMENTS.getPreferredName());
|
||||
for (EmailAttachmentParser.EmailAttachment attachment : attachments) {
|
||||
attachment.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
EmailAttachments other = (EmailAttachments) o;
|
||||
return Objects.equals(attachments, other.attachments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(attachments);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class EmailAttachmentsParser {
|
||||
|
||||
private Map<String, EmailAttachmentParser> parsers;
|
||||
|
||||
@Inject
|
||||
public EmailAttachmentsParser(Map<String, EmailAttachmentParser> parsers) {
|
||||
this.parsers = parsers;
|
||||
}
|
||||
|
||||
public EmailAttachments parse(XContentParser parser) throws IOException {
|
||||
List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else {
|
||||
if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
|
||||
String currentAttachmentType = null;
|
||||
if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
|
||||
currentAttachmentType = parser.currentName();
|
||||
}
|
||||
parser.nextToken();
|
||||
|
||||
EmailAttachmentParser emailAttachmentParser = parsers.get(currentAttachmentType);
|
||||
if (emailAttachmentParser == null) {
|
||||
throw new ElasticsearchParseException("Cannot parse attachment of type " + currentAttachmentType);
|
||||
}
|
||||
EmailAttachmentParser.EmailAttachment emailAttachment = emailAttachmentParser.parse(currentFieldName, parser);
|
||||
attachments.add(emailAttachment);
|
||||
// one further to skip the end_object from the attachment
|
||||
parser.nextToken();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new EmailAttachments(attachments);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,109 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.Attachment;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.support.Variables;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpRequestAttachment> {
|
||||
|
||||
public interface Fields {
|
||||
ParseField REQUEST = new ParseField("request");
|
||||
ParseField CONTENT_TYPE = new ParseField("content_type");
|
||||
}
|
||||
|
||||
public static final String TYPE = "http";
|
||||
private final HttpClient httpClient;
|
||||
private HttpRequestTemplate.Parser requestTemplateParser;
|
||||
private final TextTemplateEngine templateEngine;
|
||||
private final ESLogger logger;
|
||||
|
||||
@Inject
|
||||
public HttpEmailAttachementParser(HttpClient httpClient, HttpRequestTemplate.Parser requestTemplateParser, TextTemplateEngine templateEngine) {
|
||||
this.httpClient = httpClient;
|
||||
this.requestTemplateParser = requestTemplateParser;
|
||||
this.templateEngine = templateEngine;
|
||||
this.logger = Loggers.getLogger(getClass());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HttpRequestAttachment parse(String id, XContentParser parser) throws IOException {
|
||||
String contentType = null;
|
||||
HttpRequestTemplate requestTemplate = null;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CONTENT_TYPE)) {
|
||||
contentType = parser.text();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.REQUEST)) {
|
||||
requestTemplate = requestTemplateParser.parse(parser);
|
||||
} else {
|
||||
throw new ElasticsearchParseException("Unknown field name [" + currentFieldName + "] in http request attachment configuration");
|
||||
}
|
||||
}
|
||||
|
||||
if (requestTemplate != null) {
|
||||
return new HttpRequestAttachment(id, requestTemplate, contentType);
|
||||
}
|
||||
|
||||
throw new ElasticsearchParseException("Could not parse http request attachment");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Attachment toAttachment(WatchExecutionContext context, Payload payload, HttpRequestAttachment attachment) {
|
||||
Map<String, Object> model = Variables.createCtxModel(context, payload);
|
||||
HttpRequest httpRequest = attachment.getRequestTemplate().render(templateEngine, model);
|
||||
|
||||
try {
|
||||
HttpResponse response = httpClient.execute(httpRequest);
|
||||
// check for status 200, only then append attachment
|
||||
if (response.status() >= 200 && response.status() < 300) {
|
||||
if (response.hasContent()) {
|
||||
String contentType = attachment.getContentType();
|
||||
String attachmentContentType = Strings.hasLength(contentType) ? contentType : response.contentType();
|
||||
return new Attachment.Bytes(attachment.getId(), response.body().toBytes(), attachmentContentType);
|
||||
} else {
|
||||
logger.error("Empty response body: [host[{}], port[{}], method[{}], path[{}]: response status [{}]", httpRequest.host(),
|
||||
httpRequest.port(), httpRequest.method(), httpRequest.path(), response.status());
|
||||
}
|
||||
} else {
|
||||
logger.error("Error getting http response: [host[{}], port[{}], method[{}], path[{}]: response status [{}]", httpRequest.host(),
|
||||
httpRequest.port(), httpRequest.method(), httpRequest.path(), response.status());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
logger.error("Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]: [{}]", e, httpRequest.port(),
|
||||
httpRequest.method(), httpRequest.path(), e.getMessage());
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,100 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class HttpRequestAttachment implements EmailAttachmentParser.EmailAttachment {
|
||||
|
||||
private final HttpRequestTemplate requestTemplate;
|
||||
private final String contentType;
|
||||
private String id;
|
||||
|
||||
public HttpRequestAttachment(String id, HttpRequestTemplate requestTemplate, @Nullable String contentType) {
|
||||
this.id = id;
|
||||
this.requestTemplate = requestTemplate;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
public HttpRequestTemplate getRequestTemplate() {
|
||||
return requestTemplate;
|
||||
}
|
||||
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(id)
|
||||
.startObject(HttpEmailAttachementParser.TYPE)
|
||||
.field(HttpEmailAttachementParser.Fields.REQUEST.getPreferredName(), requestTemplate, params);
|
||||
if (Strings.hasLength(contentType)) {
|
||||
builder.field(HttpEmailAttachementParser.Fields.CONTENT_TYPE.getPreferredName(), contentType);
|
||||
}
|
||||
return builder.endObject().endObject();
|
||||
}
|
||||
|
||||
public static Builder builder(String id) {
|
||||
return new Builder(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return HttpEmailAttachementParser.TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
HttpRequestAttachment otherDataAttachment = (HttpRequestAttachment) o;
|
||||
return Objects.equals(id, otherDataAttachment.id) && Objects.equals(requestTemplate, otherDataAttachment.requestTemplate)
|
||||
&& Objects.equals(contentType, otherDataAttachment.contentType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(id, requestTemplate, contentType);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private String id;
|
||||
private HttpRequestTemplate httpRequestTemplate;
|
||||
private String contentType;
|
||||
|
||||
private Builder(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Builder httpRequestTemplate(HttpRequestTemplate httpRequestTemplate) {
|
||||
this.httpRequestTemplate = httpRequestTemplate;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder contentType(String contentType) {
|
||||
this.contentType = contentType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpRequestAttachment build() {
|
||||
return new HttpRequestAttachment(id, httpRequestTemplate, contentType);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty;
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.watcher.actions.Action;
|
||||
import org.elasticsearch.watcher.actions.ExecutableAction;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyAccount;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.SentEvent;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEvent;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.support.Variables;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class ExecutablePagerDutyAction extends ExecutableAction<PagerDutyAction> {
|
||||
|
||||
private final TextTemplateEngine templateEngine;
|
||||
private final PagerDutyService pagerDutyService;
|
||||
|
||||
public ExecutablePagerDutyAction(PagerDutyAction action, ESLogger logger, PagerDutyService pagerDutyService, TextTemplateEngine templateEngine) {
|
||||
super(action, logger);
|
||||
this.pagerDutyService = pagerDutyService;
|
||||
this.templateEngine = templateEngine;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Action.Result execute(final String actionId, WatchExecutionContext ctx, Payload payload) throws Exception {
|
||||
|
||||
PagerDutyAccount account = action.event.account != null ?
|
||||
pagerDutyService.getAccount(action.event.account) :
|
||||
pagerDutyService.getDefaultAccount();
|
||||
|
||||
if (account == null) {
|
||||
// the account associated with this action was deleted
|
||||
throw new IllegalStateException("account [" + action.event.account + "] was not found. perhaps it was deleted");
|
||||
}
|
||||
|
||||
Map<String, Object> model = Variables.createCtxModel(ctx, payload);
|
||||
IncidentEvent event = action.event.render(ctx.watch().id(), actionId, templateEngine, model, account.getDefaults());
|
||||
|
||||
if (ctx.simulateAction(actionId)) {
|
||||
return new PagerDutyAction.Result.Simulated(event);
|
||||
}
|
||||
|
||||
SentEvent sentEvent = account.send(event, payload);
|
||||
return new PagerDutyAction.Result.Executed(account.getName(), sentEvent);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,138 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty;
|
||||
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.actions.Action;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEvent;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.SentEvent;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyAction implements Action {
|
||||
|
||||
public static final String TYPE = "pagerduty";
|
||||
|
||||
final IncidentEvent.Template event;
|
||||
|
||||
public PagerDutyAction(IncidentEvent.Template event) {
|
||||
this.event = event;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
PagerDutyAction that = (PagerDutyAction) o;
|
||||
return Objects.equals(event, that.event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
event.toXContent(builder, params);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static PagerDutyAction parse(String watchId, String actionId, XContentParser parser) throws IOException {
|
||||
IncidentEvent.Template eventTemplate = IncidentEvent.Template.parse(watchId, actionId, parser);
|
||||
return new PagerDutyAction(eventTemplate);
|
||||
}
|
||||
|
||||
public static Builder builder(IncidentEvent.Template event) {
|
||||
return new Builder(new PagerDutyAction(event));
|
||||
}
|
||||
|
||||
public interface Result {
|
||||
|
||||
class Executed extends Action.Result implements Result {
|
||||
|
||||
private final String account;
|
||||
private final SentEvent sentEvent;
|
||||
|
||||
public Executed(String account, SentEvent sentEvent) {
|
||||
super(TYPE, status(sentEvent));
|
||||
this.account = account;
|
||||
this.sentEvent = sentEvent;
|
||||
}
|
||||
|
||||
public SentEvent sentEvent() {
|
||||
return sentEvent;
|
||||
}
|
||||
|
||||
public String account() {
|
||||
return account;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(type);
|
||||
builder.field(XField.SENT_EVENT.getPreferredName(), sentEvent, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
static Status status(SentEvent sentEvent) {
|
||||
return sentEvent.successful() ? Status.SUCCESS : Status.FAILURE;
|
||||
}
|
||||
}
|
||||
|
||||
class Simulated extends Action.Result implements Result {
|
||||
|
||||
private final IncidentEvent event;
|
||||
|
||||
protected Simulated(IncidentEvent event) {
|
||||
super(TYPE, Status.SIMULATED);
|
||||
this.event = event;
|
||||
}
|
||||
|
||||
public IncidentEvent event() {
|
||||
return event;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder.startObject(type)
|
||||
.field(XField.EVENT.getPreferredName(), event, params)
|
||||
.endObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder implements Action.Builder<PagerDutyAction> {
|
||||
|
||||
final PagerDutyAction action;
|
||||
|
||||
public Builder(PagerDutyAction action) {
|
||||
this.action = action;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PagerDutyAction build() {
|
||||
return action;
|
||||
}
|
||||
}
|
||||
|
||||
public interface XField {
|
||||
ParseField SENT_EVENT = new ParseField("sent_event");
|
||||
ParseField EVENT = new ParseField("event");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.actions.ActionFactory;
|
||||
import org.elasticsearch.watcher.actions.hipchat.ExecutableHipChatAction;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyAccount;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyActionFactory extends ActionFactory<PagerDutyAction, ExecutablePagerDutyAction> {
|
||||
|
||||
private final TextTemplateEngine templateEngine;
|
||||
private final PagerDutyService pagerDutyService;
|
||||
|
||||
@Inject
|
||||
public PagerDutyActionFactory(Settings settings, TextTemplateEngine templateEngine, PagerDutyService pagerDutyService) {
|
||||
super(Loggers.getLogger(ExecutableHipChatAction.class, settings));
|
||||
this.templateEngine = templateEngine;
|
||||
this.pagerDutyService = pagerDutyService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return PagerDutyAction.TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PagerDutyAction parseAction(String watchId, String actionId, XContentParser parser) throws IOException {
|
||||
PagerDutyAction action = PagerDutyAction.parse(watchId, actionId, parser);
|
||||
PagerDutyAccount account = pagerDutyService.getAccount(action.event.account);
|
||||
if (account == null) {
|
||||
throw new ElasticsearchParseException("could not parse [pagerduty] action [{}/{}]. unknown pager duty account [{}]", watchId, account, action.event.account);
|
||||
}
|
||||
return action;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutablePagerDutyAction createExecutable(PagerDutyAction action) {
|
||||
return new ExecutablePagerDutyAction(action, actionLogger, pagerDutyService, templateEngine);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,409 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.support.http.HttpMethod;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.Scheme;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplate;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Official documentation for this can be found at
|
||||
*
|
||||
* https://developer.pagerduty.com/documentation/howto/manually-trigger-an-incident/
|
||||
* https://developer.pagerduty.com/documentation/integration/events/trigger
|
||||
* https://developer.pagerduty.com/documentation/integration/events/acknowledge
|
||||
* https://developer.pagerduty.com/documentation/integration/events/resolve
|
||||
*/
|
||||
public class IncidentEvent implements ToXContent {
|
||||
|
||||
static final String HOST = "events.pagerduty.com";
|
||||
static final String PATH = "/generic/2010-04-15/create_event.json";
|
||||
|
||||
final String description;
|
||||
final @Nullable String incidentKey;
|
||||
final @Nullable String client;
|
||||
final @Nullable String clientUrl;
|
||||
final @Nullable String account;
|
||||
final String eventType;
|
||||
final boolean attachPayload;
|
||||
final @Nullable IncidentEventContext[] contexts;
|
||||
|
||||
public IncidentEvent(String description, @Nullable String eventType, @Nullable String incidentKey, @Nullable String client,
|
||||
@Nullable String clientUrl, @Nullable String account, boolean attachPayload, @Nullable IncidentEventContext[] contexts) {
|
||||
this.description = description;
|
||||
if (description == null) {
|
||||
throw new IllegalStateException("could not create pagerduty event. missing required [" + XField.DESCRIPTION.getPreferredName() + "] setting");
|
||||
}
|
||||
this.incidentKey = incidentKey;
|
||||
this.client = client;
|
||||
this.clientUrl = clientUrl;
|
||||
this.account = account;
|
||||
this.attachPayload = attachPayload;
|
||||
this.contexts = contexts;
|
||||
this.eventType = Strings.hasLength(eventType) ? eventType : "trigger";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
IncidentEvent template = (IncidentEvent) o;
|
||||
return Objects.equals(description, template.description) &&
|
||||
Objects.equals(incidentKey, template.incidentKey) &&
|
||||
Objects.equals(client, template.client) &&
|
||||
Objects.equals(clientUrl, template.clientUrl) &&
|
||||
Objects.equals(attachPayload, template.attachPayload) &&
|
||||
Objects.equals(eventType, template.eventType) &&
|
||||
Objects.equals(account, template.account) &&
|
||||
Arrays.equals(contexts, template.contexts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Objects.hash(description, incidentKey, client, clientUrl, account, attachPayload, eventType);
|
||||
result = 31 * result + Arrays.hashCode(contexts);
|
||||
return result;
|
||||
}
|
||||
|
||||
public HttpRequest createRequest(final String serviceKey, final Payload payload) throws IOException {
|
||||
return HttpRequest.builder(HOST, -1)
|
||||
.method(HttpMethod.POST)
|
||||
.scheme(Scheme.HTTPS)
|
||||
.path(PATH)
|
||||
.jsonBody(new ToXContent() {
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(XField.SERVICE_KEY.getPreferredName(), serviceKey);
|
||||
builder.field(XField.EVENT_TYPE.getPreferredName(), eventType);
|
||||
builder.field(XField.DESCRIPTION.getPreferredName(), description);
|
||||
if (incidentKey != null) {
|
||||
builder.field(XField.INCIDENT_KEY.getPreferredName(), incidentKey);
|
||||
}
|
||||
if (client != null) {
|
||||
builder.field(XField.CLIENT.getPreferredName(), client);
|
||||
}
|
||||
if (clientUrl != null) {
|
||||
builder.field(XField.CLIENT_URL.getPreferredName(), clientUrl);
|
||||
}
|
||||
if (attachPayload) {
|
||||
builder.startObject(XField.DETAILS.getPreferredName());
|
||||
builder.field(XField.PAYLOAD.getPreferredName());
|
||||
payload.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
if (contexts != null && contexts.length > 0) {
|
||||
builder.startArray(IncidentEvent.XField.CONTEXT.getPreferredName());
|
||||
for (IncidentEventContext context : contexts) {
|
||||
context.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
})
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(XField.DESCRIPTION.getPreferredName(), description);
|
||||
if (incidentKey != null) {
|
||||
builder.field(XField.INCIDENT_KEY.getPreferredName(), incidentKey);
|
||||
}
|
||||
if (client != null) {
|
||||
builder.field(XField.CLIENT.getPreferredName(), client);
|
||||
}
|
||||
if (clientUrl != null) {
|
||||
builder.field(XField.CLIENT_URL.getPreferredName(), clientUrl);
|
||||
}
|
||||
if (account != null) {
|
||||
builder.field(XField.ACCOUNT.getPreferredName(), account);
|
||||
}
|
||||
builder.field(XField.ATTACH_PAYLOAD.getPreferredName(), attachPayload);
|
||||
if (contexts != null) {
|
||||
builder.startArray(XField.CONTEXT.getPreferredName());
|
||||
for (IncidentEventContext context : contexts) {
|
||||
context.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
public static Template.Builder templateBuilder(String description) {
|
||||
return templateBuilder(TextTemplate.inline(description).build());
|
||||
}
|
||||
|
||||
public static Template.Builder templateBuilder(TextTemplate description) {
|
||||
return new Template.Builder(description);
|
||||
}
|
||||
|
||||
public static class Template implements ToXContent {
|
||||
|
||||
final TextTemplate description;
|
||||
final TextTemplate incidentKey;
|
||||
final TextTemplate client;
|
||||
final TextTemplate clientUrl;
|
||||
final TextTemplate eventType;
|
||||
public final String account;
|
||||
final Boolean attachPayload;
|
||||
final IncidentEventContext.Template[] contexts;
|
||||
|
||||
public Template(TextTemplate description, TextTemplate eventType, TextTemplate incidentKey, TextTemplate client, TextTemplate clientUrl, String account, Boolean attachPayload, IncidentEventContext.Template[] contexts) {
|
||||
this.description = description;
|
||||
this.eventType = eventType;
|
||||
this.incidentKey = incidentKey;
|
||||
this.client = client;
|
||||
this.clientUrl = clientUrl;
|
||||
this.account = account;
|
||||
this.attachPayload = attachPayload;
|
||||
this.contexts = contexts;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
Template template = (Template) o;
|
||||
return Objects.equals(description, template.description) &&
|
||||
Objects.equals(incidentKey, template.incidentKey) &&
|
||||
Objects.equals(client, template.client) &&
|
||||
Objects.equals(clientUrl, template.clientUrl) &&
|
||||
Objects.equals(eventType, template.eventType) &&
|
||||
Objects.equals(attachPayload, template.attachPayload) &&
|
||||
Objects.equals(account, template.account) &&
|
||||
Arrays.equals(contexts, template.contexts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Objects.hash(description, eventType, incidentKey, client, clientUrl, attachPayload, account);
|
||||
result = 31 * result + Arrays.hashCode(contexts);
|
||||
return result;
|
||||
}
|
||||
|
||||
public IncidentEvent render(String watchId, String actionId, TextTemplateEngine engine, Map<String, Object> model, IncidentEventDefaults defaults) {
|
||||
String description = this.description != null ? engine.render(this.description, model) : defaults.description;
|
||||
String incidentKey = this.incidentKey != null ? engine.render(this.incidentKey, model) :
|
||||
defaults.incidentKey != null ? defaults.incidentKey : watchId;
|
||||
String client = this.client != null ? engine.render(this.client, model) : defaults.client;
|
||||
String clientUrl = this.clientUrl != null ? engine.render(this.clientUrl, model) : defaults.clientUrl;
|
||||
String eventType = this.eventType != null ? engine.render(this.eventType, model) : defaults.eventType;
|
||||
boolean attachPayload = this.attachPayload != null ? this.attachPayload : defaults.attachPayload;
|
||||
IncidentEventContext[] contexts = null;
|
||||
if (this.contexts != null) {
|
||||
contexts = new IncidentEventContext[this.contexts.length];
|
||||
for (int i = 0; i < this.contexts.length; i++) {
|
||||
contexts[i] = this.contexts[i].render(engine, model, defaults);
|
||||
}
|
||||
}
|
||||
return new IncidentEvent(description, eventType, incidentKey, client, clientUrl, account, attachPayload, contexts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(XField.DESCRIPTION.getPreferredName(), description, params);
|
||||
if (incidentKey != null) {
|
||||
builder.field(XField.INCIDENT_KEY.getPreferredName(), incidentKey, params);
|
||||
}
|
||||
if (client != null) {
|
||||
builder.field(XField.CLIENT.getPreferredName(), client, params);
|
||||
}
|
||||
if (clientUrl != null) {
|
||||
builder.field(XField.CLIENT_URL.getPreferredName(), clientUrl, params);
|
||||
}
|
||||
if (eventType != null) {
|
||||
builder.field(XField.EVENT_TYPE.getPreferredName(), eventType, params);
|
||||
}
|
||||
if (attachPayload != null) {
|
||||
builder.field(XField.ATTACH_PAYLOAD.getPreferredName(), attachPayload);
|
||||
}
|
||||
if (account != null) {
|
||||
builder.field(XField.ACCOUNT.getPreferredName(), account);
|
||||
}
|
||||
if (contexts != null) {
|
||||
builder.startArray(XField.CONTEXT.getPreferredName());
|
||||
for (IncidentEventContext.Template context : contexts) {
|
||||
context.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static Template parse(String watchId, String actionId, XContentParser parser) throws IOException {
|
||||
TextTemplate incidentKey = null;
|
||||
TextTemplate description = null;
|
||||
TextTemplate client = null;
|
||||
TextTemplate clientUrl = null;
|
||||
TextTemplate eventType = null;
|
||||
String account = null;
|
||||
Boolean attachPayload = null;
|
||||
IncidentEventContext.Template[] contexts = null;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.INCIDENT_KEY)) {
|
||||
try {
|
||||
incidentKey = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.INCIDENT_KEY.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.DESCRIPTION)) {
|
||||
try {
|
||||
description = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.DESCRIPTION.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.CLIENT)) {
|
||||
try {
|
||||
client = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.CLIENT.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.CLIENT_URL)) {
|
||||
try {
|
||||
clientUrl = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.CLIENT_URL.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ACCOUNT)) {
|
||||
try {
|
||||
account = parser.text();
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.CLIENT_URL.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.EVENT_TYPE)) {
|
||||
try {
|
||||
eventType = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.EVENT_TYPE.getPreferredName());
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ATTACH_PAYLOAD)) {
|
||||
if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
attachPayload = parser.booleanValue();
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}], expected a boolean value but found [{}] instead", XField.ATTACH_PAYLOAD.getPreferredName(), token);
|
||||
}
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.CONTEXT)) {
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
List<IncidentEventContext.Template> list = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
try {
|
||||
list.add(IncidentEventContext.Template.parse(parser));
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", XField.CONTEXT.getPreferredName());
|
||||
}
|
||||
}
|
||||
contexts = list.toArray(new IncidentEventContext.Template[list.size()]);
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse pager duty event template. unexpected field [{}]", currentFieldName);
|
||||
}
|
||||
}
|
||||
return new Template(description, eventType, incidentKey, client, clientUrl, account, attachPayload, contexts);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
final TextTemplate description;
|
||||
TextTemplate incidentKey;
|
||||
TextTemplate client;
|
||||
TextTemplate clientUrl;
|
||||
TextTemplate eventType;
|
||||
String account;
|
||||
Boolean attachPayload;
|
||||
List<IncidentEventContext.Template> contexts = new ArrayList<>();
|
||||
|
||||
public Builder(TextTemplate description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Builder setIncidentKey(TextTemplate incidentKey) {
|
||||
this.incidentKey = incidentKey;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setClient(TextTemplate client) {
|
||||
this.client = client;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setClientUrl(TextTemplate clientUrl) {
|
||||
this.clientUrl = clientUrl;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setEventType(TextTemplate eventType) {
|
||||
this.eventType = eventType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setAccount(String account) {
|
||||
this.account= account;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setAttachPayload(Boolean attachPayload) {
|
||||
this.attachPayload = attachPayload;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addContext(IncidentEventContext.Template context) {
|
||||
this.contexts.add(context);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Template build() {
|
||||
IncidentEventContext.Template[] contexts = this.contexts.isEmpty() ? null :
|
||||
this.contexts.toArray(new IncidentEventContext.Template[this.contexts.size()]);
|
||||
return new Template(description, eventType, incidentKey, client, clientUrl, account, attachPayload, contexts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface XField {
|
||||
|
||||
ParseField TYPE = new ParseField("type");
|
||||
ParseField EVENT_TYPE = new ParseField("event_type");
|
||||
|
||||
ParseField ACCOUNT = new ParseField("account");
|
||||
ParseField DESCRIPTION = new ParseField("description");
|
||||
ParseField INCIDENT_KEY = new ParseField("incident_key");
|
||||
ParseField CLIENT = new ParseField("client");
|
||||
ParseField CLIENT_URL = new ParseField("client_url");
|
||||
ParseField ATTACH_PAYLOAD = new ParseField("attach_payload");
|
||||
ParseField CONTEXT = new ParseField("context");
|
||||
|
||||
ParseField SERVICE_KEY = new ParseField("service_key");
|
||||
ParseField PAYLOAD = new ParseField("payload");
|
||||
ParseField DETAILS = new ParseField("details");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,269 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplate;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class IncidentEventContext implements ToXContent {
|
||||
|
||||
enum Type {
|
||||
LINK, IMAGE
|
||||
}
|
||||
|
||||
final Type type;
|
||||
final String href;
|
||||
final String text;
|
||||
final String src;
|
||||
final String alt;
|
||||
|
||||
public static IncidentEventContext link(String href, @Nullable String text) {
|
||||
assert href != null;
|
||||
return new IncidentEventContext(Type.LINK, href, text, null, null);
|
||||
}
|
||||
|
||||
public static IncidentEventContext image(String src, @Nullable String href, @Nullable String alt) {
|
||||
assert src != null;
|
||||
return new IncidentEventContext(Type.IMAGE, href, null, src, alt);
|
||||
}
|
||||
|
||||
private IncidentEventContext(Type type, String href, String text, String src, String alt) {
|
||||
this.type = type;
|
||||
this.href = href;
|
||||
this.text = text;
|
||||
this.src = src;
|
||||
this.alt = alt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
IncidentEventContext that = (IncidentEventContext) o;
|
||||
|
||||
return Objects.equals(type, that.type) && Objects.equals(href, that.href) && Objects.equals(text, that.text) && Objects.equals(src, that.src)
|
||||
&& Objects.equals(alt, that.alt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(type, href, text, src, alt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(XField.TYPE.getPreferredName(), type.name().toLowerCase(Locale.ROOT));
|
||||
switch (type) {
|
||||
case LINK:
|
||||
builder.field(XField.HREF.getPreferredName(), href);
|
||||
if (text != null) {
|
||||
builder.field(XField.TEXT.getPreferredName(), text);
|
||||
}
|
||||
break;
|
||||
case IMAGE:
|
||||
builder.field(XField.SRC.getPreferredName(), src);
|
||||
if (href != null) {
|
||||
builder.field(XField.HREF.getPreferredName(), href);
|
||||
}
|
||||
if (alt != null) {
|
||||
builder.field(XField.ALT.getPreferredName(), alt);
|
||||
}
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static class Template implements ToXContent {
|
||||
|
||||
final Type type;
|
||||
final TextTemplate href;
|
||||
final TextTemplate text;
|
||||
final TextTemplate src;
|
||||
final TextTemplate alt;
|
||||
|
||||
public static Template link(TextTemplate href, @Nullable TextTemplate text) {
|
||||
if (href == null) {
|
||||
throw new IllegalStateException("could not create link context for pager duty trigger incident event. missing required [href] setting");
|
||||
}
|
||||
return new Template(Type.LINK, href, text, null, null);
|
||||
}
|
||||
|
||||
public static Template image(TextTemplate src, @Nullable TextTemplate href, @Nullable TextTemplate alt) {
|
||||
if (src == null) {
|
||||
throw new IllegalStateException("could not create link context for pager duty trigger incident event. missing required [src] setting");
|
||||
}
|
||||
return new Template(Type.IMAGE, href, null, src, alt);
|
||||
}
|
||||
|
||||
private Template(Type type, TextTemplate href, TextTemplate text, TextTemplate src, TextTemplate alt) {
|
||||
this.type = type;
|
||||
this.href = href;
|
||||
this.text = text;
|
||||
this.src = src;
|
||||
this.alt = alt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
Template that = (Template) o;
|
||||
return Objects.equals(type, that.type) && Objects.equals(href, that.href) && Objects.equals(text, that.text) && Objects.equals(src, that.src)
|
||||
&& Objects.equals(alt, that.alt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(type, href, text, src, alt);
|
||||
}
|
||||
|
||||
public IncidentEventContext render(TextTemplateEngine engine, Map<String, Object> model, IncidentEventDefaults defaults) {
|
||||
switch (type) {
|
||||
case LINK:
|
||||
String href = this.href != null ? engine.render(this.href, model) : defaults.link.href;
|
||||
String text = this.text != null ? engine.render(this.text, model) : defaults.link.text;
|
||||
return IncidentEventContext.link(href, text);
|
||||
|
||||
default:
|
||||
assert type == Type.IMAGE;
|
||||
String src = this.src != null ? engine.render(this.src, model) : defaults.image.src;
|
||||
href = this.href != null ? engine.render(this.href, model) : defaults.image.href;
|
||||
String alt = this.alt != null ? engine.render(this.alt, model) : defaults.image.alt;
|
||||
return IncidentEventContext.image(src, href, alt);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(XField.TYPE.getPreferredName(), type.name().toLowerCase(Locale.ROOT));
|
||||
switch (type) {
|
||||
case LINK:
|
||||
builder.field(XField.HREF.getPreferredName(), href, params);
|
||||
if (text != null) {
|
||||
builder.field(XField.TEXT.getPreferredName(), text, params);
|
||||
}
|
||||
break;
|
||||
case IMAGE:
|
||||
builder.field(XField.SRC.getPreferredName(), src, params);
|
||||
if (href != null) {
|
||||
builder.field(XField.HREF.getPreferredName(), href, params);
|
||||
}
|
||||
if (alt != null) {
|
||||
builder.field(XField.ALT.getPreferredName(), alt, params);
|
||||
}
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
public static Template parse(XContentParser parser) throws IOException {
|
||||
Type type = null;
|
||||
TextTemplate href = null;
|
||||
TextTemplate text = null;
|
||||
TextTemplate src = null;
|
||||
TextTemplate alt = null;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (Strings.hasLength(currentFieldName)) {
|
||||
if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TYPE)) {
|
||||
try {
|
||||
type = Type.valueOf(parser.text().toUpperCase(Locale.ROOT));
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unknown context type [{}]", parser.text());
|
||||
}
|
||||
} else {
|
||||
TextTemplate parsedTemplate;
|
||||
try {
|
||||
parsedTemplate = TextTemplate.parse(parser);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. failed to parse [{}] field", e, parser.text(), currentFieldName);
|
||||
}
|
||||
|
||||
if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.HREF)) {
|
||||
href = parsedTemplate;
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TEXT)) {
|
||||
text = parsedTemplate;
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.SRC)) {
|
||||
src = parsedTemplate;
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ALT)) {
|
||||
alt = parsedTemplate;
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unknown field [{}]", currentFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return createAndValidateTemplate(type, href, src, alt, text);
|
||||
}
|
||||
|
||||
private static Template createAndValidateTemplate(Type type, TextTemplate href, TextTemplate src, TextTemplate alt, TextTemplate text) {
|
||||
if (type == null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field [{}]", XField.TYPE.getPreferredName());
|
||||
}
|
||||
|
||||
switch (type) {
|
||||
case LINK:
|
||||
if (href == null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field [{}] for [{}] context", XField.HREF.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
if (src != null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for [{}] context", XField.SRC.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
if (alt != null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for [{}] context", XField.ALT.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
return link(href, text);
|
||||
case IMAGE:
|
||||
if (src == null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field [{}] for [{}] context", XField.SRC.getPreferredName(), Type.IMAGE.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
if (text != null) {
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for [{}] context", XField.TEXT.getPreferredName(), Type.IMAGE.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
return image(src, href, alt);
|
||||
default:
|
||||
throw new ElasticsearchParseException("could not parse trigger incident event context. unknown context type [{}]", type);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
interface XField {
|
||||
ParseField TYPE = new ParseField("type");
|
||||
ParseField HREF = new ParseField("href");
|
||||
|
||||
// "link" context fields
|
||||
ParseField TEXT = new ParseField("text");
|
||||
|
||||
// "image" context fields
|
||||
ParseField SRC = new ParseField("src");
|
||||
ParseField ALT = new ParseField("alt");
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Get trigger default configurations either from global settings or specific account settings and merge them
|
||||
*/
|
||||
public class IncidentEventDefaults {
|
||||
|
||||
final String description;
|
||||
final String incidentKey;
|
||||
final String client;
|
||||
final String clientUrl;
|
||||
final String eventType;
|
||||
final boolean attachPayload;
|
||||
final Context.LinkDefaults link;
|
||||
final Context.ImageDefaults image;
|
||||
|
||||
public IncidentEventDefaults(Settings accountSettings) {
|
||||
description = accountSettings.get(IncidentEvent.XField.DESCRIPTION.getPreferredName(), null);
|
||||
incidentKey = accountSettings.get(IncidentEvent.XField.INCIDENT_KEY.getPreferredName(), null);
|
||||
client = accountSettings.get(IncidentEvent.XField.CLIENT.getPreferredName(), null);
|
||||
clientUrl = accountSettings.get(IncidentEvent.XField.CLIENT_URL.getPreferredName(), null);
|
||||
eventType = accountSettings.get(IncidentEvent.XField.EVENT_TYPE.getPreferredName(), null);
|
||||
attachPayload = accountSettings.getAsBoolean(IncidentEvent.XField.ATTACH_PAYLOAD.getPreferredName(), false);
|
||||
link = new Context.LinkDefaults(accountSettings.getAsSettings("link"));
|
||||
image = new Context.ImageDefaults(accountSettings.getAsSettings("image"));
|
||||
|
||||
}
|
||||
|
||||
static class Context {
|
||||
|
||||
static class LinkDefaults {
|
||||
|
||||
final String href;
|
||||
final String text;
|
||||
|
||||
public LinkDefaults(Settings settings) {
|
||||
href = settings.get(IncidentEventContext.XField.HREF.getPreferredName(), null);
|
||||
text = settings.get(IncidentEventContext.XField.TEXT.getPreferredName(), null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(href, text);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || getClass() != obj.getClass()){
|
||||
return false;
|
||||
}
|
||||
final LinkDefaults other = (LinkDefaults) obj;
|
||||
return Objects.equals(href, other.href) && Objects.equals(text, other.text);
|
||||
}
|
||||
}
|
||||
|
||||
static class ImageDefaults {
|
||||
|
||||
final String href;
|
||||
final String src;
|
||||
final String alt;
|
||||
|
||||
public ImageDefaults(Settings settings) {
|
||||
href = settings.get(IncidentEventContext.XField.HREF.getPreferredName(), null);
|
||||
src = settings.get(IncidentEventContext.XField.SRC.getPreferredName(), null);
|
||||
alt = settings.get(IncidentEventContext.XField.ALT.getPreferredName(), null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(href, src, alt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || getClass() != obj.getClass()){
|
||||
return false;
|
||||
}
|
||||
final ImageDefaults other = (ImageDefaults) obj;
|
||||
return Objects.equals(href, other.href) && Objects.equals(src, other.src) && Objects.equals(alt, other.alt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
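For reference, a minimal sketch of how the per-account trigger defaults read above could be supplied, following the `settings.put(...)` style used in WatcherF later in this change. The account name `ops_team` and every value are illustrative; the `event_defaults` key names assume the same snake_case field names that appear in the results mapping, and the builder creation assumes the 2.x-era core API.
// Hypothetical settings sketch (not part of this change); key names follow the fields read by IncidentEventDefaults.
Settings.Builder settings = Settings.settingsBuilder();
settings.put("watcher.actions.pagerduty.service.default_account", "ops_team");
settings.put("watcher.actions.pagerduty.service.account.ops_team.service_api_key", "<service-key>");
settings.put("watcher.actions.pagerduty.service.account.ops_team.event_defaults.description", "watch triggered");
settings.put("watcher.actions.pagerduty.service.account.ops_team.event_defaults.incident_key", "watcher");
settings.put("watcher.actions.pagerduty.service.account.ops_team.event_defaults.attach_payload", true);
settings.put("watcher.actions.pagerduty.service.account.ops_team.event_defaults.link.href", "https://example.org/dashboard");
settings.put("watcher.actions.pagerduty.service.account.ops_team.event_defaults.image.src", "https://example.org/chart.png");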
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.watcher.shield.WatcherSettingsFilter;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class InternalPagerDutyService extends AbstractLifecycleComponent<PagerDutyService> implements PagerDutyService {
|
||||
|
||||
public static final Setting<Settings> PAGERDUTY_ACCOUNT_SETTING = Setting.groupSetting("watcher.actions.pagerduty.service.", true, Setting.Scope.CLUSTER);
|
||||
|
||||
private final HttpClient httpClient;
|
||||
private volatile PagerDutyAccounts accounts;
|
||||
|
||||
@Inject
|
||||
public InternalPagerDutyService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings,
|
||||
WatcherSettingsFilter settingsFilter) {
|
||||
super(settings);
|
||||
this.httpClient = httpClient;
|
||||
settingsFilter.filterOut(
|
||||
"watcher.actions.pagerduty.service." + PagerDutyAccount.SERVICE_KEY_SETTING,
|
||||
"watcher.actions.pagerduty.service.account.*." + PagerDutyAccount.SERVICE_KEY_SETTING
|
||||
);
|
||||
clusterSettings.addSettingsUpdateConsumer(PAGERDUTY_ACCOUNT_SETTING, this::setPagerDutyAccountSetting);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doStart() {
|
||||
setPagerDutyAccountSetting(PAGERDUTY_ACCOUNT_SETTING.get(settings));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doStop() {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doClose() {
|
||||
}
|
||||
|
||||
private void setPagerDutyAccountSetting(Settings settings) {
|
||||
accounts = new PagerDutyAccounts(settings, httpClient, logger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PagerDutyAccount getDefaultAccount() {
|
||||
return accounts.account(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PagerDutyAccount getAccount(String name) {
|
||||
return accounts.account(name);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,57 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsException;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyAccount {
|
||||
|
||||
public static final String SERVICE_KEY_SETTING = "service_api_key";
|
||||
public static final String TRIGGER_DEFAULTS_SETTING = "event_defaults";
|
||||
|
||||
final String name;
|
||||
final String serviceKey;
|
||||
final HttpClient httpClient;
|
||||
final IncidentEventDefaults eventDefaults;
|
||||
final ESLogger logger;
|
||||
|
||||
public PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, ESLogger logger) {
|
||||
this.name = name;
|
||||
this.serviceKey = accountSettings.get(SERVICE_KEY_SETTING, serviceSettings.get(SERVICE_KEY_SETTING, null));
|
||||
if (this.serviceKey == null) {
|
||||
throw new SettingsException("invalid pagerduty account [" + name + "]. missing required [" + SERVICE_KEY_SETTING + "] setting");
|
||||
}
|
||||
this.httpClient = httpClient;
|
||||
|
||||
this.eventDefaults = new IncidentEventDefaults(accountSettings.getAsSettings(TRIGGER_DEFAULTS_SETTING));
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public IncidentEventDefaults getDefaults() {
|
||||
return eventDefaults;
|
||||
}
|
||||
|
||||
public SentEvent send(IncidentEvent event, Payload payload) throws IOException {
|
||||
HttpRequest request = event.createRequest(serviceKey, payload);
|
||||
HttpResponse response = httpClient.execute(request);
|
||||
return SentEvent.responded(event, request, response);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsException;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyAccounts {
|
||||
|
||||
private final Map<String, PagerDutyAccount> accounts;
|
||||
private final String defaultAccountName;
|
||||
|
||||
public PagerDutyAccounts(Settings serviceSettings, HttpClient httpClient, ESLogger logger) {
|
||||
Settings accountsSettings = serviceSettings.getAsSettings("account");
|
||||
accounts = new HashMap<>();
|
||||
for (String name : accountsSettings.names()) {
|
||||
Settings accountSettings = accountsSettings.getAsSettings(name);
|
||||
PagerDutyAccount account = new PagerDutyAccount(name, accountSettings, serviceSettings, httpClient, logger);
|
||||
accounts.put(name, account);
|
||||
}
|
||||
|
||||
String defaultAccountName = serviceSettings.get("default_account");
|
||||
if (defaultAccountName == null) {
|
||||
if (accounts.isEmpty()) {
|
||||
this.defaultAccountName = null;
|
||||
} else {
|
||||
PagerDutyAccount account = accounts.values().iterator().next();
|
||||
logger.info("default pager duty account set to [{}]", account.name);
|
||||
this.defaultAccountName = account.name;
|
||||
}
|
||||
} else if (!accounts.containsKey(defaultAccountName)) {
|
||||
throw new SettingsException("could not find default pagerduty account [" + defaultAccountName + "]");
|
||||
} else {
|
||||
this.defaultAccountName = defaultAccountName;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the account associated with the given name. If there is no such account, {@code null} is returned.
|
||||
* If the given name is {@code null}, the default account will be returned.
|
||||
*
|
||||
* @param name The name of the requested account
|
||||
* @return The account associated with the given name, or {@code null} if an unknown account was requested.
|
||||
* @throws IllegalStateException if the name is null and the default account is null.
|
||||
*/
|
||||
public PagerDutyAccount account(String name) throws IllegalStateException {
|
||||
if (name == null) {
|
||||
if (defaultAccountName == null) {
|
||||
throw new IllegalStateException("cannot find default pagerduty account as no accounts have been configured");
|
||||
}
|
||||
name = defaultAccountName;
|
||||
}
|
||||
return accounts.get(name);
|
||||
}
|
||||
}
|
|
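A short usage sketch of the resolution rules implemented above; the serviceSettings, httpClient and logger instances are assumed to exist, and the account name is made up.
PagerDutyAccounts accounts = new PagerDutyAccounts(serviceSettings, httpClient, logger);
PagerDutyAccount named = accounts.account("ops_team");    // the named account, or null if it is unknown
PagerDutyAccount fallback = accounts.account(null);       // the configured default, or the first account found
// With no accounts configured at all, accounts.account(null) throws IllegalStateException.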
@ -0,0 +1,18 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public interface PagerDutyService extends LifecycleComponent<PagerDutyService> {
|
||||
|
||||
PagerDutyAccount getDefaultAccount();
|
||||
|
||||
PagerDutyAccount getAccount(String accountName);
|
||||
}
|
|
@ -0,0 +1,161 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty.service;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.PagerDutyAction;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SentEvent implements ToXContent {
|
||||
|
||||
final IncidentEvent event;
|
||||
final @Nullable HttpRequest request;
|
||||
final @Nullable HttpResponse response;
|
||||
final @Nullable String failureReason;
|
||||
|
||||
public static SentEvent responded(IncidentEvent event, HttpRequest request, HttpResponse response) {
|
||||
String failureReason = resolveFailureReason(response);
|
||||
return new SentEvent(event, request, response, failureReason);
|
||||
}
|
||||
|
||||
public static SentEvent error(IncidentEvent event, String reason) {
|
||||
return new SentEvent(event, null, null, reason);
|
||||
}
|
||||
|
||||
private SentEvent(IncidentEvent event, HttpRequest request, HttpResponse response, String failureReason) {
|
||||
this.event = event;
|
||||
this.request = request;
|
||||
this.response = response;
|
||||
this.failureReason = failureReason;
|
||||
}
|
||||
|
||||
public boolean successful() {
|
||||
return failureReason == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
SentEvent sentEvent = (SentEvent) o;
|
||||
return Objects.equals(event, sentEvent.event) && Objects.equals(request, sentEvent.request) && Objects.equals(response, sentEvent.response) && Objects.equals(failureReason, sentEvent.failureReason);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(event, request, response, failureReason);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(XField.EVENT.getPreferredName(), event, params);
|
||||
if (!successful()) {
|
||||
builder.field(XField.REASON.getPreferredName(), failureReason);
|
||||
if (request != null) {
|
||||
builder.field(XField.REQUEST.getPreferredName(), request, params);
|
||||
}
|
||||
if (response != null) {
|
||||
builder.field(XField.RESPONSE.getPreferredName(), response, params);
|
||||
}
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
private static String resolveFailureReason(HttpResponse response) {
|
||||
|
||||
// if for some reason we fail to parse the body, let's fall back on the http status code.
|
||||
int status = response.status();
|
||||
if (status < 300) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// ok... we have an error
|
||||
|
||||
// lets first try to parse the error response in the body
|
||||
// based on https://developer.pagerduty.com/documentation/rest/errors
|
||||
try {
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(response.body());
|
||||
parser.nextToken();
|
||||
|
||||
String message = null;
|
||||
List<String> errors = new ArrayList<>();
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.MESSAGE)) {
|
||||
message = parser.text();
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.CODE)) {
|
||||
// we don't use this code.. so just consume the token
|
||||
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ERRORS)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
errors.add(parser.text());
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchParseException("could not parse pagerduty event response. unexpected field [{}]", currentFieldName);
|
||||
}
|
||||
}
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
if (message != null) {
|
||||
sb.append(message);
|
||||
}
|
||||
if (!errors.isEmpty()) {
|
||||
sb.append(":");
|
||||
for (String error : errors) {
|
||||
sb.append(" ").append(error).append(".");
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
} catch (Exception e) {
|
||||
// too bad... we couldn't parse the body... note that we don't log this error as there's no real
|
||||
// need for it. This whole error parsing is a nice to have, nothing more. On error, the http
|
||||
// response object is anyway added to the action result in the watch record (though not searchable)
|
||||
}
|
||||
|
||||
switch (status) {
|
||||
case 400: return "Bad Request";
|
||||
case 401: return "Unauthorized. The account service api key is invalid.";
|
||||
case 403: return "Forbidden. The account doesn't have permission to send this trigger.";
|
||||
case 404: return "Not Found. The requested PagerDuty resource could not be found.";
|
||||
case 408: return "Request Timeout. The request took too long to process.";
|
||||
case 500: return "PagerDuty Server Error. Internal error occurred while processing request.";
|
||||
default:
|
||||
return "Unknown Error";
|
||||
}
|
||||
}
|
||||
|
||||
public interface XField {
|
||||
ParseField EVENT = PagerDutyAction.XField.EVENT;
|
||||
ParseField REASON = new ParseField("reason");
|
||||
ParseField REQUEST = new ParseField("request");
|
||||
ParseField RESPONSE = new ParseField("response");
|
||||
|
||||
ParseField MESSAGE = new ParseField("message");
|
||||
ParseField CODE = new ParseField("code");
|
||||
ParseField ERRORS = new ParseField("errors");
|
||||
}
|
||||
}
|
|
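For context on the body parsing above, the error payload it expects (per the PagerDuty REST errors documentation referenced in the code) has roughly the following shape; all values here are illustrative only.
{
  "message" : "Event object is invalid",
  "code" : 2001,
  "errors" : [ "service key is missing or malformed" ]
}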
@ -56,6 +56,4 @@ public class InternalSlackService extends AbstractLifecycleComponent<SlackServic
|
|||
public SlackAccount getAccount(String name) {
|
||||
return accounts.account(name);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -68,15 +68,15 @@ public class RestExecuteWatchAction extends WatcherRestHandler {
|
|||
private ExecuteWatchRequest parseRequest(RestRequest request, WatcherClient client) throws IOException {
|
||||
ExecuteWatchRequestBuilder builder = client.prepareExecuteWatch();
|
||||
builder.setId(request.param("id"));
|
||||
|
||||
if (WatcherParams.debug(request)) {
|
||||
builder.setDebug(true);
|
||||
}
|
||||
builder.setDebug(WatcherParams.debug(request));
|
||||
|
||||
if (request.content() == null || request.content().length() == 0) {
|
||||
return builder.request();
|
||||
}
|
||||
|
||||
builder.setRecordExecution(request.paramAsBoolean(Field.RECORD_EXECUTION.getPreferredName(), builder.request().isRecordExecution()));
|
||||
builder.setIgnoreCondition(request.paramAsBoolean(Field.IGNORE_CONDITION.getPreferredName(), builder.request().isIgnoreCondition()));
|
||||
|
||||
XContentParser parser = XContentHelper.createParser(request.content());
|
||||
parser.nextToken();
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterService;
|
|||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.plugin.core.LicenseUtils;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.watcher.license.WatcherLicensee;
|
||||
|
@ -35,9 +36,9 @@ public abstract class WatcherTransportAction<Request extends MasterNodeRequest<R
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(Request request, ActionListener<Response> listener) {
|
||||
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
|
||||
if (watcherLicensee.isWatcherTransportActionAllowed()) {
|
||||
super.doExecute(request, listener);
|
||||
super.doExecute(task, request, listener);
|
||||
} else {
|
||||
listener.onFailure(LicenseUtils.newComplianceException(WatcherLicensee.ID));
|
||||
}
|
||||
|
|
|
@ -205,12 +205,7 @@ public class WatchStore extends AbstractComponent {
|
|||
|
||||
IndexRequest createIndexRequest(String id, BytesReference source, long version) {
|
||||
IndexRequest indexRequest = new IndexRequest(INDEX, DOC_TYPE, id);
|
||||
// TODO (2.0 upgrade): move back to BytesReference instead of dealing with the array directly
|
||||
if (source.hasArray()) {
|
||||
indexRequest.source(source.array(), source.arrayOffset(), source.length());
|
||||
} else {
|
||||
indexRequest.source(source.toBytes());
|
||||
}
|
||||
indexRequest.source(source.toBytes());
|
||||
indexRequest.version(version);
|
||||
return indexRequest;
|
||||
}
|
||||
|
|
|
@ -451,6 +451,87 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"pagerduty" : {
|
||||
"type": "object",
|
||||
"dynamic": true,
|
||||
"properties": {
|
||||
"account": {
|
||||
"type": "string",
|
||||
"index": "not_analyzed"
|
||||
},
|
||||
"sent_event": {
|
||||
"type": "nested",
|
||||
"include_in_parent": true,
|
||||
"dynamic": true,
|
||||
"properties": {
|
||||
"reason": {
|
||||
"type": "string"
|
||||
},
|
||||
"request" : {
|
||||
"type" : "object",
|
||||
"enabled" : false
|
||||
},
|
||||
"response" : {
|
||||
"type" : "object",
|
||||
"enabled" : false
|
||||
},
|
||||
"event" : {
|
||||
"type" : "object",
|
||||
"dynamic" : true,
|
||||
"properties" : {
|
||||
"type" : {
|
||||
"type" : "string",
|
||||
"index" : "not_analyzed"
|
||||
},
|
||||
"client" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"client_url" : {
|
||||
"index" : "not_analyzed",
|
||||
"type" : "string"
|
||||
},
|
||||
"account" : {
|
||||
"index" : "not_analyzed",
|
||||
"type" : "string"
|
||||
},
|
||||
"attach_payload" : {
|
||||
"type" : "boolean"
|
||||
},
|
||||
"incident_key" : {
|
||||
"index" : "not_analyzed",
|
||||
"type" : "string"
|
||||
},
|
||||
"description" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"context" : {
|
||||
"type" : "nested",
|
||||
"include_in_parent": true,
|
||||
"dynamic" : true,
|
||||
"properties" : {
|
||||
"type" : {
|
||||
"type" : "string",
|
||||
"index" : "not_analyzed"
|
||||
},
|
||||
"href" : {
|
||||
"type" : "string",
|
||||
"index" : "not_analyzed"
|
||||
},
|
||||
"src" : {
|
||||
"type" : "string",
|
||||
"index" : "not_analyzed"
|
||||
},
|
||||
"alt" : {
|
||||
"type" : "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -463,4 +544,4 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -43,12 +43,15 @@ public class WatcherF {
|
|||
settings.put("watcher.actions.hipchat.service.account.user.profile", "user");
|
||||
settings.put("watcher.actions.hipchat.service.account.user.auth_token", "FYVx16oDH78ZW9r13wtXbcszyoyA7oX5tiMWg9X0");
|
||||
|
||||
// this is for the `test-watcher-v1` notification token
|
||||
// this is for the `test-watcher-v1` notification token (hipchat)
|
||||
settings.put("watcher.actions.hipchat.service.account.v1.profile", "v1");
|
||||
settings.put("watcher.actions.hipchat.service.account.v1.auth_token", "a734baf62df618b96dda55b323fc30");
|
||||
|
||||
// this is for our test slack incoming webhook (under elasticsearch team)
|
||||
System.setProperty("es.watcher.actions.slack.service.account.a1.url", "https://hooks.slack.com/services/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");
|
||||
|
||||
System.setProperty("es.watcher.actions.pagerduty.service.account.service1.service_api_key", "fc082467005d4072a914e0bb041882d0");
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
final Node node = new MockNode(settings.build(), Version.CURRENT, Arrays.asList(XPackPlugin.class, XPackPlugin.class));
|
||||
Runtime.getRuntime().addShutdownHook(new Thread() {
|
||||
|
|
|
@ -5,9 +5,11 @@
|
|||
*/
|
||||
package org.elasticsearch.watcher.actions.email;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -15,24 +17,47 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.actions.Action;
|
||||
import org.elasticsearch.watcher.actions.email.service.Attachment;
|
||||
import org.elasticsearch.watcher.actions.email.service.Authentication;
|
||||
import org.elasticsearch.watcher.actions.email.service.Email;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailService;
|
||||
import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
|
||||
import org.elasticsearch.watcher.actions.email.service.HtmlSanitizer;
|
||||
import org.elasticsearch.watcher.actions.email.service.Profile;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.DataAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachments;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentsParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.HttpEmailAttachementParser;
|
||||
import org.elasticsearch.watcher.actions.email.service.attachment.HttpRequestAttachment;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.execution.Wid;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplateTests;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
import org.elasticsearch.watcher.support.http.auth.HttpAuthRegistry;
|
||||
import org.elasticsearch.watcher.support.http.auth.basic.BasicAuthFactory;
|
||||
import org.elasticsearch.watcher.support.secret.Secret;
|
||||
import org.elasticsearch.watcher.support.secret.SecretService;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplate;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.elasticsearch.watcher.support.xcontent.WatcherParams;
|
||||
import org.elasticsearch.watcher.test.AbstractWatcherIntegrationTestCase;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
import org.jboss.netty.handler.codec.http.HttpHeaders;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
|
@ -43,10 +68,12 @@ import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasKey;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
@ -54,6 +81,20 @@ import static org.mockito.Mockito.when;
|
|||
*
|
||||
*/
|
||||
public class EmailActionTests extends ESTestCase {
|
||||
|
||||
private SecretService secretService = mock(SecretService.class);
|
||||
private HttpAuthRegistry registry = new HttpAuthRegistry(singletonMap("basic", new BasicAuthFactory(secretService)));
|
||||
private HttpClient httpClient = mock(HttpClient.class);
|
||||
private EmailAttachmentsParser emailAttachmentParser;
|
||||
private Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
|
||||
|
||||
@Before
|
||||
public void addEmailAttachmentParsers() {
|
||||
emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, new HttpRequestTemplate.Parser(registry), new HttpRequestTemplateTests.MockTextTemplateEngine()));
|
||||
emailAttachmentParsers.put(DataAttachmentParser.TYPE, new DataAttachmentParser());
|
||||
emailAttachmentParser = new EmailAttachmentsParser(emailAttachmentParsers);
|
||||
}
|
||||
|
||||
public void testExecute() throws Exception {
|
||||
final String account = "account1";
|
||||
EmailService service = new AbstractWatcherIntegrationTestCase.NoopEmailService() {
|
||||
|
@ -92,9 +133,10 @@ public class EmailActionTests extends ESTestCase {
|
|||
Profile profile = randomFrom(Profile.values());
|
||||
|
||||
DataAttachment dataAttachment = randomDataAttachment();
|
||||
EmailAttachments emailAttachments = randomEmailAttachments();
|
||||
|
||||
EmailAction action = new EmailAction(email, account, auth, profile, dataAttachment);
|
||||
ExecutableEmailAction executable = new ExecutableEmailAction(action, logger, service, engine, htmlSanitizer);
|
||||
EmailAction action = new EmailAction(email, account, auth, profile, dataAttachment, emailAttachments);
|
||||
ExecutableEmailAction executable = new ExecutableEmailAction(action, logger, service, engine, htmlSanitizer, emailAttachmentParsers);
|
||||
|
||||
Map<String, Object> data = new HashMap<>();
|
||||
Payload payload = new Payload.Simple(data);
|
||||
|
@ -253,7 +295,7 @@ public class EmailActionTests extends ESTestCase {
|
|||
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
|
||||
parser.nextToken();
|
||||
|
||||
ExecutableEmailAction executable = new EmailActionFactory(Settings.EMPTY, emailService, engine, htmlSanitizer)
|
||||
ExecutableEmailAction executable = new EmailActionFactory(Settings.EMPTY, emailService, engine, htmlSanitizer, emailAttachmentParser, Collections.emptyMap())
|
||||
.parseExecutable(randomAsciiOfLength(8), randomAsciiOfLength(3), parser);
|
||||
|
||||
assertThat(executable, notNullValue());
|
||||
|
@ -331,9 +373,10 @@ public class EmailActionTests extends ESTestCase {
|
|||
Profile profile = randomFrom(Profile.values());
|
||||
String account = randomAsciiOfLength(6);
|
||||
DataAttachment dataAttachment = randomDataAttachment();
|
||||
EmailAttachments emailAttachments = randomEmailAttachments();
|
||||
|
||||
EmailAction action = new EmailAction(email, account, auth, profile, dataAttachment);
|
||||
ExecutableEmailAction executable = new ExecutableEmailAction(action, logger, service, engine, htmlSanitizer);
|
||||
EmailAction action = new EmailAction(email, account, auth, profile, dataAttachment, emailAttachments);
|
||||
ExecutableEmailAction executable = new ExecutableEmailAction(action, logger, service, engine, htmlSanitizer, emailAttachmentParsers);
|
||||
|
||||
boolean hideSecrets = randomBoolean();
|
||||
ToXContent.Params params = WatcherParams.builder().hideSecrets(hideSecrets).build();
|
||||
|
@ -344,7 +387,8 @@ public class EmailActionTests extends ESTestCase {
|
|||
logger.info(bytes.toUtf8());
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
|
||||
parser.nextToken();
|
||||
ExecutableEmailAction parsed = new EmailActionFactory(Settings.EMPTY, service, engine, htmlSanitizer)
|
||||
|
||||
ExecutableEmailAction parsed = new EmailActionFactory(Settings.EMPTY, service, engine, htmlSanitizer, emailAttachmentParser, emailAttachmentParsers)
|
||||
.parseExecutable(randomAsciiOfLength(4), randomAsciiOfLength(10), parser);
|
||||
|
||||
if (!hideSecrets) {
|
||||
|
@ -369,18 +413,107 @@ public class EmailActionTests extends ESTestCase {
|
|||
EmailService emailService = mock(EmailService.class);
|
||||
TextTemplateEngine engine = mock(TextTemplateEngine.class);
|
||||
HtmlSanitizer htmlSanitizer = mock(HtmlSanitizer.class);
|
||||
EmailAttachmentsParser emailAttachmentsParser = mock(EmailAttachmentsParser.class);
|
||||
|
||||
XContentBuilder builder = jsonBuilder().startObject().field("unknown_field", "value");
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
parser.nextToken();
|
||||
try {
|
||||
new EmailActionFactory(Settings.EMPTY, emailService, engine, htmlSanitizer)
|
||||
new EmailActionFactory(Settings.EMPTY, emailService, engine, htmlSanitizer, emailAttachmentsParser, Collections.emptyMap())
|
||||
.parseExecutable(randomAsciiOfLength(3), randomAsciiOfLength(7), parser);
|
||||
fail("Expected an ElasticsearchParseException due to the unknown field");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("unexpected string field [unknown_field]"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRequestAttachmentGetsAppendedToEmailAttachments() throws Exception {
|
||||
String attachmentId = "my_attachment";
|
||||
|
||||
EmailService emailService = new AbstractWatcherIntegrationTestCase.NoopEmailService();
|
||||
TextTemplateEngine engine = mock(TextTemplateEngine.class);
|
||||
HtmlSanitizer htmlSanitizer = mock(HtmlSanitizer.class);
|
||||
HttpClient httpClient = mock(HttpClient.class);
|
||||
|
||||
// setup mock response
|
||||
Map<String, String[]> headers = new HashMap<>(1);
|
||||
headers.put(HttpHeaders.Names.CONTENT_TYPE, new String[]{"text/plain"});
|
||||
String content = "My wonderful text";
|
||||
HttpResponse mockResponse = new HttpResponse(200, content, headers);
|
||||
when(httpClient.execute(any(HttpRequest.class))).thenReturn(mockResponse);
|
||||
|
||||
// setup email attachment parsers
|
||||
HttpRequestTemplate.Parser httpRequestTemplateParser = new HttpRequestTemplate.Parser(registry);
|
||||
Map<String, EmailAttachmentParser> attachmentParsers = new HashMap<>();
|
||||
attachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpRequestTemplateParser, engine));
|
||||
EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(attachmentParsers);
|
||||
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startObject("attachments")
|
||||
.startObject(attachmentId)
|
||||
.startObject("http")
|
||||
.startObject("request")
|
||||
.field("host", "localhost")
|
||||
.field("port", 443)
|
||||
.field("path", "/the/evil/test")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
logger.info("JSON: {}", builder.string());
|
||||
|
||||
parser.nextToken();
|
||||
|
||||
ExecutableEmailAction executableEmailAction = new EmailActionFactory(Settings.EMPTY, emailService, engine, htmlSanitizer, emailAttachmentsParser, attachmentParsers)
|
||||
.parseExecutable(randomAsciiOfLength(3), randomAsciiOfLength(7), parser);
|
||||
|
||||
DateTime now = DateTime.now(DateTimeZone.UTC);
|
||||
Wid wid = new Wid(randomAsciiOfLength(5), randomLong(), now);
|
||||
Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();
|
||||
WatchExecutionContext ctx = mockExecutionContextBuilder("watch1")
|
||||
.wid(wid)
|
||||
.payload(new Payload.Simple())
|
||||
.time("watch1", now)
|
||||
.metadata(metadata)
|
||||
.buildMock();
|
||||
|
||||
Action.Result result = executableEmailAction.execute("test", ctx, new Payload.Simple());
|
||||
assertThat(result, instanceOf(EmailAction.Result.Success.class));
|
||||
|
||||
EmailAction.Result.Success successResult = (EmailAction.Result.Success) result;
|
||||
Map<String, Attachment> attachments = successResult.email().attachments();
|
||||
assertThat(attachments.keySet(), hasSize(1));
|
||||
assertThat(attachments, hasKey(attachmentId));
|
||||
Attachment externalAttachment = attachments.get(attachmentId);
|
||||
|
||||
assertThat(externalAttachment.bodyPart(), is(notNullValue()));
|
||||
InputStream is = externalAttachment.bodyPart().getInputStream();
|
||||
String data = Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8));
|
||||
assertThat(data, is(content));
|
||||
}
|
||||
|
||||
static DataAttachment randomDataAttachment() {
|
||||
return randomFrom(DataAttachment.JSON, DataAttachment.YAML, null);
|
||||
}
|
||||
|
||||
private EmailAttachments randomEmailAttachments() throws IOException {
|
||||
List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
|
||||
|
||||
String attachmentType = randomFrom("http", "data", null);
|
||||
if ("http".equals(attachmentType)) {
|
||||
Map<String, String[]> headers = new HashMap<>(1);
|
||||
headers.put(HttpHeaders.Names.CONTENT_TYPE, new String[]{"text/plain"});
|
||||
String content = "My wonderful text";
|
||||
HttpResponse mockResponse = new HttpResponse(200, content, headers);
|
||||
when(httpClient.execute(any(HttpRequest.class))).thenReturn(mockResponse);
|
||||
|
||||
HttpRequestTemplate template = HttpRequestTemplate.builder("localhost", 1234).build();
|
||||
attachments.add(new HttpRequestAttachment(randomAsciiOfLength(10), template, randomFrom("my/custom-type", null)));
|
||||
} else if ("data".equals(attachmentType)) {
|
||||
attachments.add(new org.elasticsearch.watcher.actions.email.service.attachment.DataAttachment(randomAsciiOfLength(10), randomFrom(DataAttachment.JSON, DataAttachment.YAML)));
|
||||
}
|
||||
|
||||
return new EmailAttachments(attachments);
|
||||
}
|
||||
}
|
||||
|
|
|
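Pulling the attachment fixtures from these tests together, the attachments block of an email action would look roughly like this; the attachment ids, host, path and content type are illustrative.
"attachments" : {
  "my_data" : {
    "data" : { "format" : "yaml" }
  },
  "my_report" : {
    "http" : {
      "content_type" : "application/pdf",
      "request" : { "host" : "localhost", "port" : 443, "path" : "/the/report" }
    }
  }
}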
@ -0,0 +1,44 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
|
||||
public class DataAttachmentParserTests extends ESTestCase {
|
||||
|
||||
public void testSerializationWorks() throws Exception {
|
||||
Map<String, EmailAttachmentParser> attachmentParsers = new HashMap<>();
|
||||
attachmentParsers.put(DataAttachmentParser.TYPE, new DataAttachmentParser());
|
||||
EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(attachmentParsers);
|
||||
|
||||
String id = "some-id";
|
||||
XContentBuilder builder = jsonBuilder().startObject().startObject(id)
|
||||
.startObject(DataAttachmentParser.TYPE).field("format", randomFrom("yaml", "json")).endObject()
|
||||
.endObject().endObject();
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
logger.info("JSON: {}", builder.string());
|
||||
|
||||
EmailAttachments emailAttachments = emailAttachmentsParser.parse(parser);
|
||||
assertThat(emailAttachments.getAttachments(), hasSize(1));
|
||||
|
||||
XContentBuilder toXcontentBuilder = jsonBuilder().startObject();
|
||||
emailAttachments.getAttachments().get(0).toXContent(toXcontentBuilder, ToXContent.EMPTY_PARAMS);
|
||||
toXcontentBuilder.endObject();
|
||||
assertThat(toXcontentBuilder.string(), is(builder.string()));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,180 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.actions.email.service.Attachment;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
import org.elasticsearch.watcher.support.http.Scheme;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.mockito.Mockito.mock;
|
||||
|
||||
public class EmailAttachmentParsersTests extends ESTestCase {
|
||||
|
||||
private WatchExecutionContext ctx = mock(WatchExecutionContext.class);
|
||||
|
||||
public void testThatCustomParsersCanBeRegistered() throws Exception {
|
||||
Map<String, EmailAttachmentParser> parsers = new HashMap<>();
|
||||
parsers.put("test", new TestEmailAttachmentParser());
|
||||
EmailAttachmentsParser parser = new EmailAttachmentsParser(parsers);
|
||||
|
||||
XContentBuilder builder = jsonBuilder();
|
||||
builder.startObject()
|
||||
.startObject("my-id")
|
||||
.startObject("test")
|
||||
.field("foo", "bar")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("my-other-id")
|
||||
.startObject("test")
|
||||
.field("foo", "baz")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
logger.info("JSON: {}", builder.string());
|
||||
XContentParser xContentParser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
EmailAttachments attachments = parser.parse(xContentParser);
|
||||
assertThat(attachments.getAttachments(), hasSize(2));
|
||||
|
||||
EmailAttachmentParser.EmailAttachment emailAttachment = attachments.getAttachments().get(0);
|
||||
assertThat(emailAttachment, instanceOf(TestEmailAttachment.class));
|
||||
|
||||
Attachment attachment = parsers.get("test").toAttachment(ctx, new Payload.Simple(), emailAttachment);
|
||||
assertThat(attachment.name(), is("my-id"));
|
||||
assertThat(attachment.contentType(), is("personalContentType"));
|
||||
|
||||
assertThat(parsers.get("test").toAttachment(ctx, new Payload.Simple(), attachments.getAttachments().get(1)).id(), is("my-other-id"));
|
||||
}
|
||||
|
||||
public void testThatUnknownParserThrowsException() throws IOException {
|
||||
EmailAttachmentsParser parser = new EmailAttachmentsParser(Collections.emptyMap());
|
||||
|
||||
XContentBuilder builder = jsonBuilder();
|
||||
String type = randomAsciiOfLength(8);
|
||||
builder.startObject().startObject("some-id").startObject(type);
|
||||
|
||||
XContentParser xContentParser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
try {
|
||||
parser.parse(xContentParser);
|
||||
fail("Expected random parser of type [" + type + "] to throw an exception");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("Cannot parse attachment of type " + type));
|
||||
}
|
||||
}
|
||||
|
||||
public void testThatToXContentSerializationWorks() throws Exception {
|
||||
List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
|
||||
attachments.add(new DataAttachment("my-id", org.elasticsearch.watcher.actions.email.DataAttachment.JSON));
|
||||
|
||||
HttpRequestTemplate requestTemplate = HttpRequestTemplate.builder("localhost", 80).scheme(Scheme.HTTP).path("/").build();
|
||||
HttpRequestAttachment httpRequestAttachment = new HttpRequestAttachment("other-id", requestTemplate, null);
|
||||
|
||||
attachments.add(httpRequestAttachment);
|
||||
EmailAttachments emailAttachments = new EmailAttachments(attachments);
|
||||
XContentBuilder builder = jsonBuilder();
|
||||
emailAttachments.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
logger.info("JSON is: " + builder.string());
|
||||
assertThat(builder.string(), containsString("my-id"));
|
||||
assertThat(builder.string(), containsString("json"));
|
||||
assertThat(builder.string(), containsString("other-id"));
|
||||
assertThat(builder.string(), containsString("localhost"));
|
||||
assertThat(builder.string(), containsString("/"));
|
||||
}
|
||||
|
||||
public class TestEmailAttachmentParser implements EmailAttachmentParser<TestEmailAttachment> {
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return "test";
|
||||
}
|
||||
|
||||
@Override
|
||||
public TestEmailAttachment parse(String id, XContentParser parser) throws IOException {
|
||||
TestEmailAttachment attachment = null;
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else {
|
||||
if ("foo".equals(currentFieldName)) {
|
||||
attachment = new TestEmailAttachment(id, parser.text());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (attachment == null) {
|
||||
throw new ElasticsearchParseException("Expected test parser to have field [foo]");
|
||||
}
|
||||
|
||||
return attachment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Attachment toAttachment(WatchExecutionContext ctx, Payload payload, TestEmailAttachment attachment) {
|
||||
return new Attachment.Bytes(attachment.getId(), attachment.getValue().getBytes(Charsets.UTF_8), "personalContentType");
|
||||
}
|
||||
}
|
||||
|
||||
public static class TestEmailAttachment implements EmailAttachmentParser.EmailAttachment {
|
||||
|
||||
private final String value;
|
||||
private final String id;
|
||||
|
||||
interface Fields {
|
||||
ParseField FOO = new ParseField("foo");
|
||||
}
|
||||
|
||||
public TestEmailAttachment(String id, String value) {
|
||||
this.id = id;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return "test";
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder.startObject(id)
|
||||
.startObject(type())
|
||||
.field(Fields.FOO.getPreferredName(), value)
|
||||
.endObject()
|
||||
.endObject();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,90 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.email.service.attachment;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.support.http.HttpClient;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequestTemplateTests;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
import org.elasticsearch.watcher.support.http.auth.HttpAuthRegistry;
|
||||
import org.elasticsearch.watcher.support.http.auth.basic.BasicAuth;
|
||||
import org.elasticsearch.watcher.support.http.auth.basic.BasicAuthFactory;
|
||||
import org.elasticsearch.watcher.support.secret.SecretService;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static java.util.Collections.singletonMap;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class HttpEmailAttachementParserTests extends ESTestCase {
|
||||
|
||||
private SecretService.PlainText secretService;
|
||||
private HttpAuthRegistry authRegistry;
|
||||
private HttpRequestTemplate.Parser httpRequestTemplateParser;
|
||||
private HttpClient httpClient;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
secretService = new SecretService.PlainText();
|
||||
authRegistry = new HttpAuthRegistry(singletonMap(BasicAuth.TYPE, new BasicAuthFactory(secretService)));
|
||||
httpRequestTemplateParser = new HttpRequestTemplate.Parser(authRegistry);
|
||||
httpClient = mock(HttpClient.class);
|
||||
|
||||
HttpResponse response = new HttpResponse(200, "This is my response".getBytes(UTF_8));
|
||||
when(httpClient.execute(any(HttpRequest.class))).thenReturn(response);
|
||||
}
|
||||
|
||||
|
||||
public void testSerializationWorks() throws Exception {
|
||||
Map<String, EmailAttachmentParser> attachmentParsers = new HashMap<>();
|
||||
attachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpRequestTemplateParser, new HttpRequestTemplateTests.MockTextTemplateEngine()));
|
||||
EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(attachmentParsers);
|
||||
|
||||
String id = "some-id";
|
||||
XContentBuilder builder = jsonBuilder().startObject().startObject(id)
|
||||
.startObject(HttpEmailAttachementParser.TYPE)
|
||||
.startObject("request")
|
||||
.field("scheme", "http")
|
||||
.field("host", "test.de")
|
||||
.field("port", 80)
|
||||
.field("method", "get")
|
||||
.field("path", "/foo")
|
||||
.startObject("params").endObject()
|
||||
.startObject("headers").endObject()
|
||||
.endObject();
|
||||
|
||||
boolean configureContentType = randomBoolean();
|
||||
if (configureContentType) {
|
||||
builder.field("content_type", "application/foo");
|
||||
}
|
||||
builder.endObject().endObject().endObject();
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
|
||||
logger.info("JSON: {}", builder.string());
|
||||
|
||||
EmailAttachments emailAttachments = emailAttachmentsParser.parse(parser);
|
||||
assertThat(emailAttachments.getAttachments(), hasSize(1));
|
||||
|
||||
XContentBuilder toXcontentBuilder = jsonBuilder().startObject();
|
||||
emailAttachments.getAttachments().get(0).toXContent(toXcontentBuilder, ToXContent.EMPTY_PARAMS);
|
||||
toXcontentBuilder.endObject();
|
||||
assertThat(toXcontentBuilder.string(), is(builder.string()));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyAccount;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.junit.Before;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.watcher.actions.ActionBuilders.triggerPagerDutyAction;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyActionFactoryTests extends ESTestCase {
|
||||
|
||||
private PagerDutyActionFactory factory;
|
||||
private PagerDutyService service;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
service = mock(PagerDutyService.class);
|
||||
factory = new PagerDutyActionFactory(Settings.EMPTY, mock(TextTemplateEngine.class), service);
|
||||
}
|
||||
|
||||
public void testParseAction() throws Exception {
|
||||
|
||||
PagerDutyAccount account = mock(PagerDutyAccount.class);
|
||||
when(service.getAccount("_account1")).thenReturn(account);
|
||||
|
||||
PagerDutyAction action = triggerPagerDutyAction("_account1", "_description").build();
|
||||
XContentBuilder jsonBuilder = jsonBuilder().value(action);
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(jsonBuilder.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
PagerDutyAction parsedAction = factory.parseAction("_w1", "_a1", parser);
|
||||
assertThat(parsedAction, is(action));
|
||||
}
|
||||
|
||||
public void testParseActionUnknownAccount() throws Exception {
|
||||
try {
|
||||
when(service.getAccount("_unknown")).thenReturn(null);
|
||||
|
||||
PagerDutyAction action = triggerPagerDutyAction("_unknown", "_body").build();
|
||||
XContentBuilder jsonBuilder = jsonBuilder().value(action);
|
||||
XContentParser parser = JsonXContent.jsonXContent.createParser(jsonBuilder.bytes());
|
||||
parser.nextToken();
|
||||
factory.parseAction("_w1", "_a1", parser);
|
||||
fail("Expected ElasticsearchParseException due to unknown account");
|
||||
} catch (ElasticsearchParseException e) {}
|
||||
}
|
||||
}
|
|
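For reference, the action built here by triggerPagerDutyAction("_account1", "_description") corresponds to an action body along these lines; only the two populated fields are shown, and the remaining fields exercised in the parser test that follows are optional.
{
  "account" : "_account1",
  "description" : "_description"
}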
@ -0,0 +1,243 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.watcher.actions.pagerduty;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.watcher.actions.Action;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEvent;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEventContext;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.IncidentEventDefaults;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyAccount;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.PagerDutyService;
|
||||
import org.elasticsearch.watcher.actions.pagerduty.service.SentEvent;
|
||||
import org.elasticsearch.watcher.execution.WatchExecutionContext;
|
||||
import org.elasticsearch.watcher.execution.Wid;
|
||||
import org.elasticsearch.watcher.support.http.HttpRequest;
|
||||
import org.elasticsearch.watcher.support.http.HttpResponse;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplate;
|
||||
import org.elasticsearch.watcher.support.text.TextTemplateEngine;
|
||||
import org.elasticsearch.watcher.watch.Payload;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.watcher.actions.ActionBuilders.pagerDutyAction;
|
||||
import static org.elasticsearch.watcher.test.WatcherTestUtils.mockExecutionContextBuilder;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.sameInstance;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class PagerDutyActionTests extends ESTestCase {
|
||||
|
||||
private PagerDutyService service;
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
service = mock(PagerDutyService.class);
|
||||
}
|
||||
|
||||
    public void testExecute() throws Exception {
        final String accountName = "account1";

        TextTemplateEngine templateEngine = mock(TextTemplateEngine.class);

        TextTemplate description = TextTemplate.inline("_description").build();
        IncidentEvent.Template.Builder eventBuilder = new IncidentEvent.Template.Builder(description);
        boolean attachPayload = randomBoolean();
        eventBuilder.setAttachPayload(attachPayload);
        eventBuilder.setAccount(accountName);
        IncidentEvent.Template eventTemplate = eventBuilder.build();

        PagerDutyAction action = new PagerDutyAction(eventTemplate);
        ExecutablePagerDutyAction executable = new ExecutablePagerDutyAction(action, logger, service, templateEngine);

        Map<String, Object> data = new HashMap<>();
        Payload payload = new Payload.Simple(data);

        Map<String, Object> metadata = MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map();

        DateTime now = DateTime.now(DateTimeZone.UTC);

        Wid wid = new Wid(randomAsciiOfLength(5), randomLong(), now);
        WatchExecutionContext ctx = mockExecutionContextBuilder(wid.watchId())
                .wid(wid)
                .payload(payload)
                .time(wid.watchId(), now)
                .metadata(metadata)
                .buildMock();

        Map<String, Object> ctxModel = new HashMap<>();
        ctxModel.put("id", ctx.id().value());
        ctxModel.put("watch_id", wid.watchId());
        ctxModel.put("payload", data);
        ctxModel.put("metadata", metadata);
        ctxModel.put("execution_time", now);
        Map<String, Object> triggerModel = new HashMap<>();
        triggerModel.put("triggered_time", now);
        triggerModel.put("scheduled_time", now);
        ctxModel.put("trigger", triggerModel);
        ctxModel.put("vars", Collections.emptyMap());
        Map<String, Object> expectedModel = new HashMap<>();
        expectedModel.put("ctx", ctxModel);

        when(templateEngine.render(description, expectedModel)).thenReturn(description.getTemplate());

        IncidentEvent event = new IncidentEvent(description.getTemplate(), null, wid.watchId(), null, null, accountName, attachPayload, null);
        PagerDutyAccount account = mock(PagerDutyAccount.class);
        when(account.getDefaults()).thenReturn(new IncidentEventDefaults(Settings.EMPTY));
        HttpResponse response = mock(HttpResponse.class);
        when(response.status()).thenReturn(200);
        HttpRequest request = mock(HttpRequest.class);
        SentEvent sentEvent = SentEvent.responded(event, request, response);
        when(account.send(event, payload)).thenReturn(sentEvent);
        when(service.getAccount(accountName)).thenReturn(account);

        Action.Result result = executable.execute("_id", ctx, payload);

        assertThat(result, notNullValue());
        assertThat(result, instanceOf(PagerDutyAction.Result.Executed.class));
        assertThat(result.status(), equalTo(Action.Result.Status.SUCCESS));
        assertThat(((PagerDutyAction.Result.Executed) result).sentEvent(), sameInstance(sentEvent));
    }

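    // Builds a random action definition as JSON and checks that PagerDutyAction.parse reconstructs the same
    // event template. The generated document looks roughly like (every field except "account" is optional):
    // { "account": "...", "incident_key": "...", "description": "...", "client": "...",
    //   "client_url": "...", "eventType": "...", "attach_payload": true, "context": [ ... ] }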
    public void testParser() throws Exception {

        XContentBuilder builder = jsonBuilder().startObject();

        String accountName = randomAsciiOfLength(10);
        builder.field("account", accountName);

        TextTemplate incidentKey = null;
        if (randomBoolean()) {
            incidentKey = TextTemplate.inline("_incident_key").build();
            builder.field("incident_key", incidentKey);
        }

        TextTemplate description = null;
        if (randomBoolean()) {
            description = TextTemplate.inline("_description").build();
            builder.field("description", description);
        }

        TextTemplate client = null;
        if (randomBoolean()) {
            client = TextTemplate.inline("_client").build();
            builder.field("client", client);
        }

        TextTemplate clientUrl = null;
        if (randomBoolean()) {
            clientUrl = TextTemplate.inline("_client_url").build();
            builder.field("client_url", clientUrl);
        }

        TextTemplate eventType = null;
        if (randomBoolean()) {
            eventType = TextTemplate.inline(randomFrom("trigger", "resolve", "acknowledge")).build();
            builder.field("eventType", eventType);
        }

        Boolean attachPayload = randomBoolean() ? null : randomBoolean();
        if (attachPayload != null) {
            builder.field("attach_payload", attachPayload.booleanValue());
        }

        IncidentEventContext.Template[] contexts = null;
        if (randomBoolean()) {
            contexts = new IncidentEventContext.Template[] {
                    IncidentEventContext.Template.link(TextTemplate.inline("_href").build(), TextTemplate.inline("_text").build()),
                    IncidentEventContext.Template.image(TextTemplate.inline("_src").build(), TextTemplate.inline("_href").build(), TextTemplate.inline("_alt").build())
            };
            builder.array("context", (Object) contexts);
        }

        builder.endObject();

        BytesReference bytes = builder.bytes();
        logger.info("pagerduty action json [{}]", bytes.toUtf8());
        XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
        parser.nextToken();

        PagerDutyAction action = PagerDutyAction.parse("_watch", "_action", parser);

        assertThat(action, notNullValue());
        assertThat(action.event.account, is(accountName));
        assertThat(action.event, notNullValue());
        assertThat(action.event, instanceOf(IncidentEvent.Template.class));
        assertThat(action.event, is(new IncidentEvent.Template(description, eventType, incidentKey, client, clientUrl, accountName, attachPayload, contexts)));
    }

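    // Round-trips a randomly populated action through toXContent and parse, expecting an equal action back.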
    public void testParserSelfGenerated() throws Exception {
        IncidentEvent.Template.Builder event = IncidentEvent.templateBuilder(randomAsciiOfLength(50));

        if (randomBoolean()) {
            event.setIncidentKey(TextTemplate.inline(randomAsciiOfLength(50)).build());
        }
        if (randomBoolean()) {
            event.setClient(TextTemplate.inline(randomAsciiOfLength(50)).build());
        }
        if (randomBoolean()) {
            event.setClientUrl(TextTemplate.inline(randomAsciiOfLength(50)).build());
        }
        if (randomBoolean()) {
            event.setAttachPayload(randomBoolean());
        }
        if (randomBoolean()) {
            event.addContext(IncidentEventContext.Template.link(TextTemplate.inline("_href").build(), TextTemplate.inline("_text").build()));
        }
        if (randomBoolean()) {
            event.addContext(IncidentEventContext.Template.image(TextTemplate.inline("_src").build(), TextTemplate.inline("_href").build(), TextTemplate.inline("_alt").build()));
        }
        if (randomBoolean()) {
            event.setEventType(TextTemplate.inline(randomAsciiOfLength(50)).build());
        }
        if (randomBoolean()) {
            event.setAccount(randomAsciiOfLength(50)).build();
        }

        PagerDutyAction action = pagerDutyAction(event).build();
        XContentBuilder jsonBuilder = jsonBuilder();
        action.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS);
        XContentParser parser = JsonXContent.jsonXContent.createParser(jsonBuilder.bytes());
        parser.nextToken();

        PagerDutyAction parsedAction = PagerDutyAction.parse("_w1", "_a1", parser);
        assertThat(parsedAction, notNullValue());
        assertThat(parsedAction, is(action));
    }

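    // An action definition containing an unknown field must be rejected with an ElasticsearchParseException.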
    public void testParserInvalid() throws Exception {
        try {
            XContentBuilder builder = jsonBuilder().startObject().field("unknown_field", "value");
            XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes());
            parser.nextToken();
            PagerDutyAction.parse("_watch", "_action", parser);
            fail("Expected an ElasticsearchParseException to be thrown");
        } catch (ElasticsearchParseException e) {
            // expected
        }
    }
}

@ -0,0 +1,113 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.watcher.actions.pagerduty.service;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.is;

/**
 * Tests that {@link IncidentEventDefaults} picks up its default values from the account settings.
 */
public class IncidentEventDefaultsTests extends ESTestCase {

    public void testConstructor() throws Exception {
        Settings settings = randomSettings();
        IncidentEventDefaults defaults = new IncidentEventDefaults(settings);
        assertThat(defaults.incidentKey, is(settings.get("incident_key", null)));
        assertThat(defaults.description, is(settings.get("description", null)));
        assertThat(defaults.clientUrl, is(settings.get("client_url", null)));
        assertThat(defaults.client, is(settings.get("client", null)));
        assertThat(defaults.eventType, is(settings.get("event_type", null)));
        assertThat(defaults.attachPayload, is(settings.getAsBoolean("attach_payload", false)));
        if (settings.getAsSettings("link").names().isEmpty()) {
            IncidentEventDefaults.Context.LinkDefaults linkDefaults = new IncidentEventDefaults.Context.LinkDefaults(Settings.EMPTY);
            assertThat(defaults.link, is(linkDefaults));
        } else {
            assertThat(defaults.link, notNullValue());
            assertThat(defaults.link.href, is(settings.get("link.href", null)));
            assertThat(defaults.link.text, is(settings.get("link.text", null)));
        }
        if (settings.getAsSettings("image").names().isEmpty()) {
            IncidentEventDefaults.Context.ImageDefaults imageDefaults = new IncidentEventDefaults.Context.ImageDefaults(Settings.EMPTY);
            assertThat(defaults.image, is(imageDefaults));
        } else {
            assertThat(defaults.image, notNullValue());
            assertThat(defaults.image.href, is(settings.get("image.href", null)));
            assertThat(defaults.image.alt, is(settings.get("image.alt", null)));
            assertThat(defaults.image.src, is(settings.get("image.src", null)));
        }
    }

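    // Generates a random settings blob for the defaults; testConstructor only asserts on the keys that
    // IncidentEventDefaults exposes (e.g. "event_type"), the remaining keys are not asserted on.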
    public static Settings randomSettings() {
        Settings.Builder settings = Settings.builder();
        if (randomBoolean()) {
            settings.put("from", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            String[] to = new String[randomIntBetween(1, 3)];
            for (int i = 0; i < to.length; i++) {
                to[i] = randomAsciiOfLength(10);
            }
            settings.putArray("to", to);
        }
        if (randomBoolean()) {
            settings.put("text", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("event_type", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("icon", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.fallback", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.color", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.pretext", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.author_name", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.author_link", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.author_icon", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.title", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.title_link", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.text", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.image_url", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.thumb_url", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.field.title", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.field.value", randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            settings.put("attachment.field.short", randomBoolean());
        }
        return settings.build();
    }

}

@ -0,0 +1,133 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.watcher.actions.pagerduty.service;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.actions.slack.service.message.SlackMessageDefaultsTests;
import org.elasticsearch.watcher.support.http.HttpClient;
import org.junit.Before;

import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Mockito.mock;

/**
 * Tests account lookup in {@link PagerDutyAccounts}: explicit and implicit default accounts as well as
 * misconfigured defaults.
 */
public class PagerDutyAccountsTests extends ESTestCase {

    private HttpClient httpClient;

    @Before
    public void init() throws Exception {
        httpClient = mock(HttpClient.class);
    }

    public void testSingleAccount() throws Exception {
        Settings.Builder builder = Settings.builder()
                .put("default_account", "account1");
        addAccountSettings("account1", builder);

        PagerDutyAccounts accounts = new PagerDutyAccounts(builder.build(), httpClient, logger);
        PagerDutyAccount account = accounts.account("account1");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
        account = accounts.account(null); // falling back on the default
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
    }

    public void testSingleAccountNoExplicitDefault() throws Exception {
        Settings.Builder builder = Settings.builder();
        addAccountSettings("account1", builder);

        PagerDutyAccounts accounts = new PagerDutyAccounts(builder.build(), httpClient, logger);
        PagerDutyAccount account = accounts.account("account1");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
        account = accounts.account(null); // falling back on the default
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
    }

    public void testMultipleAccounts() throws Exception {
        Settings.Builder builder = Settings.builder()
                .put("default_account", "account1");
        addAccountSettings("account1", builder);
        addAccountSettings("account2", builder);

        PagerDutyAccounts accounts = new PagerDutyAccounts(builder.build(), httpClient, logger);
        PagerDutyAccount account = accounts.account("account1");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
        account = accounts.account("account2");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account2"));
        account = accounts.account(null); // falling back on the default
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
    }

    public void testMultipleAccounts_NoExplicitDefault() throws Exception {
        Settings.Builder builder = Settings.builder()
                .put("default_account", "account1");
        addAccountSettings("account1", builder);
        addAccountSettings("account2", builder);

        PagerDutyAccounts accounts = new PagerDutyAccounts(builder.build(), httpClient, logger);
        PagerDutyAccount account = accounts.account("account1");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account1"));
        account = accounts.account("account2");
        assertThat(account, notNullValue());
        assertThat(account.name, equalTo("account2"));
        account = accounts.account(null);
        assertThat(account, notNullValue());
        assertThat(account.name, isOneOf("account1", "account2"));
    }

    public void testMultipleAccounts_UnknownDefault() throws Exception {
        try {
            Settings.Builder builder = Settings.builder()
                    .put("default_account", "unknown");
            addAccountSettings("account1", builder);
            addAccountSettings("account2", builder);
            new PagerDutyAccounts(builder.build(), httpClient, logger);
            fail("Expected a SettingsException to be thrown");
        } catch (SettingsException e) {}
    }

    public void testNoAccount() throws Exception {
        try {
            Settings.Builder builder = Settings.builder();
            PagerDutyAccounts accounts = new PagerDutyAccounts(builder.build(), httpClient, logger);
            accounts.account(null);
            fail("no accounts are configured, so looking up the default account should throw an IllegalStateException");
        } catch (IllegalStateException e) {}
    }

    public void testNoAccount_WithDefaultAccount() throws Exception {
        try {
            Settings.Builder builder = Settings.builder()
                    .put("default_account", "unknown");
            new PagerDutyAccounts(builder.build(), httpClient, logger);
            fail("Expected a SettingsException to be thrown");
        } catch (SettingsException e) {}
    }

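    // Registers an account with a random service_api_key and adds random Slack message defaults under
    // the "message_defaults." prefix.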
    private void addAccountSettings(String name, Settings.Builder builder) {
        builder.put("account." + name + ".service_api_key", randomAsciiOfLength(50));
        Settings defaults = SlackMessageDefaultsTests.randomSettings();
        for (Map.Entry<String, String> setting : defaults.getAsMap().entrySet()) {
            builder.put("message_defaults." + setting.getKey(), setting.getValue());
        }
    }
}

@ -0,0 +1,102 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.watcher.actions.pagerduty.service;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.junit.annotations.Network;
import org.elasticsearch.watcher.actions.pagerduty.PagerDutyAction;
import org.elasticsearch.watcher.test.AbstractWatcherIntegrationTestCase;
import org.elasticsearch.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.watcher.watch.Payload;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.watcher.actions.ActionBuilders.pagerDutyAction;
import static org.elasticsearch.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.watcher.condition.ConditionBuilders.alwaysCondition;
import static org.elasticsearch.watcher.input.InputBuilders.simpleInput;
import static org.elasticsearch.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests that send real incident events to PagerDuty through the configured test account.
 */
@Network
public class PagerDutyServiceTests extends AbstractWatcherIntegrationTestCase {

    @Override
    protected boolean timeWarped() {
        return true;
    }

    @Override
    protected boolean enableShield() {
        return false;
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("watcher.actions.pagerduty.service.account.test_account.service_api_key", "fc082467005d4072a914e0bb041882d0")
                .build();
    }

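    // Sends a real trigger event (with link and image contexts) through the "test_account" account configured
    // above; as an @Network test this is only meant to run when network tests are enabled.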
    public void testSendTriggerEvent() throws Exception {
        PagerDutyService service = getInstanceFromMaster(PagerDutyService.class);

        IncidentEvent event = new IncidentEvent("#testIncidentEvent()", null, null, "PagerDutyServiceTests", "_client_url", "_account", true, new IncidentEventContext[] {
                IncidentEventContext.link("_href", "_text"),
                IncidentEventContext.image("_src", "_href", "_alt")
        });

        Payload payload = new Payload.Simple("_key", "_val");

        PagerDutyAccount account = service.getAccount("test_account");
        assertThat(account, notNullValue());
        SentEvent sentEvent = account.send(event, payload);
        assertThat(sentEvent, notNullValue());
        assertThat(sentEvent.successful(), is(true));
        assertThat(sentEvent.request, notNullValue());
        assertThat(sentEvent.response, notNullValue());
        assertThat(sentEvent.response.status(), lessThan(300));
    }

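    // Registers a watch with a pagerduty action, fires it through the time-warped scheduler and verifies that
    // the watch history records a successful "pd" pagerduty action for the test account.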
    public void testWatchWithPagerDutyAction() throws Exception {
        String account = "test_account";
        PagerDutyAction.Builder actionBuilder = pagerDutyAction(IncidentEvent
                .templateBuilder("pager duty integration test `{{ctx.payload.ref}}`").setAccount(account));

        PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("1").setSource(watchBuilder()
                .trigger(schedule(interval("10m")))
                .input(simpleInput("ref", "testWatchWithPagerDutyAction()"))
                .condition(alwaysCondition())
                .addAction("pd", actionBuilder))
                .execute().get();

        assertThat(putWatchResponse.isCreated(), is(true));

        timeWarp().scheduler().trigger("1");
        flush();
        refresh();

        assertWatchWithMinimumPerformedActionsCount("1", 1L, false);
        SearchResponse response = searchHistory(searchSource().query(boolQuery()
                .must(termQuery("result.actions.id", "pd"))
                .must(termQuery("result.actions.type", "pagerduty"))
                .must(termQuery("result.actions.status", "success"))
                .must(termQuery("result.actions.pagerduty.sent_event.event.account", account))));

        assertThat(response, notNullValue());
        assertHitCount(response, 1L);
    }
}

@ -169,7 +169,7 @@ public class HttpRequestTemplateTests extends ESTestCase {
        assertThat(parsedTemplate, is(urlParsedTemplate));
    }

    static class MockTextTemplateEngine implements TextTemplateEngine {
    public static class MockTextTemplateEngine implements TextTemplateEngine {
        @Override
        public String render(TextTemplate template, Map<String, Object> model) {
            return template.getTemplate();

@ -74,6 +74,7 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;

@ -210,8 +211,8 @@ public final class WatcherTestUtils {

        Authentication auth = new Authentication("testname", new Secret("testpassword".toCharArray()));

        EmailAction action = new EmailAction(email, "testaccount", auth, Profile.STANDARD, null);
        ExecutableEmailAction executale = new ExecutableEmailAction(action, logger, emailService, templateEngine, new HtmlSanitizer(Settings.EMPTY));
        EmailAction action = new EmailAction(email, "testaccount", auth, Profile.STANDARD, null, null);
        ExecutableEmailAction executale = new ExecutableEmailAction(action, logger, emailService, templateEngine, new HtmlSanitizer(Settings.EMPTY), Collections.emptyMap());

        actions.add(new ActionWrapper("_email", executale));

@ -30,6 +30,8 @@ import org.elasticsearch.watcher.actions.email.service.EmailService;
import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
import org.elasticsearch.watcher.actions.email.service.HtmlSanitizer;
import org.elasticsearch.watcher.actions.email.service.Profile;
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachments;
import org.elasticsearch.watcher.actions.email.service.attachment.EmailAttachmentsParser;
import org.elasticsearch.watcher.actions.index.ExecutableIndexAction;
import org.elasticsearch.watcher.actions.index.IndexAction;
import org.elasticsearch.watcher.actions.index.IndexActionFactory;

@ -420,8 +422,8 @@ public class WatchTests extends ESTestCase {
        List<ActionWrapper> list = new ArrayList<>();
        if (randomBoolean()) {
            ExecutableTransform transform = randomTransform();
            EmailAction action = new EmailAction(EmailTemplate.builder().build(), null, null, Profile.STANDARD, randomFrom(DataAttachment.JSON, DataAttachment.YAML, null));
            list.add(new ActionWrapper("_email_" + randomAsciiOfLength(8), randomThrottler(), transform, new ExecutableEmailAction(action, logger, emailService, templateEngine, htmlSanitizer)));
            EmailAction action = new EmailAction(EmailTemplate.builder().build(), null, null, Profile.STANDARD, randomFrom(DataAttachment.JSON, DataAttachment.YAML), EmailAttachments.EMPTY_ATTACHMENTS);
            list.add(new ActionWrapper("_email_" + randomAsciiOfLength(8), randomThrottler(), transform, new ExecutableEmailAction(action, logger, emailService, templateEngine, htmlSanitizer, Collections.emptyMap())));
        }
        if (randomBoolean()) {
            DateTimeZone timeZone = randomBoolean() ? DateTimeZone.UTC : null;

@ -445,7 +447,7 @@ public class WatchTests extends ESTestCase {
        for (ActionWrapper action : actions) {
            switch (action.action().type()) {
                case EmailAction.TYPE:
                    parsers.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, htmlSanitizer));
                    parsers.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, htmlSanitizer, new EmailAttachmentsParser(Collections.emptyMap()), Collections.emptyMap()));
                    break;
                case IndexAction.TYPE:
                    parsers.put(IndexAction.TYPE, new IndexActionFactory(settings, client));