Moved testAlertsWithDifferentSearchType and testWithAggregations to more unit-like tests.

Original commit: elastic/x-pack-elasticsearch@115e361c42
Martijn van Groningen 2015-03-03 15:54:44 +01:00
parent f6c17bd802
commit 0382310cae
3 changed files with 137 additions and 84 deletions


@@ -0,0 +1,110 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.alerts.condition.script;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.alerts.ExecutionContext;
import org.elasticsearch.alerts.Payload;
import org.elasticsearch.alerts.support.Script;
import org.elasticsearch.alerts.support.init.proxy.ScriptServiceProxy;
import org.elasticsearch.alerts.test.AbstractAlertsSingleNodeTests;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.HashSet;
import java.util.Set;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
*/
public class ScriptConditionSearchTests extends AbstractAlertsSingleNodeTests {
private ThreadPool tp = null;
private ScriptServiceProxy scriptService;
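// wires a standalone Groovy script engine into a ScriptService so the condition scripts actually execute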
@Before
public void init() {
tp = new ThreadPool(ThreadPool.Names.SAME);
Settings settings = ImmutableSettings.settingsBuilder().build();
GroovyScriptEngineService groovyScriptEngineService = new GroovyScriptEngineService(settings);
Set<ScriptEngineService> engineServiceSet = new HashSet<>();
engineServiceSet.add(groovyScriptEngineService);
scriptService = ScriptServiceProxy.of(new ScriptService(settings, new Environment(), engineServiceSet, new ResourceWatcherService(settings, tp)));
}
@After
public void cleanup() {
tp.shutdownNow();
}
@Test
public void testExecute_withAggs() throws Exception {
createIndex("my-index", client().admin().indices().prepareCreate("my-index")
.addMapping("my-type", "_timestamp", "enabled=true"));
client().prepareIndex("my-index", "my-type").setTimestamp("2005-01-01T00:00").setSource("{}").get();
client().prepareIndex("my-index", "my-type").setTimestamp("2005-01-01T00:10").setSource("{}").get();
client().prepareIndex("my-index", "my-type").setTimestamp("2005-01-01T00:20").setSource("{}").get();
client().prepareIndex("my-index", "my-type").setTimestamp("2005-01-01T00:30").setSource("{}").get();
refresh();
SearchResponse response = client().prepareSearch("my-index")
.addAggregation(AggregationBuilders.dateHistogram("rate").field("_timestamp").interval(DateHistogram.Interval.HOUR).order(Histogram.Order.COUNT_DESC))
.get();
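// the condition requires at least 5 docs in the top hour bucket; only four are indexed so far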
ScriptCondition condition = new ScriptCondition(logger, scriptService, new Script("aggregations.rate.buckets[0]?.doc_count >= 5"));
ExecutionContext ctx = mock(ExecutionContext.class);
when(ctx.payload()).thenReturn(new Payload.ActionResponse(response));
assertFalse(condition.execute(ctx).met());
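// index a fifth doc into the same hour so the top bucket reaches the threshold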
client().prepareIndex("my-index", "my-type").setTimestamp("2005-01-01T00:40").setSource("{}").get();
refresh();
response = client().prepareSearch("my-index")
.addAggregation(AggregationBuilders.dateHistogram("rate").field("_timestamp").interval(DateHistogram.Interval.HOUR).order(Histogram.Order.COUNT_DESC))
.get();
when(ctx.payload()).thenReturn(new Payload.ActionResponse(response));
assertTrue(condition.execute(ctx).met());
}
@Test
public void testExecute_accessHits() throws Exception {
ScriptCondition condition = new ScriptCondition(logger, scriptService, new Script("hits?.hits[0]?._score == 1.0"));
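// build a minimal search response containing a single hit scored 1.0 for the script to inspect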
InternalSearchHit hit = new InternalSearchHit(0, "1", new StringText("type"), null);
hit.score(1f);
hit.shard(new SearchShardTarget("a", "a", 0));
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(new InternalSearchHits(new InternalSearchHit[]{hit}, 1l, 1f), null, null, null, false, null);
SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 500l, new ShardSearchFailure[0]);
ExecutionContext ctx = mock(ExecutionContext.class);
when(ctx.payload()).thenReturn(new Payload.ActionResponse(response));
assertTrue(condition.execute(ctx).met());
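// raise the score so the same response no longer satisfies the condition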
hit.score(2f);
when(ctx.payload()).thenReturn(new Payload.ActionResponse(response));
assertFalse(condition.execute(ctx).met());
}
}


@@ -6,6 +6,7 @@
package org.elasticsearch.alerts.input.search;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.alerts.ExecutionContext;
import org.elasticsearch.alerts.input.Input;
import org.elasticsearch.alerts.input.InputException;
@@ -46,7 +47,6 @@ public class SearchInputTests extends ElasticsearchIntegrationTest {
@Test
public void testExecute() throws Exception {
SearchSourceBuilder searchSourceBuilder = searchSource().query(
filteredQuery(matchQuery("event_type", "a"), rangeFilter("_timestamp").from("{{" + Variables.SCHEDULED_FIRE_TIME + "}}||-30s").to("{{" + Variables.SCHEDULED_FIRE_TIME + "}}"))
);
@@ -70,6 +70,32 @@ public class SearchInputTests extends ElasticsearchIntegrationTest {
assertEquals(result.request().indicesOptions(), request.indicesOptions());
}
@Test
public void testDifferentSearchType() throws Exception {
SearchSourceBuilder searchSourceBuilder = searchSource().query(
filteredQuery(matchQuery("event_type", "a"), rangeFilter("_timestamp").from("{{" + Variables.SCHEDULED_FIRE_TIME + "}}||-30s").to("{{" + Variables.SCHEDULED_FIRE_TIME + "}}"))
);
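// pick a random search type and verify it is preserved on the request that was actually executed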
SearchType searchType = randomFrom(SearchType.values());
SearchRequest request = client()
.prepareSearch()
.setSearchType(searchType)
.request()
.source(searchSourceBuilder);
SearchInput searchInput = new SearchInput(logger,
ScriptServiceProxy.of(internalCluster().getInstance(ScriptService.class)),
ClientProxy.of(client()), request);
ExecutionContext ctx = new ExecutionContext("test-alert", null,
new DateTime(0, DateTimeZone.UTC), new DateTime(0, DateTimeZone.UTC));
SearchInput.Result result = searchInput.execute(ctx);
assertThat((Integer) XContentMapValues.extractValue("hits.total", result.payload().data()), equalTo(0));
assertNotNull(result.request());
assertEquals(result.request().searchType(), searchType);
assertArrayEquals(result.request().indices(), request.indices());
assertEquals(result.request().indicesOptions(), request.indicesOptions());
}
@Test
public void testParser_Valid() throws Exception {
SearchRequest request = client().prepareSearch()


@@ -6,7 +6,6 @@
package org.elasticsearch.alerts.test.integration;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.alerts.AlertsException;
import org.elasticsearch.alerts.AlertsStore;
import org.elasticsearch.alerts.client.AlertSourceBuilder;
@@ -22,14 +21,8 @@ import org.elasticsearch.alerts.transport.actions.get.GetAlertResponse;
import org.elasticsearch.alerts.transport.actions.put.PutAlertResponse;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.junit.Test;
import java.util.Locale;
@@ -93,7 +86,6 @@ public class BasicAlertsTests extends AbstractAlertsIntegrationTests {
}
@Test
@TestLogging("action.admin.indices.delete:TRACE")
public void testDeleteAlert() throws Exception {
AlertsClient alertsClient = alertClient();
SearchRequest searchRequest = AlertsTestUtils.newInputSearchRequest("my-index").source(searchSource().query(matchAllQuery()));
@@ -162,25 +154,6 @@ public class BasicAlertsTests extends AbstractAlertsIntegrationTests {
}
}
@Test
public void testAlertWithDifferentSearchType() throws Exception {
AlertsClient alertsClient = alertClient();
createIndex("my-index");
// Index a sample document for the alert to evaluate
client().prepareIndex("my-index", "my-type").setSource("field", "value").get();
SearchRequest searchRequest = AlertsTestUtils.newInputSearchRequest("my-index").source(searchSource().query(matchAllQuery()));
searchRequest.searchType(SearchType.QUERY_THEN_FETCH);
// By accessing the actual hit we know that the fetch phase has been performed
PutAlertResponse indexResponse = alertsClient.preparePutAlert("my-first-alert")
.source(alertSourceBuilder()
.schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS))
.input(searchInput(searchRequest))
.condition(scriptCondition("ctx.payload.hits?.hits[0]._score == 1.0")))
.get();
assertThat(indexResponse.indexResponse().isCreated(), is(true));
assertAlertWithMinimumPerformedActionsCount("my-first-alert", 1);
}
@Test
public void testModifyAlerts() throws Exception {
SearchRequest searchRequest = AlertsTestUtils.newInputSearchRequest("my-index")
@@ -212,62 +185,6 @@ public class BasicAlertsTests extends AbstractAlertsIntegrationTests {
assertThat(count, equalTo(findNumberOfPerformedActions("1")));
}
@Test
public void testAggregations() throws Exception {
class Indexer extends Thread {
private final long sleepTime;
private final long totalTime;
Indexer(long sleepTime, long totalTime) {
this.sleepTime = sleepTime;
this.totalTime = totalTime;
}
@Override
public void run() {
long startTime = System.currentTimeMillis();
while ((System.currentTimeMillis() - startTime) < totalTime) {
client().prepareIndex("my-index", "my-type").setCreate(true).setSource("{}").get();
try {
Thread.sleep(sleepTime);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
}
}
assertAcked(prepareCreate("my-index").addMapping("my-type", "_timestamp", "enabled=true"));
SearchRequest searchRequest = AlertsTestUtils.newInputSearchRequest("my-index").source(
searchSource()
.query(QueryBuilders.constantScoreQuery(FilterBuilders.rangeFilter("_timestamp").from("{{scheduled_fire_time}}||-1m").to("{{scheduled_fire_time}}")))
.aggregation(AggregationBuilders.dateHistogram("rate").field("_timestamp").interval(DateHistogram.Interval.SECOND).order(Histogram.Order.COUNT_DESC)));
// BytesReference reference = createAlertSource("* 0/1 * * * ? *", searchRequest, "aggregations.rate.buckets[0]?.doc_count > 5");
alertClient().preparePutAlert("rate-alert")
.source(alertSourceBuilder()
.schedule(cron("* 0/1 * * * ? *"))
.input(searchInput(searchRequest))
.condition(scriptCondition("ctx.payload.aggregations.rate.buckets[0]?.doc_count > 5"))
.addAction(indexAction("my-index", "trail")))
.get();
Indexer indexer = new Indexer(500, 60000);
indexer.start();
indexer.join();
assertAlertWithExactPerformedActionsCount("rate-alert", 0);
assertAlertWithNoActionNeeded("rate-alert", 1);
indexer = new Indexer(100, 60000);
indexer.start();
indexer.join();
assertAlertWithMinimumPerformedActionsCount("rate-alert", 1);
}
private final SearchSourceBuilder searchSourceBuilder = searchSource().query(
filteredQuery(matchQuery("event_type", "a"), rangeFilter("_timestamp").from("{{" + Variables.SCHEDULED_FIRE_TIME + "}}||-30s").to("{{" + Variables.SCHEDULED_FIRE_TIME + "}}"))
);