Adapting test by using mock script engine

Christoph Büscher 2015-11-06 17:10:47 +01:00
parent e3f9824931
commit 5108e6fd1f
1 changed file with 144 additions and 173 deletions


@@ -16,8 +16,10 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
@@ -27,13 +29,21 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
@@ -45,8 +55,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
@@ -71,12 +81,7 @@ import static org.hamcrest.core.IsNull.notNullValue;
*
*/
@ESIntegTestCase.SuiteScopeTestCase
public class DateHistogramTests extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
}
public class DateHistogramIT extends ESIntegTestCase {
private DateTime date(int month, int day) {
return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
@@ -132,6 +137,12 @@ public class DateHistogramTests extends ESIntegTestCase {
ensureSearchable();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(
ExtractFieldScriptPlugin.class);
}
@After
public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
@@ -519,46 +530,6 @@ public class DateHistogramTests extends ESIntegTestCase {
}
}
public void testSingleValuedFieldWithValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("date")
.script(new Script("new DateTime(_value).plusMonths(1).getMillis()"))
.interval(DateHistogramInterval.MONTH)).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2l));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
}
/*
[ Jan 2, Feb 3]
[ Feb 2, Mar 3]
@@ -645,126 +616,6 @@ public class DateHistogramTests extends ESIntegTestCase {
assertThat(bucket.getDocCount(), equalTo(1l));
}
/**
* The script will change the document date values to the following:
* <p>
* doc 1: [ Feb 2, Mar 3]
* doc 2: [ Mar 2, Apr 3]
* doc 3: [ Mar 15, Apr 16]
* doc 4: [ Apr 2, May 3]
* doc 5: [ Apr 15, May 16]
* doc 6: [ Apr 23, May 24]
*/
public void testMultiValuedFieldWithValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("dates")
.script(new Script("new DateTime(_value, DateTimeZone.UTC).plusMonths(1).getMillis()"))
.interval(DateHistogramInterval.MONTH)).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5l));
key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
}
/**
* The script will change the document date values to the following:
* <p>
* doc 1: [ Feb 2, Mar 3]
* doc 2: [ Mar 2, Apr 3]
* doc 3: [ Mar 15, Apr 16]
* doc 4: [ Apr 2, May 3]
* doc 5: [ Apr 15, May 16]
* doc 6: [ Apr 23, May 24]
*/
public void testMultiValuedFieldWithValueScriptWithInheritedSubAggregator() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("dates")
.script(new Script("new DateTime((long)_value, DateTimeZone.UTC).plusMonths(1).getMillis()"))
.interval(DateHistogramInterval.MONTH).subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 3, 3, 0, 0, DateTimeZone.UTC).getMillis()));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 4, 16, 0, 0, DateTimeZone.UTC).getMillis()));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 5, 24, 0, 0, DateTimeZone.UTC).getMillis()));
key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 5, 24, 0, 0, DateTimeZone.UTC).getMillis()));
}
/**
* Jan 2
* Feb 2
@@ -775,7 +626,7 @@ public class DateHistogramTests extends ESIntegTestCase {
*/
public void testScriptSingleValue() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").script(new Script("doc['date'].value")).interval(DateHistogramInterval.MONTH))
.addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH))
.execute().actionGet();
assertSearchResponse(response);
@@ -811,7 +662,7 @@ public class DateHistogramTests extends ESIntegTestCase {
public void testScriptSingleValueWithSubAggregatorInherited() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.script(new Script("doc['date'].value")).interval(DateHistogramInterval.MONTH)
.script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH)
.subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
@@ -855,7 +706,7 @@ public class DateHistogramTests extends ESIntegTestCase {
public void testScriptMultiValued() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").script(new Script("doc['dates'].values")).interval(DateHistogramInterval.MONTH))
.addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH))
.execute().actionGet();
assertSearchResponse(response);
@@ -909,7 +760,7 @@ public class DateHistogramTests extends ESIntegTestCase {
public void testScriptMultiValuedWithAggregatorInherited() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.script(new Script("doc['dates'].values")).interval(DateHistogramInterval.MONTH)
.script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH)
.subAggregation(max("max"))).execute().actionGet();
assertSearchResponse(response);
@@ -1369,4 +1220,124 @@ public class DateHistogramTests extends ESIntegTestCase {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), greaterThan(0));
}
public static class ExtractFieldScriptPlugin extends Plugin {
@Override
public String name() {
return ExtractFieldScriptEngine.NAME;
}
@Override
public String description() {
return "Mock script engine for " + DateHistogramIT.class;
}
public void onModule(ScriptModule module) {
module.addScriptEngine(ExtractFieldScriptEngine.class);
}
}
public static class ExtractFieldScriptEngine implements ScriptEngineService {
public static final String NAME = "extract_field";
@Override
public void close() throws IOException {
}
@Override
public String[] types() {
return new String[] { NAME };
}
@Override
public String[] extensions() {
return types();
}
@Override
public boolean sandboxed() {
return true;
}
@Override
public Object compile(String script) {
return script;
}
@Override
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
throw new UnsupportedOperationException();
}
@Override
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
return new SearchScript() {
@Override
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
return new LeafSearchScript() {
@Override
public Object unwrap(Object value) {
return null;
}
@Override
public void setNextVar(String name, Object value) {
}
@Override
public Object run() {
String fieldName = (String) compiledScript.compiled();
return leafLookup.doc().get(fieldName);
}
@Override
public void setScorer(Scorer scorer) {
}
@Override
public void setSource(Map<String, Object> source) {
}
@Override
public void setDocument(int doc) {
if (leafLookup != null) {
leafLookup.setDocument(doc);
}
}
@Override
public long runAsLong() {
return 0;
}
@Override
public float runAsFloat() {
return 0;
}
@Override
public double runAsDouble() {
return 0;
}
};
}
@Override
public boolean needsScores() {
return false;
}
};
}
@Override
public void scriptRemoved(CompiledScript script) {
}
}
}
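
For reference, a minimal usage sketch, not part of the diff above: ExtractFieldScriptEngine treats the inline script source as a plain field name and returns that field's doc values, which is why Groovy expressions such as doc['date'].value are replaced by the bare name "date" in the changed tests. The sketch below assumes the "idx" index and "date" field created in the suite setup; the method name is hypothetical.

    // Hypothetical sketch inside DateHistogramIT: the "script" is just the field name,
    // resolved by the mock "extract_field" engine registered via ExtractFieldScriptPlugin.
    public void testScriptSingleValueSketch() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(dateHistogram("histo")
                        .script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null))
                        .interval(DateHistogramInterval.MONTH))
                .execute().actionGet();
        assertSearchResponse(response);
        // The mock engine returns the doc values of "date", so buckets are built as before.
        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getBuckets().size(), greaterThan(0));
    }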