commit 22a98ec279
parent 7b4bacebb5
@@ -110,28 +110,19 @@ public class ValuesSourceConfig {
    ) {
        ValuesSourceConfig config;
        MappedFieldType fieldType = null;
        ValuesSourceType valuesSourceType;
        ValueType scriptValueType = null;
        AggregationScript.LeafFactory aggregationScript = null;
        ValuesSourceType valuesSourceType = null;
        ValueType scriptValueType = userValueTypeHint;
        AggregationScript.LeafFactory aggregationScript = createScript(script, context); // returns null if script is null
        boolean unmapped = false;
        if (userValueTypeHint != null) {
            // If the user gave us a type hint, respect that.
            valuesSourceType = userValueTypeHint.getValuesSourceType();
        }
        if (field == null) {
            // Stand Alone Script Case
            if (script == null) {
                throw new IllegalStateException(
                    "value source config is invalid; must have either a field context or a script or marked as unmapped");
                    "value source config is invalid; must have either a field or a script");
            }
            /*
             * This is the Stand Alone Script path. We should have a script that will produce a value independent of the presence or
             * absence of any one field. The type of the script is given by the userValueTypeHint field, if the user specified a type,
             * or the aggregation's default type if the user didn't.
             */
            if (userValueTypeHint != null) {
                valuesSourceType = userValueTypeHint.getValuesSourceType();
            } else {
                valuesSourceType = defaultValueSourceType;
            }
            aggregationScript = createScript(script, context);
            scriptValueType = userValueTypeHint;
        } else {
            // Field case
            fieldType = context.fieldMapper(field);
@@ -141,22 +132,17 @@ public class ValuesSourceConfig {
                 * pattern. In this case, we're going to end up using the EMPTY variant of the ValuesSource, and possibly applying a user
                 * specified missing value.
                 */
                if (userValueTypeHint != null) {
                    valuesSourceType = userValueTypeHint.getValuesSourceType();
                } else {
                    valuesSourceType = defaultValueSourceType;
                }
                unmapped = true;
                if (userValueTypeHint != null) {
                    // todo do we really need this for unmapped?
                    scriptValueType = userValueTypeHint;
                }
            } else {
                aggregationScript = null; // Value scripts are not allowed on unmapped fields. What would that do, anyway?
            } else if (valuesSourceType == null) {
                // We have a field, and the user didn't specify a type, so get the type from the field
                valuesSourceType = fieldResolver.getValuesSourceType(context, fieldType, aggregationName, userValueTypeHint,
                    defaultValueSourceType);
                aggregationScript = createScript(script, context);
            }
        }
        if (valuesSourceType == null) {
            valuesSourceType = defaultValueSourceType;
        }
        config = new ValuesSourceConfig(valuesSourceType, fieldType, unmapped, aggregationScript, scriptValueType , context);
        config.format(resolveFormat(format, valuesSourceType, timeZone, fieldType));
        config.missing(missing);
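The net effect of the two ValuesSourceConfig hunks above is a single, explicit precedence for picking the values source type: an explicit user value-type hint wins, otherwise the type resolved from the mapped field is used, and only then does the aggregation's default apply. The standalone sketch below is not part of this commit; the method name and the plain-string stand-ins for ValueType/ValuesSourceType are illustrative assumptions that model the precedence so it can run on its own:

    // Illustrative sketch only: the hint > field > default precedence, with strings
    // standing in for the real ValueType / ValuesSourceType machinery.
    public final class ValuesSourceTypePrecedenceSketch {
        static String resolve(String userValueTypeHint, String fieldResolvedType, String defaultType) {
            if (userValueTypeHint != null) {
                return userValueTypeHint;      // an explicit hint from the request wins
            }
            if (fieldResolvedType != null) {
                return fieldResolvedType;      // otherwise use the type derived from the mapped field
            }
            return defaultType;                // otherwise fall back to the aggregation's default
        }

        public static void main(String[] args) {
            System.out.println(resolve("string", "numeric", "bytes")); // string
            System.out.println(resolve(null, "numeric", "bytes"));     // numeric
            System.out.println(resolve(null, null, "bytes"));          // bytes
        }
    }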
@@ -101,13 +101,14 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
        ));
        assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style include/exclude settings as " +
            "they can only be applied to string fields. Use an array of values for include/exclude clauses"));
    }

        e = expectThrows(AggregationExecutionException.class, () -> testBothCases(new MatchNoDocsQuery(), dataset,
            aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude),
    public void testBadUserValueTypeHint() throws IOException {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testBothCases(new MatchNoDocsQuery(), dataset,
            aggregation -> aggregation.field(BINARY_FIELD),
            agg -> fail("test should have failed with exception"), ValueType.NUMERIC // numeric type hint
        ));
        assertThat(e.getMessage(), equalTo("Aggregation [_name] cannot support regular expression style include/exclude settings as " +
            "they can only be applied to string fields. Use an array of values for include/exclude clauses"));
        assertThat(e.getMessage(), equalTo("Expected numeric type on field [binary], but got [binary]"));
    }

    private void testSearchCase(Query query, List<Long> dataset,
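The new testBadUserValueTypeHint above pins down the failure mode when a user value-type hint contradicts the mapped field: resolution throws an IllegalArgumentException instead of producing meaningless buckets. A minimal self-contained sketch of that kind of check follows; it is not the actual Elasticsearch validation code, and the class, method, and message wording are stand-ins borrowed from the test expectation:

    public final class TypeHintMismatchSketch {
        static void checkHint(String fieldName, String hint, String fieldTypeName) {
            // Fail fast when the requested value type cannot be satisfied by the field's mapping.
            if (hint.equals(fieldTypeName) == false) {
                throw new IllegalArgumentException(
                    "Expected " + hint + " type on field [" + fieldName + "], but got [" + fieldTypeName + "]");
            }
        }

        public static void main(String[] args) {
            checkHint("binary", "numeric", "binary"); // throws, mirroring the expected message in the test
        }
    }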
@@ -58,7 +58,12 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -98,9 +103,11 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;

import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript;
@@ -112,6 +119,26 @@ public class TermsAggregatorTests extends AggregatorTestCase {

    private boolean randomizeAggregatorImpl = true;

    // Constants for a script that returns a string
    private static final String STRING_SCRIPT_NAME = "string_script";
    private static final String STRING_SCRIPT_OUTPUT = "Orange";

    @Override
    protected ScriptService getMockScriptService() {
        Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
        Map<String, Function<Map<String, Object>, Object>> nonDeterministicScripts = new HashMap<>();

        scripts.put(STRING_SCRIPT_NAME, value -> STRING_SCRIPT_OUTPUT);

        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
            scripts,
            nonDeterministicScripts,
            Collections.emptyMap());
        Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);

        return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
    }

    protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder,
                                                        IndexSearcher indexSearcher, MappedFieldType... fieldTypes) throws IOException {
        try {
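getMockScriptService above is the AggregatorTestCase hook for wiring scripts into unit tests: each map entry turns a script name into a plain Java function that MockScriptEngine invokes in place of a real Painless script. A self-contained sketch of that name-to-function lookup, illustrative only and stripped of the Elasticsearch plumbing, mimicking scripts.put(STRING_SCRIPT_NAME, value -> STRING_SCRIPT_OUTPUT):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    public final class MockScriptLookupSketch {
        public static void main(String[] args) {
            Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
            // The registered function ignores its parameters and always returns the constant output.
            scripts.put("string_script", params -> "Orange");

            Object result = scripts.get("string_script").apply(new HashMap<>());
            System.out.println(result); // Orange, regardless of the document being scripted
        }
    }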
@@ -1246,6 +1273,26 @@ public class TermsAggregatorTests extends AggregatorTestCase {
        }
    }

    public void testNumberToStringValueScript() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("number");

        TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name")
            .userValueTypeHint(ValueType.STRING)
            .field("number")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, STRING_SCRIPT_NAME, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("number", i + 1)));
            }
        }, (Consumer<InternalTerms>) terms -> {
            assertTrue(AggregationInspectionHelper.hasValue(terms));
        }, fieldType);
    }


    private void assertNestedTopHitsScore(InternalMultiBucketAggregation<?, ?> terms, boolean withScore) {
        assertThat(terms.getBuckets().size(), equalTo(9));
        int ptr = 9;
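testNumberToStringValueScript above combines a numeric field, a STRING value-type hint, and a value script that rewrites every value to the constant "Orange", so the terms aggregation should collapse all ten documents into a single bucket. A tiny standalone sketch of that bucketing arithmetic, illustrative only, with a HashMap standing in for the real terms aggregator:

    import java.util.HashMap;
    import java.util.Map;

    public final class ScriptedTermsBucketSketch {
        public static void main(String[] args) {
            Map<String, Long> buckets = new HashMap<>();
            for (int i = 1; i <= 10; i++) {
                String key = "Orange"; // stand-in for the value script applied to field value i
                buckets.merge(key, 1L, Long::sum);
            }
            System.out.println(buckets); // {Orange=10}
        }
    }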
@@ -74,7 +74,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
@@ -934,24 +933,6 @@ public class MinAggregatorTests extends AggregatorTestCase {
        testCase(aggregationBuilder, query, buildIndex, verify, fieldType);
    }

    private <T extends AggregationBuilder, V extends InternalAggregation> void testCase(T aggregationBuilder, Query query,
            CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
            Consumer<V> verify, MappedFieldType fieldType) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
            buildIndex.accept(indexWriter);
            indexWriter.close();

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

                V agg = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
                verify.accept(agg);

            }
        }
    }

    @Override
    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
        return Arrays.asList(
@@ -48,6 +48,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
@@ -126,6 +127,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -541,6 +543,25 @@ public abstract class AggregatorTestCase extends ESTestCase {
        InternalAggregationTestCase.assertMultiBucketConsumer(agg, bucketConsumer);
    }

    protected <T extends AggregationBuilder,
        V extends InternalAggregation> void testCase(T aggregationBuilder, Query query,
                                                     CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                                                     Consumer<V> verify, MappedFieldType fieldType) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
            buildIndex.accept(indexWriter);
            indexWriter.close();

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

                V agg = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
                verify.accept(agg);

            }
        }
    }

    private static class ShardSearcher extends IndexSearcher {
        private final List<LeafReaderContext> ctx;
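The hunk above promotes the testCase helper into AggregatorTestCase so that the private copies in MinAggregatorTests (above) and BoxplotAggregatorTests (below) are no longer needed. A hedged usage sketch of how a subclass test method might call it; the min aggregation, field name, and expected value are illustrative assumptions, not part of this commit:

    // Inside a test method of a hypothetical AggregatorTestCase subclass:
    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
    fieldType.setName("number");

    MinAggregationBuilder builder = new MinAggregationBuilder("min").field("number");

    testCase(builder, new MatchAllDocsQuery(), iw -> {
        iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
        iw.addDocument(singleton(new NumericDocValuesField("number", 3)));
    }, (InternalMin min) -> assertEquals(3, min.getValue(), 0), fieldType);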
@@ -9,14 +9,10 @@ package org.elasticsearch.xpack.analytics.boxplot;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.settings.Settings;
@@ -30,7 +26,6 @@ import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
@@ -350,24 +345,4 @@ public class BoxplotAggregatorTests extends AggregatorTestCase {
        testCase(aggregationBuilder, query, buildIndex, verify, fieldType);
    }

    private <T extends AggregationBuilder, V extends InternalAggregation> void testCase(
            T aggregationBuilder, Query query,
            CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
            Consumer<V> verify, MappedFieldType fieldType) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
            buildIndex.accept(indexWriter);
            indexWriter.close();

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

                V agg = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
                verify.accept(agg);

            }
        }
    }


}
@@ -19,22 +19,39 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.LeafDocLookup;
import org.elasticsearch.xpack.analytics.AnalyticsPlugin;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import static java.util.Collections.singleton;
@@ -59,9 +76,16 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
        testCase(aggregationBuilder, query, buildIndex, verify, fieldType);
    }

    private void testCase(AggregationBuilder aggregationBuilder, Query query,
                          CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalStringStats> verify, MappedFieldType fieldType) throws IOException {
    /* TODO: This should just use the base test case in AggregatorTestCase. The main incompatibility is around returning a null
       InternalAggregation instance when no docs are found, I think. --Tozzi
     */
    @Override
    protected <T extends AggregationBuilder, V extends InternalAggregation> void testCase(
        T aggregationBuilder,
        Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
        Consumer<V> verify,
        MappedFieldType fieldType) throws IOException {

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        buildIndex.accept(indexWriter);
@@ -74,7 +98,10 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
        aggregator.preCollection();
        indexSearcher.search(query, aggregator);
        aggregator.postCollection();
        verify.accept((InternalStringStats) aggregator.buildAggregation(0L));

        @SuppressWarnings("unchecked")
        V aggregation = (V) aggregator.buildAggregation(0L);
        verify.accept(aggregation);

        indexReader.close();
        directory.close();
@@ -99,7 +126,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
            for(int i = 0; i < 10; i++) {
                iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO)));
            }
        }, stats -> {
        }, (InternalStringStats stats) -> {
            assertEquals(0, stats.getCount());
            assertEquals(Integer.MIN_VALUE, stats.getMaxLength());
            assertEquals(Integer.MAX_VALUE, stats.getMinLength());
@@ -118,7 +145,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
            for(int i=0; i < 10; i++) {
                iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO)));
            }
        }, stats -> {
        }, (InternalStringStats stats) -> {
            assertEquals(10, stats.getCount());
            assertEquals(4, stats.getMaxLength());
            assertEquals(4, stats.getMinLength());
@@ -131,6 +158,32 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
        }, null);
    }

    public void testMissing() throws IOException {
        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType();
        fieldType.setName("text");
        fieldType.setFielddata(true);

        final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name")
            .field(fieldType.name())
            .missing("b");

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new TextField(fieldType.name(), "a", Field.Store.NO)));
            iw.addDocument(Collections.emptySet());
            iw.addDocument(singleton(new TextField(fieldType.name(), "a", Field.Store.NO)));
            iw.addDocument(Collections.emptySet());
        }, (InternalStringStats stats) -> {
            assertEquals(4, stats.getCount());
            assertEquals(1, stats.getMaxLength());
            assertEquals(1, stats.getMinLength());
            assertEquals(1.0, stats.getAvgLength(), 0);
            assertEquals(2, stats.getDistribution().size());
            assertEquals(0.5, stats.getDistribution().get("a"), 0);
            assertEquals(0.5, stats.getDistribution().get("b"), 0);
            assertEquals(1.0, stats.getEntropy(), 0);
        }, fieldType);
    }

    public void testSingleValuedField() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            for(int i=0; i < 10; i++) {
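The testMissing assertions above follow directly from the data: two documents supply "a" and two empty documents pick up the missing value "b", giving four one-character values, a distribution of {a: 0.5, b: 0.5}, and a Shannon entropy of 1.0. A self-contained sketch of that entropy arithmetic, illustrative only and not the StringStats implementation:

    import java.util.Map;

    public final class EntropySketch {
        public static void main(String[] args) {
            Map<String, Double> distribution = Map.of("a", 0.5, "b", 0.5);
            // Shannon entropy in bits: -sum(p * log2(p)) over the character distribution.
            double entropy = distribution.values().stream()
                .mapToDouble(p -> -p * (Math.log(p) / Math.log(2)))
                .sum();
            System.out.println(entropy); // 1.0, matching assertEquals(1.0, stats.getEntropy(), 0)
        }
    }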
@@ -197,7 +250,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
            for(int i=0; i < 10; i++) {
                iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO)));
            }
        }, stats -> {
        }, (InternalStringStats stats) -> {
            assertEquals("0010.00", stats.getCountAsString());
            assertEquals("0005.00", stats.getMaxLengthAsString());
            assertEquals("0005.00", stats.getMinLengthAsString());
@@ -271,4 +324,145 @@ public class StringStatsAggregatorTests extends AggregatorTestCase {
        directory.close();
    }

    public void testValueScriptSingleValuedField() throws IOException {
        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType();
        fieldType.setName("text");
        fieldType.setFielddata(true);

        final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name")
            .field(fieldType.name())
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO)));
            iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO)));
        }, (InternalStringStats stats) -> {
            assertEquals(2, stats.getCount());
            assertEquals(2, stats.getMaxLength());
            assertEquals(2, stats.getMinLength());
            assertEquals(2.0, stats.getAvgLength(), 0);
            assertEquals(2, stats.getDistribution().size());
            assertEquals(0.5, stats.getDistribution().get("a"), 0);
            assertEquals(0.5, stats.getDistribution().get("b"), 0);
            assertEquals(1.0, stats.getEntropy(), 0);
        }, fieldType);
    }

    public void testValueScriptMultiValuedField() throws IOException {
        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType();
        fieldType.setName("text");
        fieldType.setFielddata(true);

        final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name")
            .field(fieldType.name())
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            Set<TextField> FieldData = new java.util.HashSet<>();
            FieldData.add(new TextField(fieldType.name(), "b", Field.Store.NO));
            FieldData.add(new TextField(fieldType.name(), "c", Field.Store.NO));
            iw.addDocument(FieldData);
            Set<TextField> FieldData2 = new java.util.HashSet<>();
            FieldData2.add(new TextField(fieldType.name(), "b", Field.Store.NO));
            FieldData2.add(new TextField(fieldType.name(), "c", Field.Store.NO));
            iw.addDocument(FieldData2);
        }, (InternalStringStats stats) -> {
            assertEquals(4, stats.getCount());
            assertEquals(2, stats.getMaxLength());
            assertEquals(2, stats.getMinLength());
            assertEquals(2.0, stats.getAvgLength(), 0);
            assertEquals(3, stats.getDistribution().size());
            assertEquals(0.5, stats.getDistribution().get("a"), 0);
            assertEquals(0.25, stats.getDistribution().get("b"), 0);
            assertEquals(0.25, stats.getDistribution().get("c"), 0);
            assertEquals(1.5, stats.getEntropy(), 0);
        }, fieldType);
    }

    public void testFieldScriptSingleValuedField() throws IOException {
        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType();
        fieldType.setName("text");
        fieldType.setFielddata(true);

        final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME,
                Collections.singletonMap("field", fieldType.name())));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO)));
            iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO)));
        }, (InternalStringStats stats) -> {
            assertEquals(2, stats.getCount());
            assertEquals(2, stats.getMaxLength());
            assertEquals(2, stats.getMinLength());
            assertEquals(2.0, stats.getAvgLength(), 0);
            assertEquals(2, stats.getDistribution().size());
            assertEquals(0.5, stats.getDistribution().get("a"), 0);
            assertEquals(0.5, stats.getDistribution().get("b"), 0);
            assertEquals(1.0, stats.getEntropy(), 0);
        }, fieldType);
    }

    public void testFieldScriptMultiValuedField() throws IOException {
        final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType();
        fieldType.setName("text");
        fieldType.setFielddata(true);

        final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME,
                Collections.singletonMap("field", fieldType.name())));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            Set<TextField> doc = new java.util.HashSet<>();
            doc.add(new TextField(fieldType.name(), "b", Field.Store.NO));
            doc.add(new TextField(fieldType.name(), "c", Field.Store.NO));
            iw.addDocument(doc);
            Set<TextField> doc1 = new java.util.HashSet<>();
            doc1.add(new TextField(fieldType.name(), "b", Field.Store.NO));
            doc1.add(new TextField(fieldType.name(), "c", Field.Store.NO));
            iw.addDocument(doc1);
        }, (InternalStringStats stats) -> {
            assertEquals(4, stats.getCount());
            assertEquals(2, stats.getMaxLength());
            assertEquals(2, stats.getMinLength());
            assertEquals(2.0, stats.getAvgLength(), 0);
            assertEquals(3, stats.getDistribution().size());
            assertEquals(0.5, stats.getDistribution().get("a"), 0);
            assertEquals(0.25, stats.getDistribution().get("b"), 0);
            assertEquals(0.25, stats.getDistribution().get("c"), 0);
            assertEquals(1.5, stats.getEntropy(), 0);
        }, fieldType);
    }

    @Override
    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
        return new StringStatsAggregationBuilder("_name")
            .field(fieldName);
    }

    @Override
    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
        return Collections.singletonList(CoreValuesSourceType.BYTES);
    }

    @Override
    protected List<String> unsupportedMappedFieldTypes() {
        return Collections.singletonList(IpFieldMapper.CONTENT_TYPE);
    }

    @Override
    protected ScriptService getMockScriptService() {
        final Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
        scripts.put(VALUE_SCRIPT_NAME, vars -> "a" + vars.get("_value"));
        scripts.put(FIELD_SCRIPT_NAME, vars -> {
            final String fieldName = (String) vars.get("field");
            final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc");
            return lookup.get(fieldName).stream()
                .map(value -> "a" + value)
                .collect(Collectors.toList());
        });
        final MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap());
        final Map<String, ScriptEngine> engines = Collections.singletonMap(engine.getType(), engine);
        return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
    }
}