Aggs - support for arrays of numeric values in include/exclude clauses

Closes #7714
markharwood 2014-09-12 20:05:37 +01:00
parent a90d7b1670
commit e97b8fd217
11 changed files with 368 additions and 27 deletions
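For orientation, here is a minimal usage sketch (not part of the commit) of the numeric include/exclude arrays added below, written against the Java API; the client variable, the "products" index and the "size" field are hypothetical.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;

public class NumericIncludeExcludeExample {

    // Hypothetical index ("products") and long-mapped field ("size").
    static Terms sizeBuckets(Client client) {
        SearchResponse response = client.prepareSearch("products")
                .addAggregation(AggregationBuilders.terms("sizes")
                        .field("size")
                        // New in this commit: exact numeric values instead of a regex
                        // (regex include/exclude remains string-only).
                        .include(new long[] { 1, 2, 3 })
                        .exclude(new long[] { 2 }))
                .get();
        return response.getAggregations().get("sizes");
    }
}

On the REST layer the same filtering is expressed by passing an array of numbers as the include/exclude value, which is what issue #7714 asks for.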

SignificantLongTermsAggregator.java

@@ -24,6 +24,7 @@ import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
@@ -40,9 +41,9 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator {
public SignificantLongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
long estimatedBucketCount, BucketCountThresholds bucketCountThresholds,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory) {
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, IncludeExclude.LongFilter includeExclude) {
super(name, factories, valuesSource, format, estimatedBucketCount, null, bucketCountThresholds, aggregationContext, parent, SubAggCollectionMode.DEPTH_FIRST, false);
super(name, factories, valuesSource, format, estimatedBucketCount, null, bucketCountThresholds, aggregationContext, parent, SubAggCollectionMode.DEPTH_FIRST, false, includeExclude);
this.termsAggFactory = termsAggFactory;
}

SignificantTermsAggregatorFactory.java

@@ -194,9 +194,10 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
return execution.create(name, factories, valuesSource, estimatedBucketCount, bucketCountThresholds, includeExclude, aggregationContext, parent, this);
}
if (includeExclude != null) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support the include/exclude " +
"settings as it can only be applied to string values");
if ((includeExclude != null) && (includeExclude.isRegexBased())) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude " +
"settings as they can only be applied to string fields. Use an array of numeric values for include/exclude clauses used to filter numeric fields");
}
if (valuesSource instanceof ValuesSource.Numeric) {
@@ -204,7 +205,11 @@
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
throw new UnsupportedOperationException("No support for examining floating point numerics");
}
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, bucketCountThresholds, aggregationContext, parent, this);
IncludeExclude.LongFilter longFilter = null;
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
}
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, bucketCountThresholds, aggregationContext, parent, this, longFilter);
}
throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field [" + config.fieldContext().field() +
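To make the branch above concrete, here is a hedged sketch (not from the commit) of the two request shapes the factory now distinguishes on a numeric field; the aggregation name and the "fact_category" field are hypothetical, and the regex form uses the two-argument include overload shown later in this commit.

import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;

public class SignificantTermsIncludeExcludeSketch {

    // Regex-style include/exclude stays string-only: applied to a numeric field,
    // the factory above rejects it with an AggregationExecutionException.
    static SignificantTermsBuilder regexStyle() {
        return new SignificantTermsBuilder("sig")
                .field("fact_category")
                .include("10.*", 0);
    }

    // An array of exact numeric values is accepted: it is converted to an
    // IncludeExclude.LongFilter and applied while collecting buckets.
    static SignificantTermsBuilder exactStyle() {
        return new SignificantTermsBuilder("sig")
                .field("fact_category")
                .include(new long[] { 10, 11, 12 });
    }
}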

SignificantTermsBuilder.java

@@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@@ -45,6 +46,8 @@ public class SignificantTermsBuilder extends AggregationBuilder<SignificantTerms
private int includeFlags;
private String excludePattern;
private int excludeFlags;
private String[] includeTerms = null;
private String[] excludeTerms = null;
private FilterBuilder filterBuilder;
private SignificanceHeuristicBuilder significanceHeuristicBuilder;
@@ -129,11 +132,45 @@ public class SignificantTermsBuilder extends AggregationBuilder<SignificantTerms
* @see java.util.regex.Pattern#compile(String, int)
*/
public SignificantTermsBuilder include(String regex, int flags) {
if (includeTerms != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
}
this.includePattern = regex;
this.includeFlags = flags;
return this;
}
/**
* Define a set of terms that should be aggregated.
*/
public SignificantTermsBuilder include(String [] terms) {
if (includePattern != null) {
throw new ElasticsearchIllegalArgumentException("include clause must be an array of exact values or a regex, not both");
}
this.includeTerms = terms;
return this;
}
/**
* Define a set of terms that should be aggregated.
*/
public SignificantTermsBuilder include(long [] terms) {
if (includePattern != null) {
throw new ElasticsearchIllegalArgumentException("include clause must be an array of exact values or a regex, not both");
}
this.includeTerms = longsArrToStringArr(terms);
return this;
}
private String[] longsArrToStringArr(long[] terms) {
String[] termsAsString = new String[terms.length];
for (int i = 0; i < terms.length; i++) {
termsAsString[i] = Long.toString(terms[i]);
}
return termsAsString;
}
/**
* Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular
* expression is based on the {@link java.util.regex.Pattern} class.
@@ -151,11 +188,37 @@ public class SignificantTermsBuilder extends AggregationBuilder<SignificantTerms
* @see java.util.regex.Pattern#compile(String, int)
*/
public SignificantTermsBuilder exclude(String regex, int flags) {
if (excludeTerms != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
}
this.excludePattern = regex;
this.excludeFlags = flags;
return this;
}
/**
* Define a set of terms that should not be aggregated.
*/
public SignificantTermsBuilder exclude(String [] terms) {
if (excludePattern != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
}
this.excludeTerms = terms;
return this;
}
/**
* Define a set of terms that should not be aggregated.
*/
public SignificantTermsBuilder exclude(long [] terms) {
if (excludePattern != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of longs or a regex, not both");
}
this.excludeTerms = longsArrToStringArr(terms);
return this;
}
@Override
protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@@ -176,6 +239,10 @@ public class SignificantTermsBuilder extends AggregationBuilder<SignificantTerms
.endObject();
}
}
if (includeTerms != null) {
builder.array("include", includeTerms);
}
if (excludePattern != null) {
if (excludeFlags == 0) {
builder.field("exclude", excludePattern);
@@ -186,6 +253,9 @@ public class SignificantTermsBuilder extends AggregationBuilder<SignificantTerms
.endObject();
}
}
if (excludeTerms != null) {
builder.array("exclude", excludeTerms);
}
if (filterBuilder != null) {
builder.field(SignificantTermsParametersParser.BACKGROUND_FILTER.getPreferredName());
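Based on the internalXContent changes above, a rough serialization sketch (not from the commit; exact JSON shape may differ) showing that the array form ends up as an "include"/"exclude" array in the request body; the aggregation and field names are hypothetical.

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;

public class SerializeIncludeArrayExample {

    static String asJson() throws Exception {
        SignificantTermsBuilder agg = new SignificantTermsBuilder("sig")
                .field("fact_category")
                .include(new long[] { 10, 11 });   // stored internally as a String[] of terms

        XContentBuilder out = XContentFactory.jsonBuilder().prettyPrint();
        out.startObject();
        agg.toXContent(out, ToXContent.EMPTY_PARAMS);   // "include" is written via builder.array(...)
        out.endObject();
        return out.string();
    }
}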

DoubleTermsAggregator.java

@@ -24,6 +24,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -37,8 +38,8 @@ import java.util.Arrays;
public class DoubleTermsAggregator extends LongTermsAggregator {
public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format, long estimatedBucketCount,
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError) {
super(name, factories, valuesSource, format, estimatedBucketCount, order, bucketCountThresholds, aggregationContext, parent, collectionMode, showTermDocCountError);
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter) {
super(name, factories, valuesSource, format, estimatedBucketCount, order, bucketCountThresholds, aggregationContext, parent, collectionMode, showTermDocCountError, longFilter);
}
@Override

LongTermsAggregator.java

@@ -27,6 +27,8 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude.LongFilter;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
@@ -46,13 +48,15 @@ public class LongTermsAggregator extends TermsAggregator {
protected final LongHash bucketOrds;
private boolean showTermDocCountError;
private SortedNumericDocValues values;
private LongFilter longFilter;
public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format, long estimatedBucketCount,
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError) {
Terms.Order order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter) {
super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent, bucketCountThresholds, order, subAggCollectMode);
this.valuesSource = valuesSource;
this.showTermDocCountError = showTermDocCountError;
this.formatter = format != null ? format.formatter() : null;
this.longFilter = longFilter;
bucketOrds = new LongHash(estimatedBucketCount, aggregationContext.bigArrays());
}
@@ -82,6 +86,7 @@ public class LongTermsAggregator extends TermsAggregator {
for (int i = 0; i < valuesCount; ++i) {
final long val = values.valueAt(i);
if (previous != val || i == 0) {
if ((longFilter == null) || (longFilter.accept(val))) {
long bucketOrdinal = bucketOrds.add(val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = - 1 - bucketOrdinal;
@@ -89,6 +94,8 @@ public class LongTermsAggregator extends TermsAggregator {
} else {
collectBucket(doc, bucketOrdinal);
}
}
previous = val;
}
}

TermsAggregatorFactory.java

@@ -231,16 +231,26 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory {
return execution.create(name, factories, valuesSource, estimatedBucketCount, maxOrd, order, bucketCountThresholds, includeExclude, aggregationContext, parent, subAggCollectMode, showTermDocCountError);
}
if (includeExclude != null) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support the include/exclude " +
"settings as it can only be applied to string values");
if ((includeExclude != null) && (includeExclude.isRegexBased())) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude " +
"settings as they can only be applied to string fields. Use an array of numeric values for include/exclude clauses used to filter numeric fields");
}
if (valuesSource instanceof ValuesSource.Numeric) {
IncludeExclude.LongFilter longFilter = null;
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode, showTermDocCountError);
if (includeExclude != null) {
longFilter = includeExclude.convertToDoubleFilter();
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount, order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode, showTermDocCountError);
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
estimatedBucketCount, order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode,
showTermDocCountError, longFilter);
}
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), estimatedBucketCount,
order, bucketCountThresholds, aggregationContext, parent, subAggCollectMode, showTermDocCountError, longFilter);
}
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field() +

TermsBuilder.java

@@ -117,12 +117,51 @@ public class TermsBuilder extends ValuesSourceAggregationBuilder<TermsBuilder> {
*/
public TermsBuilder include(String [] terms) {
if (includePattern != null) {
throw new ElasticsearchIllegalArgumentException("include clause must be an array of strings or a regex, not both");
throw new ElasticsearchIllegalArgumentException("include clause must be an array of exact values or a regex, not both");
}
this.includeTerms = terms;
return this;
}
/**
* Define a set of terms that should be aggregated.
*/
public TermsBuilder include(long [] terms) {
if (includePattern != null) {
throw new ElasticsearchIllegalArgumentException("include clause must be an array of exact values or a regex, not both");
}
this.includeTerms = longsArrToStringArr(terms);
return this;
}
private String[] longsArrToStringArr(long[] terms) {
String[] termsAsString = new String[terms.length];
for (int i = 0; i < terms.length; i++) {
termsAsString[i] = Long.toString(terms[i]);
}
return termsAsString;
}
/**
* Define a set of terms that should be aggregated.
*/
public TermsBuilder include(double [] terms) {
if (includePattern != null) {
throw new ElasticsearchIllegalArgumentException("include clause must be an array of exact values or a regex, not both");
}
this.includeTerms = doubleArrToStringArr(terms);
return this;
}
private String[] doubleArrToStringArr(double[] terms) {
String[] termsAsString = new String[terms.length];
for (int i = 0; i < terms.length; i++) {
termsAsString[i] = Double.toString(terms[i]);
}
return termsAsString;
}
/**
* Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular
* expression is based on the {@link java.util.regex.Pattern} class.
@@ -141,7 +180,7 @@ public class TermsBuilder extends ValuesSourceAggregationBuilder<TermsBuilder> {
*/
public TermsBuilder exclude(String regex, int flags) {
if (excludeTerms != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
}
this.excludePattern = regex;
this.excludeFlags = flags;
@@ -153,13 +192,37 @@ public class TermsBuilder extends ValuesSourceAggregationBuilder<TermsBuilder> {
*/
public TermsBuilder exclude(String [] terms) {
if (excludePattern != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
}
this.excludeTerms = terms;
return this;
}
/**
* Define a set of terms that should not be aggregated.
*/
public TermsBuilder exclude(long [] terms) {
if (excludePattern != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
}
this.excludeTerms = longsArrToStringArr(terms);
return this;
}
/**
* Define a set of terms that should not be aggregated.
*/
public TermsBuilder exclude(double [] terms) {
if (excludePattern != null) {
throw new ElasticsearchIllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
}
this.excludeTerms = doubleArrToStringArr(terms);
return this;
}
/**
* When using scripts, the value type indicates the types of the values the script is generating.
*/

IncludeExclude.java

@@ -18,12 +18,11 @@
*/
package org.elasticsearch.search.aggregations.bucket.terms.support;
import com.carrotsearch.hppc.LongOpenHashSet;
import com.carrotsearch.hppc.LongSet;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.*;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.regex.Regex;
@@ -44,6 +43,35 @@ import java.util.regex.Pattern;
*/
public class IncludeExclude {
// The includeValue and excludeValue ByteRefs which are the result of the parsing
// process are converted into a LongFilter when used on numeric fields
// in the index.
public static class LongFilter {
private LongSet valids;
private LongSet invalids;
private LongFilter(int numValids, int numInvalids) {
if (numValids > 0) {
valids = new LongOpenHashSet(numValids);
}
if (numInvalids > 0) {
invalids = new LongOpenHashSet(numInvalids);
}
}
public boolean accept(long value) {
return ((valids == null) || (valids.contains(value))) && ((invalids == null) || (!invalids.contains(value)));
}
private void addAccept(long val) {
valids.add(val);
}
private void addReject(long val) {
invalids.add(val);
}
}
private final Matcher include;
private final Matcher exclude;
private final CharsRefBuilder scratch = new CharsRefBuilder();
@@ -281,4 +309,43 @@ public class IncludeExclude {
}
}
public boolean isRegexBased() {
return hasRegexTest;
}
public LongFilter convertToLongFilter() {
int numValids = includeValues == null ? 0 : includeValues.size();
int numInvalids = excludeValues == null ? 0 : excludeValues.size();
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
result.addAccept(Long.parseLong(val.utf8ToString()));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
result.addReject(Long.parseLong(val.utf8ToString()));
}
}
return result;
}
public LongFilter convertToDoubleFilter() {
int numValids = includeValues == null ? 0 : includeValues.size();
int numInvalids = excludeValues == null ? 0 : excludeValues.size();
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
double dval=Double.parseDouble(val.utf8ToString());
result.addAccept( NumericUtils.doubleToSortableLong(dval));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
double dval=Double.parseDouble(val.utf8ToString());
result.addReject( NumericUtils.doubleToSortableLong(dval));
}
}
return result;
}
}
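For reference, a self-contained restatement (an illustration, not the commit's code path) of the accept semantics the new LongFilter encodes: a value passes when it is in the include set, if one was supplied, and is not in the exclude set; double values are routed through the same long-based sets via their sortable-long encoding.

import com.carrotsearch.hppc.LongOpenHashSet;
import org.apache.lucene.util.NumericUtils;

// Illustration only; the real filters are built by convertToLongFilter() and
// convertToDoubleFilter() above.
public class LongFilterSketch {
    private final LongOpenHashSet valids = new LongOpenHashSet();   // "include" values
    private final LongOpenHashSet invalids = new LongOpenHashSet(); // "exclude" values

    void include(long value) { valids.add(value); }
    void exclude(long value) { invalids.add(value); }

    boolean accept(long value) {
        // An empty include set means every value is a candidate; the exclude set
        // then removes individual values.
        return (valids.isEmpty() || valids.contains(value)) && !invalids.contains(value);
    }

    // Doubles are filtered with the same sets by applying the index encoding first.
    boolean acceptDouble(double value) {
        return accept(NumericUtils.doubleToSortableLong(value));
    }
}

Reusing the sortable-long encoding lets one long-based filter serve both integer and floating-point fields, which fits how DoubleTermsAggregator above reuses the LongTermsAggregator bucket machinery.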

DoubleTermsTests.java

@@ -280,6 +280,38 @@ public class DoubleTermsTests extends ElasticsearchIntegrationTest {
}
}
@Test
public void singleValueFieldWithFiltering() throws Exception {
double includes[] = { 1, 2, 3, 98.2 };
double excludes[] = { 2, 4, 99 };
double empty[] = {};
testIncludeExcludeResults(includes, empty, new double[] { 1, 2, 3 });
testIncludeExcludeResults(includes, excludes, new double[] { 1, 3 });
testIncludeExcludeResults(empty, excludes, new double[] { 0, 1, 3 });
}
private void testIncludeExcludeResults(double[] includes, double[] excludes, double[] expecteds) {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.include(includes)
.exclude(excludes)
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
assertThat(terms.getBuckets().size(), equalTo(expecteds.length));
for (int i = 0; i < expecteds.length; i++) {
Terms.Bucket bucket = terms.getBucketByKey("" + expecteds[i]);
assertThat(bucket, notNullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
}
}
@Test
public void singleValueField_OrderedByTermAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx").setTypes("type")

LongTermsTests.java

@@ -256,6 +256,37 @@ public class LongTermsTests extends ElasticsearchIntegrationTest {
}
}
@Test
public void singleValueFieldWithFiltering() throws Exception {
long includes[] = { 1, 2, 3, 98 };
long excludes[] = { -1, 2, 4 };
long empty[] = {};
testIncludeExcludeResults(includes, empty, new long[] { 1, 2, 3 });
testIncludeExcludeResults(includes, excludes, new long[] { 1, 3 });
testIncludeExcludeResults(empty, excludes, new long[] { 0, 1, 3 });
}
private void testIncludeExcludeResults(long[] includes, long[] excludes, long[] expecteds) {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.include(includes)
.exclude(excludes)
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
assertThat(terms.getBuckets().size(), equalTo(expecteds.length));
for (int i = 0; i < expecteds.length; i++) {
Terms.Bucket bucket = terms.getBucketByKey("" + expecteds[i]);
assertThat(bucket, notNullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
}
}
@Test
public void singleValueField_WithMaxSize() throws Exception {
SearchResponse response = client().prepareSearch("idx").setTypes("high_card_type")

SignificantTermsTests.java

@@ -124,6 +124,23 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
assertTrue(topCategory.equals(new Long(SNOWBOARDING_CATEGORY)));
}
@Test
public void structuredAnalysisWithIncludeExclude() throws Exception {
long[] excludeTerms = { MUSIC_CATEGORY };
SearchResponse response = client().prepareSearch("test")
.setSearchType(SearchType.QUERY_AND_FETCH)
.setQuery(new TermQueryBuilder("_all", "paul"))
.setFrom(0).setSize(60).setExplain(true)
.addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint())
.minDocCount(1).exclude(excludeTerms))
.execute()
.actionGet();
assertSearchResponse(response);
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Number topCategory = topTerms.getBuckets().iterator().next().getKeyAsNumber();
assertTrue(topCategory.equals(new Long(OTHER_CATEGORY)));
}
@Test
public void includeExclude() throws Exception {
SearchResponse response = client().prepareSearch("test")
@@ -160,6 +177,43 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
assertThat(terms.contains("weller"), is(true));
}
@Test
public void includeExcludeExactValues() throws Exception {
String []incExcTerms={"weller","nosuchterm"};
SearchResponse response = client().prepareSearch("test")
.setQuery(new TermQueryBuilder("_all", "weller"))
.addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint())
.exclude(incExcTerms))
.get();
assertSearchResponse(response);
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Set<String> terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
}
assertThat(terms, hasSize(6));
assertThat(terms.contains("jam"), is(true));
assertThat(terms.contains("council"), is(true));
assertThat(terms.contains("style"), is(true));
assertThat(terms.contains("paul"), is(true));
assertThat(terms.contains("of"), is(true));
assertThat(terms.contains("the"), is(true));
response = client().prepareSearch("test")
.setQuery(new TermQueryBuilder("_all", "weller"))
.addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint())
.include(incExcTerms))
.get();
assertSearchResponse(response);
topTerms = response.getAggregations().get("mySignificantTerms");
terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
}
assertThat(terms, hasSize(1));
assertThat(terms.contains("weller"), is(true));
}
@Test
public void unmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx_unmapped")