Fix parsing RareTerms aggregation response in RestHighLevelClient (#65144)
Backport of #64454

- Add LongRareTerms and StringRareTerms to the DefaultNamedXContents so that the RareTerms aggregation response can be parsed correctly.
- Add a testSearchWithRareTermsAgg method to verify that the RareTerms aggregation response is parsed correctly.
- Add the test code needed for AggregationsTests to exercise the new parsers and pass.

Co-authored-by: bellengao <gbl_long@163.com>
parent 196c2a52c1
commit b2475f9ccf
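For context, a minimal client-side sketch of the call path this change fixes: before the rare-terms parsers were registered, reading a rare_terms aggregation out of a SearchResponse failed to parse. The host, index name ("logs"), aggregation name, and field name ("type") below are illustrative assumptions, not taken from the commit.

import org.apache.http.HttpHost;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.search.aggregations.bucket.terms.RareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class RareTermsExample {
    public static void main(String[] args) throws Exception {
        // Sketch only: assumes a local cluster and an index "logs" with a keyword field "type".
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            SearchSourceBuilder source = new SearchSourceBuilder()
                .size(0)
                .aggregation(new RareTermsAggregationBuilder("rare_types")
                    .field("type.keyword")
                    .maxDocCount(2));
            SearchRequest request = new SearchRequest("logs").source(source);
            SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            // Before this commit, parsing the response here failed because the
            // rare_terms aggregation types had no registered parser in the client.
            RareTerms rare = response.getAggregations().get("rare_types");
            rare.getBuckets().forEach(b ->
                System.out.println(b.getKeyAsString() + " -> " + b.getDocCount()));
        }
    }
}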
@@ -131,8 +131,11 @@ import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler;
import org.elasticsearch.search.aggregations.bucket.terms.LongRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;

@@ -140,6 +143,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;

@@ -1953,6 +1957,8 @@ public class RestHighLevelClient implements Closeable {
        map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
        map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
        map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
        map.put(LongRareTerms.NAME, (p, c) -> ParsedLongRareTerms.fromXContent(p, (String) c));
        map.put(StringRareTerms.NAME, (p, c) -> ParsedStringRareTerms.fromXContent(p, (String) c));
        map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c));
        map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c));
        map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c));
@@ -68,6 +68,8 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSou
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.RareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats;

@@ -287,6 +289,26 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
        assertEquals(0, type2.getAggregations().asList().size());
    }

    public void testSearchWithRareTermsAgg() throws IOException {
        SearchRequest searchRequest = new SearchRequest();
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.aggregation(new RareTermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING)
            .field("type.keyword").maxDocCount(2));
        searchSourceBuilder.size(0);
        searchRequest.source(searchSourceBuilder);
        SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
        assertSearchHeader(searchResponse);
        assertNull(searchResponse.getSuggest());
        assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
        assertEquals(0, searchResponse.getHits().getHits().length);
        RareTerms termsAgg = searchResponse.getAggregations().get("agg1");
        assertEquals("agg1", termsAgg.getName());
        assertEquals(1, termsAgg.getBuckets().size());
        RareTerms.Bucket type2 = termsAgg.getBucketByKey("type2");
        assertEquals(2, type2.getDocCount());
        assertEquals(0, type2.getAggregations().asList().size());
    }

    public void testSearchWithCompositeAgg() throws IOException {
        SearchRequest searchRequest = new SearchRequest();
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
@@ -0,0 +1,86 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

public class ParsedLongRareTerms extends ParsedRareTerms {
    @Override
    public String getType() {
        return LongRareTerms.NAME;
    }

    private static final ObjectParser<ParsedLongRareTerms, Void> PARSER =
        new ObjectParser<>(ParsedLongRareTerms.class.getSimpleName(), true, ParsedLongRareTerms::new);

    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    public static ParsedLongRareTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedLongRareTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedRareTerms.ParsedBucket {

        private Long key;

        @Override
        public Object getKey() {
            return key;
        }

        @Override
        public String getKeyAsString() {
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return Long.toString(key);
            }
            return null;
        }

        public Number getKeyAsNumber() {
            return key;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            builder.field(CommonFields.KEY.getPreferredName(), key);
            if (super.getKeyAsString() != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            return builder;
        }

        static ParsedLongRareTerms.ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseRareTermsBucketXContent(parser, ParsedLongRareTerms.ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue());
        }
    }
}
@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

public abstract class ParsedRareTerms extends ParsedMultiBucketAggregation<ParsedRareTerms.ParsedBucket> implements RareTerms {
    @Override
    public List<? extends RareTerms.Bucket> getBuckets() {
        return buckets;
    }

    @Override
    public RareTerms.Bucket getBucketByKey(String term) {
        for (RareTerms.Bucket bucket : getBuckets()) {
            if (bucket.getKeyAsString().equals(term)) {
                return bucket;
            }
        }
        return null;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.startArray(CommonFields.BUCKETS.getPreferredName());
        for (RareTerms.Bucket bucket : getBuckets()) {
            bucket.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }

    static void declareParsedTermsFields(final ObjectParser<? extends ParsedRareTerms, Void> objectParser,
                                         final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply);
    }

    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements RareTerms.Bucket {

        @Override
        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        static <B extends ParsedBucket> B parseRareTermsBucketXContent(final XContentParser parser, final Supplier<B> bucketSupplier,
                                                                       final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
            throws IOException {

            final B bucket = bucketSupplier.get();
            final List<Aggregation> aggregations = new ArrayList<>();

            XContentParser.Token token;
            String currentFieldName = parser.currentName();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class,
                        aggregations::add);
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }
    }
}
@@ -0,0 +1,93 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import java.io.IOException;
import java.nio.CharBuffer;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

public class ParsedStringRareTerms extends ParsedRareTerms {
    @Override
    public String getType() {
        return StringRareTerms.NAME;
    }

    private static final ObjectParser<ParsedStringRareTerms, Void> PARSER =
        new ObjectParser<>(ParsedStringRareTerms.class.getSimpleName(), true, ParsedStringRareTerms::new);

    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    public static ParsedStringRareTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedStringRareTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    public static class ParsedBucket extends ParsedRareTerms.ParsedBucket {

        private BytesRef key;

        @Override
        public Object getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return key.utf8ToString();
            }
            return null;
        }

        public Number getKeyAsNumber() {
            if (key != null) {
                return Double.parseDouble(key.utf8ToString());
            }
            return null;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        static ParsedStringRareTerms.ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseRareTermsBucketXContent(parser, ParsedStringRareTerms.ParsedBucket::new, (p, bucket) -> {
                CharBuffer cb = p.charBufferOrNull();
                if (cb == null) {
                    bucket.key = null;
                } else {
                    bucket.key = new BytesRef(cb);
                }
            });
        }
    }
}
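As an aside (not part of the commit), a minimal sketch of exercising one of the new parsers directly against a hand-written bucket fragment, to show the shape of JSON that parseRareTermsBucketXContent handles; the payload, the standalone parser setup, and the printed value are illustrative assumptions.

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringRareTerms;

public class ParsedStringRareTermsSketch {
    public static void main(String[] args) throws Exception {
        // Hand-written fragment in the shape the parser expects:
        // a "buckets" array of { key, doc_count } objects (no sub-aggregations here).
        String json = "{\"buckets\":[{\"key\":\"type2\",\"doc_count\":2}]}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            parser.nextToken(); // position the parser on the opening START_OBJECT
            ParsedStringRareTerms agg = ParsedStringRareTerms.fromXContent(parser, "agg1");
            // getBucketByKey matches on the parsed key string; doc_count comes from the fragment.
            System.out.println(agg.getBucketByKey("type2").getDocCount()); // expected to print 2
        }
    }
}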
@@ -51,9 +51,11 @@ import org.elasticsearch.search.aggregations.bucket.range.InternalGeoDistanceTes
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTests;
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.LongRareTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantLongTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantStringTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringRareTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
import org.elasticsearch.search.aggregations.metrics.InternalExtendedStatsTests;
import org.elasticsearch.search.aggregations.metrics.InternalMaxTests;

@@ -135,6 +137,8 @@ public class AggregationsTests extends ESTestCase {
        aggsTests.add(new LongTermsTests());
        aggsTests.add(new DoubleTermsTests());
        aggsTests.add(new StringTermsTests());
        aggsTests.add(new LongRareTermsTests());
        aggsTests.add(new StringRareTermsTests());
        aggsTests.add(new InternalMissingTests());
        aggsTests.add(new InternalNestedTests());
        aggsTests.add(new InternalReverseNestedTests());
@@ -0,0 +1,74 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import org.junit.Before;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public abstract class InternalRareTermsTestCase extends InternalMultiBucketAggregationTestCase<InternalRareTerms<?, ?>> {

    private long maxDocCount;

    @Before
    public void init() {
        maxDocCount = randomIntBetween(1, 5);
    }

    @Override
    protected final InternalRareTerms<?, ?> createTestInstance(String name,
                                                               Map<String, Object> metadata,
                                                               InternalAggregations aggregations) {
        return createTestInstance(name, metadata, aggregations, maxDocCount);
    }

    protected abstract InternalRareTerms<?, ?> createTestInstance(String name,
                                                                  Map<String, Object> metadata,
                                                                  InternalAggregations aggregations,
                                                                  long maxDocCount);

    @Override
    protected InternalRareTerms<?, ?> createUnmappedInstance(String name, Map<String, Object> metadata) {
        return new UnmappedRareTerms(name, metadata);
    }

    @Override
    protected void assertReduced(InternalRareTerms<?, ?> reduced, List<InternalRareTerms<?, ?>> inputs) {
        Map<Object, Long> reducedCounts = toCounts(reduced.getBuckets().stream());
        Map<Object, Long> totalCounts = toCounts(inputs.stream().map(RareTerms::getBuckets).flatMap(List::stream));

        Map<Object, Long> expectedReducedCounts = new HashMap<>(totalCounts);
        expectedReducedCounts.keySet().retainAll(reducedCounts.keySet());
        assertEquals(expectedReducedCounts, reducedCounts);
    }

    private static Map<Object, Long> toCounts(Stream<? extends RareTerms.Bucket> buckets) {
        return buckets.collect(Collectors.toMap(
            RareTerms.Bucket::getKey,
            RareTerms.Bucket::getDocCount,
            Long::sum));
    }
}
@@ -0,0 +1,113 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LongRareTermsTests extends InternalRareTermsTestCase {

    @Override
    protected InternalRareTerms<?, ?> createTestInstance(String name,
                                                         Map<String, Object> metadata,
                                                         InternalAggregations aggregations,
                                                         long maxDocCount) {
        BucketOrder order = BucketOrder.count(false);
        DocValueFormat format = randomNumericDocValueFormat();
        List<LongRareTerms.Bucket> buckets = new ArrayList<>();
        final int numBuckets = randomNumberOfBuckets();
        for (int i = 0; i < numBuckets; ++i) {
            long term = randomLong();
            int docCount = randomIntBetween(1, 100);
            buckets.add(new LongRareTerms.Bucket(term, docCount, aggregations, format));
        }
        SetBackedScalingCuckooFilter filter = new SetBackedScalingCuckooFilter(1000, Randomness.get(), 0.01);
        return new LongRareTerms(name, order, metadata, format, buckets, maxDocCount, filter);
    }

    @Override
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedLongRareTerms.class;
    }

    @Override
    protected InternalRareTerms<?, ?> mutateInstance(InternalRareTerms<?, ?> instance) {
        if (instance instanceof LongRareTerms) {
            LongRareTerms longRareTerms = (LongRareTerms) instance;
            String name = longRareTerms.getName();
            BucketOrder order = longRareTerms.order;
            DocValueFormat format = longRareTerms.format;
            long maxDocCount = longRareTerms.maxDocCount;
            Map<String, Object> metadata = longRareTerms.getMetadata();
            List<LongRareTerms.Bucket> buckets = longRareTerms.getBuckets();
            switch (between(0, 3)) {
                case 0:
                    name += randomAlphaOfLength(5);
                    break;
                case 1:
                    maxDocCount = between(1, 5);
                    break;
                case 2:
                    buckets = new ArrayList<>(buckets);
                    buckets.add(new LongRareTerms.Bucket(randomLong(), randomNonNegativeLong(), InternalAggregations.EMPTY, format));
                    break;
                case 3:
                    if (metadata == null) {
                        metadata = new HashMap<>(1);
                    } else {
                        metadata = new HashMap<>(instance.getMetadata());
                    }
                    metadata.put(randomAlphaOfLength(15), randomInt());
                    break;
                default:
                    throw new AssertionError("Illegal randomisation branch");
            }
            return new LongRareTerms(name, order, metadata, format, buckets, maxDocCount, null);
        } else {
            String name = instance.getName();
            Map<String, Object> metadata = instance.getMetadata();
            switch (between(0, 1)) {
                case 0:
                    name += randomAlphaOfLength(5);
                    break;
                case 1:
                    if (metadata == null) {
                        metadata = new HashMap<>(1);
                    } else {
                        metadata = new HashMap<>(instance.getMetadata());
                    }
                    metadata.put(randomAlphaOfLength(15), randomInt());
                    break;
                default:
                    throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedRareTerms(name, metadata);
        }
    }
}
@@ -0,0 +1,118 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class StringRareTermsTests extends InternalRareTermsTestCase {

    @Override
    protected InternalRareTerms<?, ?> createTestInstance(String name,
                                                         Map<String, Object> metadata,
                                                         InternalAggregations aggregations,
                                                         long maxDocCount) {
        BucketOrder order = BucketOrder.count(false);
        DocValueFormat format = DocValueFormat.RAW;
        List<StringRareTerms.Bucket> buckets = new ArrayList<>();
        final int numBuckets = randomNumberOfBuckets();
        for (int i = 0; i < numBuckets; ++i) {
            Set<BytesRef> terms = new HashSet<>();
            BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10)));
            int docCount = randomIntBetween(1, 100);
            buckets.add(new StringRareTerms.Bucket(term, docCount, aggregations, format));
        }
        SetBackedScalingCuckooFilter filter = new SetBackedScalingCuckooFilter(1000, Randomness.get(), 0.01);
        return new StringRareTerms(name, order, metadata, format, buckets, maxDocCount, filter);
    }

    @Override
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedStringRareTerms.class;
    }

    @Override
    protected InternalRareTerms<?, ?> mutateInstance(InternalRareTerms<?, ?> instance) {
        if (instance instanceof StringRareTerms) {
            StringRareTerms stringRareTerms = (StringRareTerms) instance;
            String name = stringRareTerms.getName();
            BucketOrder order = stringRareTerms.order;
            DocValueFormat format = stringRareTerms.format;
            long maxDocCount = stringRareTerms.maxDocCount;
            Map<String, Object> metadata = stringRareTerms.getMetadata();
            List<StringRareTerms.Bucket> buckets = stringRareTerms.getBuckets();
            switch (between(0, 3)) {
                case 0:
                    name += randomAlphaOfLength(5);
                    break;
                case 1:
                    maxDocCount = between(1, 5);
                    break;
                case 2:
                    buckets = new ArrayList<>(buckets);
                    buckets.add(new StringRareTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)), randomNonNegativeLong(),
                        InternalAggregations.EMPTY, format));
                    break;
                case 3:
                    if (metadata == null) {
                        metadata = new HashMap<>(1);
                    } else {
                        metadata = new HashMap<>(instance.getMetadata());
                    }
                    metadata.put(randomAlphaOfLength(15), randomInt());
                    break;
                default:
                    throw new AssertionError("Illegal randomisation branch");
            }
            return new StringRareTerms(name, order, metadata, format, buckets, maxDocCount, null);
        } else {
            String name = instance.getName();
            Map<String, Object> metadata = instance.getMetadata();
            switch (between(0, 1)) {
                case 0:
                    name += randomAlphaOfLength(5);
                    break;
                case 1:
                    if (metadata == null) {
                        metadata = new HashMap<>(1);
                    } else {
                        metadata = new HashMap<>(instance.getMetadata());
                    }
                    metadata.put(randomAlphaOfLength(15), randomInt());
                    break;
                default:
                    throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedRareTerms(name, metadata);
        }
    }
}
@@ -86,14 +86,18 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilde
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.LongRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringRareTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;

@@ -252,6 +256,8 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
        map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
        map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
        map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
        map.put(LongRareTerms.NAME, (p, c) -> ParsedLongRareTerms.fromXContent(p, (String) c));
        map.put(StringRareTerms.NAME, (p, c) -> ParsedStringRareTerms.fromXContent(p, (String) c));
        map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c));
        map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c));
        map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c));