Share XContent rendering code in significant terms aggregations (#24677)
The rendering methods in the string and long significant terms aggregations and their buckets are very similar. They can be factored out into the InternalSignificantTerms and InternalMappedSignificantTerms classes.
parent 578223f679
commit dcb63ab8bc
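For illustration, here is a minimal, self-contained sketch of the factoring this commit performs: the base bucket class gains a final template method that renders the shared fields (doc_count, score, bg_count) once, and each subclass supplies only its key rendering. All names below are simplified stand-ins, and plain strings replace the real XContentBuilder plumbing, so this is a sketch of the pattern, not the actual Elasticsearch code shown in the diff.

// Sketch only: simplified stand-ins for InternalSignificantTerms.Bucket and the
// SignificantStringTerms / SignificantLongTerms bucket subclasses.
import java.util.Locale;

abstract class BucketSketch {
    final long docCount;
    final double score;
    final long supersetDf;

    BucketSketch(long docCount, double score, long supersetDf) {
        this.docCount = docCount;
        this.score = score;
        this.supersetDf = supersetDf;
    }

    // Template method: the shared fields are rendered once, here. Marking it
    // final keeps subclasses from re-implementing (and diverging on) this part.
    final String toXContent() {
        return String.format(Locale.ROOT, "{%s,\"doc_count\":%d,\"score\":%s,\"bg_count\":%d}",
                keyToXContent(), docCount, score, supersetDf);
    }

    // The only piece that differs between the string and long variants.
    abstract String keyToXContent();
}

class StringBucketSketch extends BucketSketch {
    final String term;

    StringBucketSketch(String term, long docCount, double score, long supersetDf) {
        super(docCount, score, supersetDf);
        this.term = term;
    }

    @Override
    String keyToXContent() {
        return "\"key\":\"" + term + "\""; // string keys are quoted
    }
}

class LongBucketSketch extends BucketSketch {
    final long term;

    LongBucketSketch(long term, long docCount, double score, long supersetDf) {
        super(docCount, score, supersetDf);
        this.term = term;
    }

    @Override
    String keyToXContent() {
        return "\"key\":" + term; // numeric keys are unquoted
    }
}

public class SharedRenderingSketch {
    public static void main(String[] args) {
        // Both variants share the surrounding object layout; only the key differs:
        // {"key":"h5n1","doc_count":5,"score":1.5,"bg_count":10}
        // {"key":7,"doc_count":5,"score":1.5,"bg_count":10}
        System.out.println(new StringBucketSketch("h5n1", 5, 1.5, 10).toXContent());
        System.out.println(new LongBucketSketch(7, 5, 1.5, 10).toXContent());
    }
}

Declaring the shared toXContent final in the base class is the point of the exercise: the string and long variants can no longer drift apart in how they render the common fields, which the two near-duplicate implementations removed below were at risk of doing.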
InternalMappedSignificantTerms.java

@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -123,4 +124,19 @@ public abstract class InternalMappedSignificantTerms<
     protected int doHashCode() {
         return Objects.hash(super.doHashCode(), format, subsetSize, supersetSize, significanceHeuristic, buckets, bucketMap);
     }
+
+    @Override
+    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+        builder.field(CommonFields.DOC_COUNT.getPreferredName(), subsetSize);
+        builder.startArray(CommonFields.BUCKETS.getPreferredName());
+        for (Bucket bucket : buckets) {
+            //There is a condition (presumably when only one shard has a bucket?) where reduce is not called
+            // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer
+            if (bucket.subsetDf >= minDocCount) {
+                bucket.toXContent(builder, params);
+            }
+        }
+        builder.endArray();
+        return builder;
+    }
 }
InternalSignificantTerms.java

@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.InternalAggregation;
@@ -43,6 +44,9 @@ import java.util.Objects;
 public abstract class InternalSignificantTerms<A extends InternalSignificantTerms<A, B>, B extends InternalSignificantTerms.Bucket<B>>
         extends InternalMultiBucketAggregation<A, B> implements SignificantTerms, ToXContent {

+    private static final String SCORE = "score";
+    private static final String BG_COUNT = "bg_count";
+
     @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
     public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucket
             implements SignificantTerms.Bucket {
@@ -156,6 +160,20 @@ public abstract class InternalSignificantTerms<A extends InternalSignificantTerm
         public int hashCode() {
             return Objects.hash(getClass(), bucketOrd, aggregations, score, format);
         }
+
+        @Override
+        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            keyToXContent(builder);
+            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
+            builder.field(SCORE, score);
+            builder.field(BG_COUNT, supersetDf);
+            aggregations.toXContentInternal(builder, params);
+            builder.endObject();
+            return builder;
+        }
+
+        protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException;
     }

     protected final int requiredSize;
SignificantLongTerms.java

@@ -97,17 +97,11 @@ public class SignificantLongTerms extends InternalMappedSignificantTerms<Signifi
         }

         @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
+        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
             builder.field(CommonFields.KEY.getPreferredName(), term);
             if (format != DocValueFormat.RAW) {
                 builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
             }
-            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
-            builder.field("score", score);
-            builder.field("bg_count", supersetDf);
-            aggregations.toXContentInternal(builder, params);
-            builder.endObject();
             return builder;
         }

@@ -159,17 +153,6 @@ public class SignificantLongTerms extends InternalMappedSignificantTerms<Signifi
             supersetSize, significanceHeuristic, buckets);
     }

-    @Override
-    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
-        builder.field("doc_count", subsetSize);
-        builder.startArray(CommonFields.BUCKETS.getPreferredName());
-        for (Bucket bucket : buckets) {
-            bucket.toXContent(builder, params);
-        }
-        builder.endArray();
-        return builder;
-    }
-
     @Override
     protected Bucket[] createBucketsArray(int size) {
         return new Bucket[size];
SignificantStringTerms.java

@@ -103,15 +103,8 @@ public class SignificantStringTerms extends InternalMappedSignificantTerms<Signi
         }

         @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(CommonFields.KEY.getPreferredName(), getKeyAsString());
-            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
-            builder.field("score", score);
-            builder.field("bg_count", supersetDf);
-            aggregations.toXContentInternal(builder, params);
-            builder.endObject();
-            return builder;
+        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
+            return builder.field(CommonFields.KEY.getPreferredName(), getKeyAsString());
         }

         @Override
@@ -162,21 +155,6 @@ public class SignificantStringTerms extends InternalMappedSignificantTerms<Signi
             supersetSize, significanceHeuristic, buckets);
     }

-    @Override
-    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
-        builder.field("doc_count", subsetSize);
-        builder.startArray(CommonFields.BUCKETS.getPreferredName());
-        for (Bucket bucket : buckets) {
-            //There is a condition (presumably when only one shard has a bucket?) where reduce is not called
-            // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer
-            if (bucket.subsetDf >= minDocCount) {
-                bucket.toXContent(builder, params);
-            }
-        }
-        builder.endArray();
-        return builder;
-    }
-
     @Override
     protected Bucket[] createBucketsArray(int size) {
         return new Bucket[size];