Facets: Remove redundant data returned as part of facet response (for example, the field name being faceted), closes #655.

kimchy 2011-01-29 20:44:58 +02:00
parent 7fa5b0cb57
commit 0b09fd0806
42 changed files with 62 additions and 558 deletions
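The diffs below repeat one pattern across every facet implementation: request-side metadata (the faceted field name, value field name, distance unit, interval, comparator) is dropped from the internal facet object, its constructor, its stream serialization, and its XContent rendering, since the client already knows what it asked for. The following plain-Java sketch is illustrative only and is not part of the commit; the class and names used here (StatsFacet, price_stats, DataOutputStream instead of elasticsearch's StreamOutput) are assumptions made for a runnable, self-contained example of the before/after shape of such a response object.

// Illustrative sketch (not part of the commit): a stripped-down "statistical facet"
// response object showing what gets removed and what is still serialized.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class FacetResponseSketch {

    static final class StatsFacet {
        final String name;          // still serialized: identifies the facet in the response
        // final String fieldName; // removed by this commit: redundant, the client knows the faceted field
        final long count;
        final double min, max, total;

        StatsFacet(String name, long count, double min, double max, double total) {
            this.name = name;
            this.count = count;
            this.min = min;
            this.max = max;
            this.total = total;
        }

        // Mirrors the writeTo() changes below: only the name and the computed
        // statistics go over the wire, not the request-side metadata.
        void writeTo(DataOutputStream out) throws IOException {
            out.writeUTF(name);
            out.writeLong(count);
            out.writeDouble(min);
            out.writeDouble(max);
            out.writeDouble(total);
        }
    }

    public static void main(String[] args) throws IOException {
        StatsFacet facet = new StatsFacet("price_stats", 42, 1.0, 99.0, 1234.5);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        facet.writeTo(new DataOutputStream(bytes));
        System.out.println("serialized facet is " + bytes.size() + " bytes, with no redundant field name");
    }
}

The same trimming shows up in each toXContent() method in the diffs: the JSON response no longer echoes _field, _key_field, _value_field, _unit, _interval or _comparator, only the facet type and the computed entries.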

View File: GeoDistanceFacet.java

@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.geodistance;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.search.facet.Facet;
import java.util.List;
@ -34,18 +33,6 @@ public interface GeoDistanceFacet extends Facet, Iterable<GeoDistanceFacet.Entry
*/
public static final String TYPE = "geo_distance";
String fieldName();
String getFieldName();
String valueFieldName();
String getValueFieldName();
DistanceUnit unit();
DistanceUnit getUnit();
/**
* An ordered list of geo distance facet entries.
*/

View File: GeoDistanceFacetCollector.java

@ -38,8 +38,6 @@ import java.io.IOException;
*/
public class GeoDistanceFacetCollector extends AbstractFacetCollector {
protected final String fieldName;
protected final String indexFieldName;
protected final double lat;
@ -59,7 +57,6 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
public GeoDistanceFacetCollector(String facetName, String fieldName, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.lat = lat;
this.lon = lon;
this.unit = unit;
@ -116,6 +113,6 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalGeoDistanceFacet(facetName, fieldName, fieldName, unit, entries);
return new InternalGeoDistanceFacet(facetName, entries);
}
}

View File: InternalGeoDistanceFacet.java

@ -22,7 +22,6 @@ package org.elasticsearch.search.facet.geodistance;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
@ -55,22 +54,13 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
private String name;
private String fieldName;
private String valueFieldName;
private DistanceUnit unit;
Entry[] entries;
InternalGeoDistanceFacet() {
}
public InternalGeoDistanceFacet(String name, String fieldName, String valueFieldName, DistanceUnit unit, Entry[] entries) {
public InternalGeoDistanceFacet(String name, Entry[] entries) {
this.name = name;
this.fieldName = fieldName;
this.valueFieldName = valueFieldName;
this.unit = unit;
this.entries = entries;
}
@ -90,30 +80,6 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
return type();
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public DistanceUnit unit() {
return this.unit;
}
@Override public DistanceUnit getUnit() {
return unit();
}
@Override public List<Entry> entries() {
return ImmutableList.copyOf(entries);
}
@ -134,9 +100,6 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
valueFieldName = in.readUTF();
unit = DistanceUnit.readDistanceUnit(in);
entries = new Entry[in.readVInt()];
for (int i = 0; i < entries.length; i++) {
entries[i] = new Entry(in.readDouble(), in.readDouble(), in.readVLong(), in.readDouble());
@ -145,9 +108,6 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeUTF(valueFieldName);
DistanceUnit.writeDistanceUnit(out, unit);
out.writeVInt(entries.length);
for (Entry entry : entries) {
out.writeDouble(entry.from);
@ -160,9 +120,6 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _UNIT = new XContentBuilderString("_unit");
static final XContentBuilderString RANGES = new XContentBuilderString("ranges");
static final XContentBuilderString FROM = new XContentBuilderString("from");
static final XContentBuilderString TO = new XContentBuilderString("to");
@ -174,9 +131,6 @@ public class InternalGeoDistanceFacet implements GeoDistanceFacet, InternalFacet
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, GeoDistanceFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._UNIT, unit);
builder.startArray(Fields.RANGES);
for (Entry entry : entries) {
builder.startObject();

View File: ValueGeoDistanceFacetCollector.java

@ -37,8 +37,6 @@ import java.io.IOException;
*/
public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
private final String valueFieldName;
private final String indexValueFieldName;
private final FieldDataType valueFieldDataType;
@ -48,7 +46,6 @@ public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
public ValueGeoDistanceFacetCollector(String facetName, String fieldName, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context, String valueFieldName) {
super(facetName, fieldName, lat, lon, unit, geoDistance, entries, context);
this.valueFieldName = valueFieldName;
FieldMapper mapper = context.mapperService().smartNameFieldMapper(valueFieldName);
if (mapper == null) {
@ -106,6 +103,6 @@ public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
}
@Override public Facet facet() {
return new InternalGeoDistanceFacet(facetName, fieldName, valueFieldName, unit, entries);
return new InternalGeoDistanceFacet(facetName, entries);
}
}

View File: CountAndTotalHistogramFacetCollector.java

@ -42,12 +42,8 @@ import java.io.IOException;
*/
public class CountAndTotalHistogramFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
private final FieldDataCache fieldDataCache;
@ -60,8 +56,6 @@ public class CountAndTotalHistogramFacetCollector extends AbstractFacetCollector
public CountAndTotalHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();
@ -92,7 +86,7 @@ public class CountAndTotalHistogramFacetCollector extends AbstractFacetCollector
}
@Override public Facet facet() {
return new InternalCountAndTotalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
return new InternalCountAndTotalHistogramFacet(facetName, comparatorType, histoProc.counts(), histoProc.totals());
}
public static long bucket(double value, long interval) {

View File: CountHistogramFacetCollector.java

@ -41,12 +41,8 @@ import java.io.IOException;
*/
public class CountHistogramFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
private final FieldDataCache fieldDataCache;
@ -59,8 +55,6 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
public CountHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();
@ -91,7 +85,7 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalCountHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts());
return new InternalCountHistogramFacet(facetName, comparatorType, histoProc.counts());
}
public static long bucket(double value, long interval) {

View File: HistogramFacet.java

@ -37,26 +37,6 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
*/
public static final String TYPE = "histogram";
/**
* The key field name used with this facet.
*/
String keyFieldName();
/**
* The key field name used with this facet.
*/
String getKeyFieldName();
/**
* The value field name used with this facet.
*/
String valueFieldName();
/**
* The value field name used with this facet.
*/
String getValueFieldName();
/**
* An ordered list of histogram facet entries.
*/

View File: InternalCountAndTotalHistogramFacet.java

@ -131,11 +131,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
private String name;
private String keyFieldName;
private String valueFieldName;
private long interval;
private ComparatorType comparatorType;
TLongLongHashMap counts;
@ -147,11 +142,8 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
private InternalCountAndTotalHistogramFacet() {
}
public InternalCountAndTotalHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
public InternalCountAndTotalHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.counts = counts;
this.totals = totals;
@ -165,22 +157,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
return name();
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public String type() {
return TYPE;
}
@ -264,10 +240,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _COMPARATOR = new XContentBuilderString("_comparator");
static final XContentBuilderString _INTERVAL = new XContentBuilderString("_interval");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
@ -278,10 +250,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._COMPARATOR, comparatorType.description());
builder.field(Fields._INTERVAL, interval);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
@ -304,9 +272,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
interval = in.readVLong();
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
@ -326,9 +291,6 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVLong(interval);
out.writeByte(comparatorType.id());
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());

View File: InternalCountHistogramFacet.java

@ -127,12 +127,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
private String name;
private String keyFieldName;
private String valueFieldName;
private long interval;
private ComparatorType comparatorType;
ComparatorType comparatorType;
TLongLongHashMap counts;
@ -141,11 +136,8 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
private InternalCountHistogramFacet() {
}
public InternalCountHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts) {
public InternalCountHistogramFacet(String name, ComparatorType comparatorType, TLongLongHashMap counts) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.counts = counts;
}
@ -158,22 +150,6 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
return name();
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public String type() {
return TYPE;
}
@ -241,10 +217,6 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _COMPARATOR = new XContentBuilderString("_comparator");
static final XContentBuilderString _INTERVAL = new XContentBuilderString("_interval");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
@ -253,10 +225,6 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._COMPARATOR, comparatorType.description());
builder.field(Fields._INTERVAL, interval);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
@ -277,9 +245,6 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
interval = in.readVLong();
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
@ -296,9 +261,6 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVLong(interval);
out.writeByte(comparatorType.id());
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());

View File: KeyValueHistogramFacetCollector.java

@ -41,10 +41,8 @@ import java.io.IOException;
*/
public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
private final String keyFieldName;
private final String keyIndexFieldName;
private final String valueFieldName;
private final String valueIndexFieldName;
private final long interval;
@ -64,8 +62,6 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
public KeyValueHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();
@ -135,6 +131,6 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalCountAndTotalHistogramFacet(facetName, keyFieldName, valueFieldName, interval, comparatorType, counts, totals);
return new InternalCountAndTotalHistogramFacet(facetName, comparatorType, counts, totals);
}
}

View File: KeyValueScriptHistogramFacetCollector.java

@ -44,12 +44,8 @@ import java.util.Map;
*/
public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
private final FieldDataCache fieldDataCache;
@ -64,8 +60,6 @@ public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollecto
public KeyValueScriptHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();
@ -99,7 +93,7 @@ public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollecto
}
@Override public Facet facet() {
return new InternalCountAndTotalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
return new InternalCountAndTotalHistogramFacet(facetName, comparatorType, histoProc.counts(), histoProc.totals());
}
public static long bucket(double value, long interval) {

View File: ScriptHistogramFacetCollector.java

@ -74,7 +74,7 @@ public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalCountAndTotalHistogramFacet(facetName, "_na", "_na", -1, comparatorType, counts, totals);
return new InternalCountAndTotalHistogramFacet(facetName, comparatorType, counts, totals);
}
public static long bucket(double value, long interval) {

View File: InternalRangeFacet.java

@ -54,19 +54,13 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
private String name;
private String keyFieldName;
private String valueFieldName;
Entry[] entries;
InternalRangeFacet() {
}
public InternalRangeFacet(String name, String keyFieldName, String valueFieldName, Entry[] entries) {
public InternalRangeFacet(String name, Entry[] entries) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.entries = entries;
}
@ -86,22 +80,6 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
return RangeFacet.TYPE;
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public List<Entry> entries() {
return ImmutableList.copyOf(entries);
}
@ -122,8 +100,6 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
entries = new Entry[in.readVInt()];
for (int i = 0; i < entries.length; i++) {
Entry entry = new Entry();
@ -143,8 +119,6 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVInt(entries.length);
for (Entry entry : entries) {
out.writeDouble(entry.from);
@ -168,8 +142,6 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString RANGES = new XContentBuilderString("ranges");
static final XContentBuilderString FROM = new XContentBuilderString("from");
static final XContentBuilderString FROM_STR = new XContentBuilderString("from_str");
@ -183,8 +155,6 @@ public class InternalRangeFacet implements RangeFacet, InternalFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, "range");
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.startArray(Fields.RANGES);
for (Entry entry : entries) {
builder.startObject();

View File: KeyValueRangeFacetCollector.java

@ -37,10 +37,8 @@ import java.io.IOException;
*/
public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
private final String keyFieldName;
private final String keyIndexFieldName;
private final String valueFieldName;
private final String valueIndexFieldName;
private final FieldDataCache fieldDataCache;
@ -55,8 +53,6 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
public KeyValueRangeFacetCollector(String facetName, String keyFieldName, String valueFieldName, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.entries = entries;
this.fieldDataCache = context.fieldDataCache();
@ -139,6 +135,6 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalRangeFacet(facetName, keyFieldName, valueFieldName, entries);
return new InternalRangeFacet(facetName, entries);
}
}

View File: RangeFacet.java

@ -33,26 +33,6 @@ public interface RangeFacet extends Facet, Iterable<RangeFacet.Entry> {
*/
public static final String TYPE = "range";
/**
* The key field name used with this facet.
*/
String keyFieldName();
/**
* The key field name used with this facet.
*/
String getKeyFieldName();
/**
* The value field name used with this facet.
*/
String valueFieldName();
/**
* The value field name used with this facet.
*/
String getValueFieldName();
/**
* An ordered list of range facet entries.
*/

View File: RangeFacetCollector.java

@ -36,8 +36,6 @@ import java.io.IOException;
*/
public class RangeFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final FieldDataCache fieldDataCache;
@ -52,7 +50,6 @@ public class RangeFacetCollector extends AbstractFacetCollector {
public RangeFacetCollector(String facetName, String fieldName, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.fieldDataCache = context.fieldDataCache();
this.entries = entries;
@ -81,7 +78,7 @@ public class RangeFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalRangeFacet(facetName, fieldName, fieldName, entries);
return new InternalRangeFacet(facetName, entries);
}
public static class RangeProc implements NumericFieldData.DoubleValueInDocProc {

View File: ScriptRangeFacetCollector.java

@ -64,6 +64,6 @@ public class ScriptRangeFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalRangeFacet(facetName, "_na", "_na", entries);
return new InternalRangeFacet(facetName, entries);
}
}

View File: InternalStatisticalFacet.java

@ -51,8 +51,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
private String name;
private String fieldName;
private double min;
private double max;
@ -66,9 +64,8 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
private InternalStatisticalFacet() {
}
public InternalStatisticalFacet(String name, String fieldName, double min, double max, double total, double sumOfSquares, long count) {
public InternalStatisticalFacet(String name, double min, double max, double total, double sumOfSquares, long count) {
this.name = name;
this.fieldName = fieldName;
this.min = min;
this.max = max;
this.total = total;
@ -84,14 +81,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
return name();
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -166,7 +155,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString MIN = new XContentBuilderString("min");
@ -180,7 +168,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, StatisticalFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.COUNT, count());
builder.field(Fields.TOTAL, total());
builder.field(Fields.MIN, min());
@ -201,7 +188,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
count = in.readVLong();
total = in.readDouble();
min = in.readDouble();
@ -211,7 +197,6 @@ public class InternalStatisticalFacet implements StatisticalFacet, InternalFacet
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeVLong(count);
out.writeDouble(total);
out.writeDouble(min);

View File: ScriptStatisticalFacetCollector.java

@ -68,6 +68,6 @@ public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalStatisticalFacet(facetName, "_na", min, max, total, sumOfSquares, count);
return new InternalStatisticalFacet(facetName, min, max, total, sumOfSquares, count);
}
}

View File: StatisticalFacet.java

@ -33,16 +33,6 @@ public interface StatisticalFacet extends Facet {
*/
public static final String TYPE = "statistical";
/**
* The field name of the facet.
*/
String fieldName();
/**
* The field name of the facet.
*/
String getFieldName();
/**
* The number of values counted.
*/

View File: StatisticalFacetCollector.java

@ -36,8 +36,6 @@ import java.io.IOException;
*/
public class StatisticalFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final FieldDataCache fieldDataCache;
@ -50,7 +48,6 @@ public class StatisticalFacetCollector extends AbstractFacetCollector {
public StatisticalFacetCollector(String facetName, String fieldName, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.fieldDataCache = context.fieldDataCache();
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
@ -76,7 +73,7 @@ public class StatisticalFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalStatisticalFacet(facetName, fieldName, statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
return new InternalStatisticalFacet(facetName, statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
}
public static class StatsProc implements NumericFieldData.DoubleValueInDocProc {

View File: StatisticalFacetProcessor.java

@ -122,6 +122,6 @@ public class StatisticalFacetProcessor extends AbstractComponent implements Face
count += statsFacet.count();
}
return new InternalStatisticalFacet(name, ((InternalStatisticalFacet) facets.get(0)).fieldName(), min, max, total, sumOfSquares, count);
return new InternalStatisticalFacet(name, min, max, total, sumOfSquares, count);
}
}

View File: StatisticalFieldsFacetCollector.java

@ -20,7 +20,6 @@
package org.elasticsearch.search.facet.statistical;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
@ -37,8 +36,6 @@ import java.io.IOException;
*/
public class StatisticalFieldsFacetCollector extends AbstractFacetCollector {
private final String[] fieldsNames;
private final String[] indexFieldsNames;
private final FieldDataCache fieldDataCache;
@ -51,7 +48,6 @@ public class StatisticalFieldsFacetCollector extends AbstractFacetCollector {
public StatisticalFieldsFacetCollector(String facetName, String[] fieldsNames, SearchContext context) {
super(facetName);
this.fieldsNames = fieldsNames;
this.fieldDataCache = context.fieldDataCache();
fieldsDataType = new FieldDataType[fieldsNames.length];
@ -76,13 +72,13 @@ public class StatisticalFieldsFacetCollector extends AbstractFacetCollector {
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
for (int i = 0; i < fieldsNames.length; i++) {
for (int i = 0; i < indexFieldsNames.length; i++) {
fieldsData[i] = (NumericFieldData) fieldDataCache.cache(fieldsDataType[i], reader, indexFieldsNames[i]);
}
}
@Override public Facet facet() {
return new InternalStatisticalFacet(facetName, Strings.arrayToCommaDelimitedString(fieldsNames), statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
return new InternalStatisticalFacet(facetName, statsProc.min(), statsProc.max(), statsProc.total(), statsProc.sumOfSquares(), statsProc.count());
}
public static class StatsProc implements NumericFieldData.DoubleValueInDocProc {

View File: TermsFacet.java

@ -144,26 +144,6 @@ public interface TermsFacet extends Facet, Iterable<TermsFacet.Entry> {
}
}
/**
* The field name the terms were extracted from.
*/
String fieldName();
/**
* The field name the terms were extracted from.
*/
String getFieldName();
/**
* The ordering of the results.
*/
ComparatorType comparatorType();
/**
* The ordering of the results.
*/
ComparatorType getComparatorType();
/**
* The number of docs missing a value.
*/

View File: InternalByteTermsFacet.java

@ -108,22 +108,19 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<ByteEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalByteTermsFacet() {
}
public InternalByteTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<ByteEntry> entries, long missing) {
public InternalByteTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<ByteEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -138,14 +135,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -154,14 +143,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public long missingCount() {
return this.missing;
}
@ -210,7 +191,7 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<ByteEntry> ordered = new BoundedTreeSet<ByteEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<ByteEntry> ordered = new BoundedTreeSet<ByteEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TByteIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new ByteEntry(it.key(), it.value()));
@ -222,7 +203,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -232,7 +212,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (ByteEntry entry : entries) {
@ -254,7 +233,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -268,7 +246,6 @@ public class InternalByteTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);

View File: TermsByteFacetCollector.java

@ -56,8 +56,6 @@ public class TermsByteFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -82,8 +80,6 @@ public class TermsByteFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms short facet collector on it");
@ -140,7 +136,7 @@ public class TermsByteFacetCollector extends AbstractFacetCollector {
TByteIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalByteTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalByteTermsFacet.ByteEntry>of(), aggregator.missing());
return new InternalByteTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalByteTermsFacet.ByteEntry>of(), aggregator.missing());
} else {
// we need to fetch facets of "size * numberOfShards" because of problems in how they are distributed across shards
BoundedTreeSet<InternalByteTermsFacet.ByteEntry> ordered = new BoundedTreeSet<InternalByteTermsFacet.ByteEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsByteFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalByteTermsFacet.ByteEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalByteTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalByteTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}

View File: InternalDoubleTermsFacet.java

@ -111,22 +111,19 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<DoubleEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalDoubleTermsFacet() {
}
public InternalDoubleTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<DoubleEntry> entries, long missing) {
public InternalDoubleTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<DoubleEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -141,14 +138,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -157,14 +146,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<DoubleEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -211,7 +192,7 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<DoubleEntry> ordered = new BoundedTreeSet<DoubleEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<DoubleEntry> ordered = new BoundedTreeSet<DoubleEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TDoubleIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new DoubleEntry(it.key(), it.value()));
@ -223,7 +204,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -233,7 +213,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (DoubleEntry entry : entries) {
@ -255,7 +234,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -269,7 +247,6 @@ public class InternalDoubleTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);

View File: TermsDoubleFacetCollector.java

@ -56,8 +56,6 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -82,8 +80,6 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms double facet collector on it");
@ -140,7 +136,7 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
TDoubleIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalDoubleTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalDoubleTermsFacet.DoubleEntry>of(), aggregator.missing());
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalDoubleTermsFacet.DoubleEntry>of(), aggregator.missing());
} else {
// we need to fetch facets of "size * numberOfShards" because of problems in how they are distributed across shards
BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry> ordered = new BoundedTreeSet<InternalDoubleTermsFacet.DoubleEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsDoubleFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalDoubleTermsFacet.DoubleEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalDoubleTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalDoubleTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}

View File: InternalFloatTermsFacet.java

@ -111,22 +111,19 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<FloatEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalFloatTermsFacet() {
}
public InternalFloatTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<FloatEntry> entries, long missing) {
public InternalFloatTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<FloatEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -141,14 +138,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -157,14 +146,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<FloatEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -212,7 +193,7 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<FloatEntry> ordered = new BoundedTreeSet<FloatEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<FloatEntry> ordered = new BoundedTreeSet<FloatEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TFloatIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new FloatEntry(it.key(), it.value()));
@ -224,7 +205,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -234,7 +214,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (FloatEntry entry : entries) {
@ -256,7 +235,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -270,7 +248,6 @@ public class InternalFloatTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);

View File: TermsFloatFacetCollector.java

@ -56,8 +56,6 @@ public class TermsFloatFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -82,8 +80,6 @@ public class TermsFloatFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms float facet collector on it");
@ -140,7 +136,7 @@ public class TermsFloatFacetCollector extends AbstractFacetCollector {
TFloatIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalFloatTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalFloatTermsFacet.FloatEntry>of(), aggregator.missing());
return new InternalFloatTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalFloatTermsFacet.FloatEntry>of(), aggregator.missing());
} else {
// we need to fetch facets of "size * numberOfShards" because of problems in how they are distributed across shards
BoundedTreeSet<InternalFloatTermsFacet.FloatEntry> ordered = new BoundedTreeSet<InternalFloatTermsFacet.FloatEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsFloatFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalFloatTermsFacet.FloatEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalFloatTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalFloatTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}

View File: IndexNameFacetCollector.java

@ -56,6 +56,6 @@ public class IndexNameFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalStringTermsFacet(facetName, "_index", comparatorType, size, Sets.newHashSet(new InternalStringTermsFacet.StringEntry(indexName, count)), 0);
return new InternalStringTermsFacet(facetName, comparatorType, size, Sets.newHashSet(new InternalStringTermsFacet.StringEntry(indexName, count)), 0);
}
}

View File: InternalIntTermsFacet.java

@ -108,22 +108,19 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<IntEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalIntTermsFacet() {
}
public InternalIntTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<IntEntry> entries, long missing) {
public InternalIntTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<IntEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -138,14 +135,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -154,14 +143,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<IntEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -209,7 +190,7 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<IntEntry> ordered = new BoundedTreeSet<IntEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<IntEntry> ordered = new BoundedTreeSet<IntEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TIntIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new IntEntry(it.key(), it.value()));
@ -221,7 +202,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -231,7 +211,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (IntEntry entry : entries) {
@ -253,7 +232,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -267,7 +245,6 @@ public class InternalIntTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);

View File: TermsIntFacetCollector.java

@ -56,8 +56,6 @@ public class TermsIntFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -82,8 +80,6 @@ public class TermsIntFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms int facet collector on it");
@ -140,7 +136,7 @@ public class TermsIntFacetCollector extends AbstractFacetCollector {
TIntIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalIntTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalIntTermsFacet.IntEntry>of(), aggregator.missing());
return new InternalIntTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalIntTermsFacet.IntEntry>of(), aggregator.missing());
} else {
// we need to fetch facets of "size * numberOfShards" because of problems in how they are distributed across shards
BoundedTreeSet<InternalIntTermsFacet.IntEntry> ordered = new BoundedTreeSet<InternalIntTermsFacet.IntEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsIntFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalIntTermsFacet.IntEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalIntTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalIntTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}

View File: InternalLongTermsFacet.java

@ -111,22 +111,19 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<LongEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalLongTermsFacet() {
}
public InternalLongTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<LongEntry> entries, long missing) {
public InternalLongTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<LongEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -141,14 +138,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -157,14 +146,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<LongEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -212,7 +193,7 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<LongEntry> ordered = new BoundedTreeSet<LongEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<LongEntry> ordered = new BoundedTreeSet<LongEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TLongIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new LongEntry(it.key(), it.value()));
@ -224,7 +205,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -234,7 +214,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (LongEntry entry : entries) {
@ -256,7 +235,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -270,7 +248,6 @@ public class InternalLongTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);

View File: TermsLongFacetCollector.java

@ -57,8 +57,6 @@ public class TermsLongFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -83,8 +81,6 @@ public class TermsLongFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms long facet collector on it");
@ -141,7 +137,7 @@ public class TermsLongFacetCollector extends AbstractFacetCollector {
TLongIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalLongTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalLongTermsFacet.LongEntry>of(), aggregator.missing());
return new InternalLongTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalLongTermsFacet.LongEntry>of(), aggregator.missing());
} else {
// fetch "size * numberOfShards" entries per shard: term counts are spread unevenly across shards, so a shard-local top-"size" could miss terms that are frequent globally
BoundedTreeSet<InternalLongTermsFacet.LongEntry> ordered = new BoundedTreeSet<InternalLongTermsFacet.LongEntry>(comparatorType.comparator(), size * numberOfShards);
@ -150,7 +146,7 @@ public class TermsLongFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalLongTermsFacet.LongEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalLongTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalLongTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}
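
The collectors deliberately return more entries per shard than the request asks for. With size = 10 and 5 shards, each shard contributes its local top 50, so a term that sits just outside a shard's top 10 everywhere can still surface once the per-shard counts are summed in the reduce step. A small illustration of that sizing rule; the class and method names are invented for the example, not part of the codebase:

// Illustrative only: not an Elasticsearch API, just the sizing rule the collectors apply.
public class PerShardFetchSize {
    static int perShardSize(int requestedSize, int numberOfShards) {
        // over-fetch so a term ranked just below the shard-local top-N can still
        // reach the global top-N after the reduce step sums its counts
        return requestedSize * numberOfShards;
    }

    public static void main(String[] args) {
        System.out.println(perShardSize(10, 5)); // 50 entries requested from each shard
    }
}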


@ -108,22 +108,19 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<ShortEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalShortTermsFacet() {
}
public InternalShortTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<ShortEntry> entries, long missing) {
public InternalShortTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<ShortEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -138,14 +135,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -154,14 +143,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<ShortEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -208,7 +189,7 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<ShortEntry> ordered = new BoundedTreeSet<ShortEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<ShortEntry> ordered = new BoundedTreeSet<ShortEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TShortIntIterator it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new ShortEntry(it.key(), it.value()));
@ -220,7 +201,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -230,7 +210,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (ShortEntry entry : entries) {
@ -252,7 +231,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -266,7 +244,6 @@ public class InternalShortTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);
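
The reduce shown in these facet classes follows the same pattern for every value type: per-shard counts are summed into one map keyed by term, then only the requiredSize best entries, ordered by the facet's comparator, are kept. A conceptual sketch using java.util collections in place of the trove hash maps and BoundedTreeSet used by the real implementation (that substitution is an assumption for readability):

import java.util.*;

// Conceptual sketch of the shard-level reduce; plain collections stand in for the
// TLongIntHashMap/TShortIntHashMap and BoundedTreeSet of the actual implementation.
class TermsReduceSketch {
    static List<Map.Entry<Long, Integer>> reduce(List<Map<Long, Integer>> shardFacets,
                                                 int requiredSize,
                                                 Comparator<Map.Entry<Long, Integer>> comparator) {
        // sum per-shard counts per term
        Map<Long, Integer> aggregated = new HashMap<>();
        for (Map<Long, Integer> shard : shardFacets) {
            for (Map.Entry<Long, Integer> e : shard.entrySet()) {
                aggregated.merge(e.getKey(), e.getValue(), Integer::sum);
            }
        }
        // keep only the top requiredSize entries in comparator order,
        // which is what BoundedTreeSet does for the real facet
        List<Map.Entry<Long, Integer>> ordered = new ArrayList<>(aggregated.entrySet());
        ordered.sort(comparator);
        return ordered.size() > requiredSize ? new ArrayList<>(ordered.subList(0, requiredSize)) : ordered;
    }
}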


@ -56,8 +56,6 @@ public class TermsShortFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -82,8 +80,6 @@ public class TermsShortFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new ElasticSearchIllegalArgumentException("Field [" + fieldName + "] doesn't have a type, can't run terms short facet collector on it");
@ -140,7 +136,7 @@ public class TermsShortFacetCollector extends AbstractFacetCollector {
TShortIntHashMap facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalShortTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalShortTermsFacet.ShortEntry>of(), aggregator.missing());
return new InternalShortTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalShortTermsFacet.ShortEntry>of(), aggregator.missing());
} else {
// fetch "size * numberOfShards" entries per shard: term counts are spread unevenly across shards, so a shard-local top-"size" could miss terms that are frequent globally
BoundedTreeSet<InternalShortTermsFacet.ShortEntry> ordered = new BoundedTreeSet<InternalShortTermsFacet.ShortEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsShortFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalShortTermsFacet.ShortEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalShortTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalShortTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}


@ -41,8 +41,6 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.elasticsearch.common.Strings.*;
/**
* @author kimchy (shay.banon)
*/
@ -50,8 +48,6 @@ public class FieldsTermsStringFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String[] fieldsNames;
private final String[] indexFieldsNames;
private final InternalStringTermsFacet.ComparatorType comparatorType;
@ -76,8 +72,6 @@ public class FieldsTermsStringFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldsNames = fieldsNames;
fieldsDataType = new FieldDataType[fieldsNames.length];
fieldsData = new FieldData[fieldsNames.length];
indexFieldsNames = new String[fieldsNames.length];
@ -121,7 +115,7 @@ public class FieldsTermsStringFacetCollector extends AbstractFacetCollector {
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
for (int i = 0; i < fieldsNames.length; i++) {
for (int i = 0; i < indexFieldsNames.length; i++) {
fieldsData[i] = fieldDataCache.cache(fieldsDataType[i], reader, indexFieldsNames[i]);
}
if (script != null) {
@ -139,7 +133,7 @@ public class FieldsTermsStringFacetCollector extends AbstractFacetCollector {
TObjectIntHashMap<String> facets = aggregator.facets();
if (facets.isEmpty()) {
TermsStringFacetCollector.pushFacets(facets);
return new InternalStringTermsFacet(facetName, arrayToCommaDelimitedString(fieldsNames), comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), aggregator.missing());
return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), aggregator.missing());
} else {
// fetch "size * numberOfShards" entries per shard: term counts are spread unevenly across shards, so a shard-local top-"size" could miss terms that are frequent globally
BoundedTreeSet<InternalStringTermsFacet.StringEntry> ordered = new BoundedTreeSet<InternalStringTermsFacet.StringEntry>(comparatorType.comparator(), size * numberOfShards);
@ -148,7 +142,7 @@ public class FieldsTermsStringFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalStringTermsFacet.StringEntry(it.key(), it.value()));
}
TermsStringFacetCollector.pushFacets(facets);
return new InternalStringTermsFacet(facetName, arrayToCommaDelimitedString(fieldsNames), comparatorType, size, ordered, aggregator.missing());
return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}


@ -107,22 +107,19 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
private String name;
private String fieldName;
int requiredSize;
long missing;
Collection<StringEntry> entries = ImmutableList.of();
private ComparatorType comparatorType;
ComparatorType comparatorType;
InternalStringTermsFacet() {
}
public InternalStringTermsFacet(String name, String fieldName, ComparatorType comparatorType, int requiredSize, Collection<StringEntry> entries, long missing) {
public InternalStringTermsFacet(String name, ComparatorType comparatorType, int requiredSize, Collection<StringEntry> entries, long missing) {
this.name = name;
this.fieldName = fieldName;
this.comparatorType = comparatorType;
this.requiredSize = requiredSize;
this.entries = entries;
@ -137,14 +134,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
return this.name;
}
@Override public String fieldName() {
return this.fieldName;
}
@Override public String getFieldName() {
return fieldName();
}
@Override public String type() {
return TYPE;
}
@ -153,14 +142,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
return type();
}
@Override public ComparatorType comparatorType() {
return comparatorType;
}
@Override public ComparatorType getComparatorType() {
return comparatorType();
}
@Override public List<StringEntry> entries() {
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
@ -207,7 +188,7 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
}
}
BoundedTreeSet<StringEntry> ordered = new BoundedTreeSet<StringEntry>(first.comparatorType().comparator(), first.requiredSize);
BoundedTreeSet<StringEntry> ordered = new BoundedTreeSet<StringEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TObjectIntIterator<String> it = aggregated.iterator(); it.hasNext();) {
it.advance();
ordered.add(new StringEntry(it.key(), it.value()));
@ -219,7 +200,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _FIELD = new XContentBuilderString("_field");
static final XContentBuilderString MISSING = new XContentBuilderString("missing");
static final XContentBuilderString TERMS = new XContentBuilderString("terms");
static final XContentBuilderString TERM = new XContentBuilderString("term");
@ -229,7 +209,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, TermsFacet.TYPE);
builder.field(Fields._FIELD, fieldName);
builder.field(Fields.MISSING, missing);
builder.startArray(Fields.TERMS);
for (Entry entry : entries) {
@ -251,7 +230,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
fieldName = in.readUTF();
comparatorType = ComparatorType.fromId(in.readByte());
requiredSize = in.readVInt();
missing = in.readVLong();
@ -265,7 +243,6 @@ public class InternalStringTermsFacet extends InternalTermsFacet {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(fieldName);
out.writeByte(comparatorType.id());
out.writeVInt(requiredSize);
out.writeVLong(missing);
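
Since the _FIELD key is gone from the XContent output and fieldName() is gone from the API, a client that needs to know which field a facet ran on has to remember it from the request, keyed by the facet name it chose. A hedged sketch of that bookkeeping: the facets().facet(name) lookup matches the one used in the geo distance tests in this commit, while entries(), term() and count() on the entry are assumed from the facet classes here and may differ:

import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.facet.terms.TermsFacet;

// Sketch of client-side bookkeeping now that the response no longer echoes the field.
class FacetFieldLookup {
    // field names remembered from the request, keyed by the facet name we chose
    private final Map<String, String> facetNameToField = new HashMap<String, String>();

    void remember(String facetName, String fieldName) {
        facetNameToField.put(facetName, fieldName);
    }

    void print(SearchResponse response, String facetName) {
        TermsFacet facet = response.facets().facet(facetName);
        String field = facetNameToField.get(facetName); // no longer available from the response
        for (TermsFacet.Entry entry : facet.entries()) {
            System.out.println(field + ": " + entry.term() + " -> " + entry.count());
        }
    }
}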


@ -46,8 +46,6 @@ public class ScriptTermsStringFieldFacetCollector extends AbstractFacetCollector
private final int numberOfShards;
private final String sScript;
private final SearchScript script;
private final Matcher matcher;
@ -64,7 +62,6 @@ public class ScriptTermsStringFieldFacetCollector extends AbstractFacetCollector
this.size = size;
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.sScript = script;
this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
this.excluded = excluded;
@ -130,7 +127,7 @@ public class ScriptTermsStringFieldFacetCollector extends AbstractFacetCollector
@Override public Facet facet() {
if (facets.isEmpty()) {
TermsStringFacetCollector.pushFacets(facets);
return new InternalStringTermsFacet(facetName, sScript, comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), missing);
return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), missing);
} else {
// fetch "size * numberOfShards" entries per shard: term counts are spread unevenly across shards, so a shard-local top-"size" could miss terms that are frequent globally
BoundedTreeSet<InternalStringTermsFacet.StringEntry> ordered = new BoundedTreeSet<InternalStringTermsFacet.StringEntry>(comparatorType.comparator(), size * numberOfShards);
@ -139,7 +136,7 @@ public class ScriptTermsStringFieldFacetCollector extends AbstractFacetCollector
ordered.add(new InternalStringTermsFacet.StringEntry(it.key(), it.value()));
}
TermsStringFacetCollector.pushFacets(facets);
return new InternalStringTermsFacet(facetName, sScript, comparatorType, size, ordered, missing);
return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, missing);
}
}
}


@ -59,8 +59,6 @@ public class TermsStringFacetCollector extends AbstractFacetCollector {
private final FieldDataCache fieldDataCache;
private final String fieldName;
private final String indexFieldName;
private final TermsFacet.ComparatorType comparatorType;
@ -85,8 +83,6 @@ public class TermsStringFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards();
this.fieldName = fieldName;
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
this.indexFieldName = fieldName;
@ -140,7 +136,7 @@ public class TermsStringFacetCollector extends AbstractFacetCollector {
TObjectIntHashMap<String> facets = aggregator.facets();
if (facets.isEmpty()) {
pushFacets(facets);
return new InternalStringTermsFacet(facetName, fieldName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), aggregator.missing());
return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.StringEntry>of(), aggregator.missing());
} else {
// fetch "size * numberOfShards" entries per shard: term counts are spread unevenly across shards, so a shard-local top-"size" could miss terms that are frequent globally
BoundedTreeSet<InternalStringTermsFacet.StringEntry> ordered = new BoundedTreeSet<InternalStringTermsFacet.StringEntry>(comparatorType.comparator(), size * numberOfShards);
@ -149,7 +145,7 @@ public class TermsStringFacetCollector extends AbstractFacetCollector {
ordered.add(new InternalStringTermsFacet.StringEntry(it.key(), it.value()));
}
pushFacets(facets);
return new InternalStringTermsFacet(facetName, fieldName, comparatorType, size, ordered, aggregator.missing());
return new InternalStringTermsFacet(facetName, comparatorType, size, ordered, aggregator.missing());
}
}


@ -132,8 +132,6 @@ public class GeoDistanceFacetTests extends AbstractNodesTests {
assertThat(searchResponse.hits().totalHits(), equalTo(7l));
GeoDistanceFacet facet = searchResponse.facets().facet("geo1");
assertThat(facet.fieldName(), equalTo("location"));
assertThat(facet.unit(), equalTo(DistanceUnit.KILOMETERS));
assertThat(facet.entries().size(), equalTo(4));
assertThat(facet.entries().get(0).to(), closeTo(2, 0.000001));
@ -167,8 +165,6 @@ public class GeoDistanceFacetTests extends AbstractNodesTests {
assertThat(searchResponse.hits().totalHits(), equalTo(7l));
facet = searchResponse.facets().facet("geo1");
assertThat(facet.fieldName(), equalTo("location"));
assertThat(facet.unit(), equalTo(DistanceUnit.KILOMETERS));
assertThat(facet.entries().size(), equalTo(4));
assertThat(facet.entries().get(0).to(), closeTo(2, 0.000001));
@ -201,8 +197,6 @@ public class GeoDistanceFacetTests extends AbstractNodesTests {
assertThat(searchResponse.hits().totalHits(), equalTo(7l));
facet = searchResponse.facets().facet("geo1");
assertThat(facet.fieldName(), equalTo("location"));
assertThat(facet.unit(), equalTo(DistanceUnit.KILOMETERS));
assertThat(facet.entries().size(), equalTo(4));
assertThat(facet.entries().get(0).to(), closeTo(2, 0.000001));
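
With fieldName() and unit() removed from GeoDistanceFacet, the tests keep only the entry-level assertions; the field and unit are fixed on the request side, so there is nothing left to verify about them in the response. For contrast, the surviving shape of each assertion block looks roughly like this (a fragment of the existing test, not standalone code); since the request asked for DistanceUnit.KILOMETERS, the to()/from() values are still kilometers even though the response no longer says so:

// Fragment from the adapted test: only entry assertions remain; the unit is known
// from the request (kilometers here), so the distance boundaries are interpreted in it.
GeoDistanceFacet facet = searchResponse.facets().facet("geo1");
assertThat(facet.entries().size(), equalTo(4));
assertThat(facet.entries().get(0).to(), closeTo(2, 0.000001));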