Histogram Facet: Improve performance and memory usage by defaulting to providing only counts, with no totals, closes #587.

kimchy 2011-01-01 17:27:55 +02:00
parent 45b93e2781
commit aec720218d
13 changed files with 850 additions and 296 deletions
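
In practice, the new default looks like this (a hedged sketch using only the client calls that appear in the benchmark below; the facet and field names are illustrative):

    SearchResponse countsOnly = client.prepareSearch()
            .setQuery(matchAllQuery())
            // no value field: only per-bucket counts are computed, no totals map is allocated
            .addFacet(histogramFacet("histo").field("num").interval(100))
            .execute().actionGet();

    SearchResponse countsAndTotals = client.prepareSearch()
            .setQuery(matchAllQuery())
            // naming a value field opts back in to per-bucket totals (and means)
            .addFacet(histogramFacet("histo").field("num").valueField("num").interval(100))
            .execute().actionGet();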

View File: HistogramFacetSearchBenchmark.java

@@ -143,6 +143,19 @@ public class HistogramFacetSearchBenchmark {
}
System.out.println("--> Histogram Facet (l_value) " + (totalQueryTime / QUERY_COUNT) + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(matchAllQuery())
.addFacet(histogramFacet("l_value").field("l_value").valueField("l_value").interval(4))
.execute().actionGet();
if (searchResponse.hits().totalHits() != COUNT) {
System.err.println("--> mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> Histogram Facet (l_value/l_value) " + (totalQueryTime / QUERY_COUNT) + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
@@ -156,6 +169,19 @@ public class HistogramFacetSearchBenchmark {
}
System.out.println("--> Histogram Facet (date) " + (totalQueryTime / QUERY_COUNT) + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(matchAllQuery())
.addFacet(histogramFacet("date").field("date").valueField("l_value").interval(1000))
.execute().actionGet();
if (searchResponse.hits().totalHits() != COUNT) {
System.err.println("--> mismatch on hits");
}
totalQueryTime += searchResponse.tookInMillis();
}
System.out.println("--> Histogram Facet (date/l_value) " + (totalQueryTime / QUERY_COUNT) + "ms");
clientNode.close();

View File: CountAndTotalHistogramFacetCollector.java (renamed from HistogramFacetCollector.java)

@@ -40,7 +40,7 @@ import java.io.IOException;
*
* @author kimchy (shay.banon)
*/
public class HistogramFacetCollector extends AbstractFacetCollector {
public class CountAndTotalHistogramFacetCollector extends AbstractFacetCollector {
private final String fieldName;
@@ -58,7 +58,7 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
private final HistogramProc histoProc;
public HistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
public CountAndTotalHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
@@ -92,7 +92,7 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
return new InternalCountAndTotalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
}
public static long bucket(double value, long interval) {

View File: CountHistogramFacetCollector.java (new)

@@ -0,0 +1,120 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* A histogram facet collector that computes per-bucket counts only, keyed on a
* single field; no value totals are accumulated.
*
* @author kimchy (shay.banon)
*/
public class CountHistogramFacetCollector extends AbstractFacetCollector {
private final String fieldName;
private final String indexFieldName;
private final long interval;
private final HistogramFacet.ComparatorType comparatorType;
private final FieldDataCache fieldDataCache;
private final FieldDataType fieldDataType;
private NumericFieldData fieldData;
private final HistogramProc histoProc;
public CountHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
}
// add a type filter if there is an exact doc mapper associated with it
if (smartMappers.hasDocMapper()) {
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
}
FieldMapper mapper = smartMappers.mapper();
indexFieldName = mapper.names().indexName();
fieldDataType = mapper.fieldDataType();
histoProc = new HistogramProc(interval);
}
@Override protected void doCollect(int doc) throws IOException {
fieldData.forEachValueInDoc(doc, histoProc);
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName);
}
@Override public Facet facet() {
return new InternalCountHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts());
}
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}
public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
private final long interval;
private final TLongLongHashMap counts = new TLongLongHashMap();
public HistogramProc(long interval) {
this.interval = interval;
}
@Override public void onValue(int docId, double value) {
long bucket = bucket(value, interval);
counts.adjustOrPutValue(bucket, 1, 1);
}
public TLongLongHashMap counts() {
return counts;
}
}
}
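
The static bucket() helper above maps each value to the lower edge of its interval. A few worked cases, following the cast-and-multiply arithmetic directly:

// bucket(value, interval) = ((long) (value / interval)) * interval
// bucket(1050.0, 100) -> ((long) 10.5)  * 100 = 1000
// bucket(1099.9, 100) -> 1000, while bucket(1100.0, 100) -> 1100
// the long cast truncates toward zero, so negative values round toward zero too:
// bucket(-50.0, 100)  -> ((long) -0.5) * 100 = 0, not -100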

View File: HistogramFacet.java

@@ -60,12 +60,12 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
/**
* An ordered list of histogram facet entries.
*/
List<Entry> entries();
List<? extends Entry> entries();
/**
* An ordered list of histogram facet entries.
*/
List<Entry> getEntries();
List<? extends Entry> getEntries();
public static enum ComparatorType {
KEY((byte) 0, "key", new Comparator<Entry>() {
@@ -134,75 +134,46 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
}
}
/**
* A histogram entry representing a single entry within the result of a histogram facet.
*/
public class Entry {
private final long key;
private final long count;
private final double total;
public Entry(long key, long count, double total) {
this.key = key;
this.count = count;
this.total = total;
}
public interface Entry {
/**
* The key value of the histogram.
*/
public long key() {
return key;
}
long key();
/**
* The key value of the histogram.
*/
public long getKey() {
return key();
}
long getKey();
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long count() {
return count;
}
long count();
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long getCount() {
return count();
}
long getCount();
/**
* The sum / total of the values of the value field that fall within this key "interval".
*/
public double total() {
return total;
}
double total();
/**
* The sum / total of the values of the value field that fall within this key "interval".
*/
public double getTotal() {
return total();
}
double getTotal();
/**
* The mean of this facet interval.
*/
public double mean() {
return total / count;
}
double mean();
/**
* The mean of this facet interval.
*/
public double getMean() {
return mean();
}
double getMean();
}
}
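
Widening entries() to List<? extends Entry> is what lets each concrete facet below return a list of its own entry type: Java generics are invariant, so a List<CountEntry> is not a List<Entry>, but it is a List<? extends Entry>. Reading through the wildcard stays safe; a minimal sketch (the facet name is hypothetical):

    HistogramFacet facet = searchResponse.facets().facet("histo");
    for (HistogramFacet.Entry entry : facet.entries()) {
        long key = entry.key();     // fine: every element is at least an Entry
        long count = entry.count();
    }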

View File: HistogramFacetBuilder.java

@@ -53,7 +53,6 @@ public class HistogramFacetBuilder extends AbstractFacetBuilder {
*/
public HistogramFacetBuilder field(String field) {
this.keyFieldName = field;
this.valueFieldName = field;
return this;
}
@@ -131,7 +130,7 @@ public class HistogramFacetBuilder extends AbstractFacetBuilder {
builder.startObject(name);
builder.startObject(HistogramFacet.TYPE);
if (valueFieldName != null && !keyFieldName.equals(valueFieldName)) {
if (valueFieldName != null) {
builder.field("key_field", keyFieldName);
builder.field("value_field", valueFieldName);
} else {
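
With the implicit value-field aliasing gone, the builder emits the key_field/value_field pair whenever a value field is set, and a plain field otherwise. A hedged sketch of the two serialized shapes (key_field and value_field are confirmed by the hunk above; the remaining JSON keys are assumed):

// histogramFacet("h").field("num").interval(100)
//   -> { "h" : { "histogram" : { "field" : "num", "interval" : 100 } } }
// histogramFacet("h").keyField("num").valueField("price").interval(100)
//   -> { "h" : { "histogram" : { "key_field" : "num", "value_field" : "price", "interval" : 100 } } }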

View File: HistogramFacetProcessor.java

@@ -22,10 +22,6 @@ package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongDoubleIterator;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.facet.Facet;
@@ -107,8 +103,10 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
if (valueScript != null) {
return new KeyValueScriptHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, interval, comparatorType, context);
} else if (valueField == null || keyField.equals(valueField)) {
return new HistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
} else if (valueField == null) {
return new CountHistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
} else if (keyField.equals(valueField)) {
return new CountAndTotalHistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
} else {
// we have a value field, and it's different from the key
return new KeyValueHistogramFacetCollector(facetName, keyField, valueField, interval, comparatorType, context);
@@ -116,46 +114,7 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetP
}
@Override public Facet reduce(String name, List<Facet> facets) {
if (facets.size() == 1) {
return facets.get(0);
}
TLongLongHashMap counts = null;
TLongDoubleHashMap totals = null;
InternalHistogramFacet firstHistoFacet = (InternalHistogramFacet) facets.get(0);
for (Facet facet : facets) {
InternalHistogramFacet histoFacet = (InternalHistogramFacet) facet;
if (!histoFacet.counts.isEmpty()) {
if (counts == null) {
counts = histoFacet.counts;
} else {
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext();) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
}
if (!histoFacet.totals.isEmpty()) {
if (totals == null) {
totals = histoFacet.totals;
} else {
for (TLongDoubleIterator it = histoFacet.totals.iterator(); it.hasNext();) {
it.advance();
totals.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
}
}
if (counts == null) {
counts = InternalHistogramFacet.EMPTY_LONG_LONG_MAP;
}
if (totals == null) {
totals = InternalHistogramFacet.EMPTY_LONG_DOUBLE_MAP;
}
firstHistoFacet.counts = counts;
firstHistoFacet.totals = totals;
return firstHistoFacet;
InternalHistogramFacet first = (InternalHistogramFacet) facets.get(0);
return first.reduce(name, facets);
}
}
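
The dispatch above now routes each request shape to the cheapest collector that can satisfy it; the memory win comes from the counts-only path never allocating a totals map:

// value script set             -> KeyValueScriptHistogramFacetCollector (counts + script-derived totals)
// no value field (new default) -> CountHistogramFacetCollector          (counts only, a single TLongLongHashMap)
// value field == key field     -> CountAndTotalHistogramFacetCollector  (counts + totals from one field-data load)
// value field != key field     -> KeyValueHistogramFacetCollector       (counts + totals from two field-data loads)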

View File: InternalCountAndTotalHistogramFacet.java (new)

@@ -0,0 +1,344 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongDoubleIterator;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
*/
public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet {
private static final String STREAM_TYPE = "ctHistogram";
public static void registerStreams() {
Streams.registerStream(STREAM, STREAM_TYPE);
}
static Stream STREAM = new Stream() {
@Override public Facet readFacet(String type, StreamInput in) throws IOException {
return readHistogramFacet(in);
}
};
@Override public String streamType() {
return STREAM_TYPE;
}
/**
* A histogram entry representing a single bucket within the result of a histogram facet.
*/
public class CountAndTotalEntry implements Entry {
private final long key;
private final long count;
private final double total;
public CountAndTotalEntry(long key, long count, double total) {
this.key = key;
this.count = count;
this.total = total;
}
/**
* The key value of the histogram.
*/
public long key() {
return key;
}
/**
* The key value of the histogram.
*/
public long getKey() {
return key();
}
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long count() {
return count;
}
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long getCount() {
return count();
}
/**
* The sum / total of the values of the value field that fall within this key "interval".
*/
public double total() {
return total;
}
/**
* The sum / total of the values of the value field that fall within this key "interval".
*/
public double getTotal() {
return total();
}
/**
* The mean of this facet interval.
*/
public double mean() {
return total / count;
}
/**
* The mean of this facet interval.
*/
public double getMean() {
return mean();
}
}
private String name;
private String keyFieldName;
private String valueFieldName;
private long interval;
private ComparatorType comparatorType;
TLongLongHashMap counts;
TLongDoubleHashMap totals;
Collection<CountAndTotalEntry> entries = null;
private InternalCountAndTotalHistogramFacet() {
}
public InternalCountAndTotalHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.counts = counts;
this.totals = totals;
}
@Override public String name() {
return this.name;
}
@Override public String getName() {
return name();
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public String type() {
return TYPE;
}
@Override public String getType() {
return type();
}
@Override public List<CountAndTotalEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountAndTotalEntry>) entries;
}
@Override public List<CountAndTotalEntry> getEntries() {
return entries();
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
}
private Collection<CountAndTotalEntry> computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountAndTotalEntry> set = new TreeSet<CountAndTotalEntry>(comparatorType.comparator());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key())));
}
entries = set;
return entries;
}
@Override public Facet reduce(String name, List<Facet> facets) {
if (facets.size() == 1) {
return facets.get(0);
}
TLongLongHashMap counts = null;
TLongDoubleHashMap totals = null;
InternalCountAndTotalHistogramFacet firstHistoFacet = (InternalCountAndTotalHistogramFacet) facets.get(0);
for (Facet facet : facets) {
InternalCountAndTotalHistogramFacet histoFacet = (InternalCountAndTotalHistogramFacet) facet;
if (!histoFacet.counts.isEmpty()) {
if (counts == null) {
counts = histoFacet.counts;
} else {
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext();) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
}
if (!histoFacet.totals.isEmpty()) {
if (totals == null) {
totals = histoFacet.totals;
} else {
for (TLongDoubleIterator it = histoFacet.totals.iterator(); it.hasNext();) {
it.advance();
totals.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
}
}
if (counts == null) {
counts = InternalCountAndTotalHistogramFacet.EMPTY_LONG_LONG_MAP;
}
if (totals == null) {
totals = InternalCountAndTotalHistogramFacet.EMPTY_LONG_DOUBLE_MAP;
}
firstHistoFacet.counts = counts;
firstHistoFacet.totals = totals;
return firstHistoFacet;
}
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _COMPARATOR = new XContentBuilderString("_comparator");
static final XContentBuilderString _INTERVAL = new XContentBuilderString("_interval");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString MEAN = new XContentBuilderString("mean");
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._COMPARATOR, comparatorType.description());
builder.field(Fields._INTERVAL, interval);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
builder.field(Fields.KEY, entry.key());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.MEAN, entry.mean());
builder.endObject();
}
builder.endArray();
builder.endObject();
}
public static InternalCountAndTotalHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
InternalCountAndTotalHistogramFacet facet = new InternalCountAndTotalHistogramFacet();
facet.readFrom(in);
return facet;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
interval = in.readVLong();
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
if (size == 0) {
counts = EMPTY_LONG_LONG_MAP;
totals = EMPTY_LONG_DOUBLE_MAP;
} else {
counts = new TLongLongHashMap(size);
totals = new TLongDoubleHashMap(size);
for (int i = 0; i < size; i++) {
long key = in.readLong();
counts.put(key, in.readVLong());
totals.put(key, in.readDouble());
}
}
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVLong(interval);
out.writeByte(comparatorType.id());
// optimize the write, since the totals map has exactly the same bucket keys as the counts map
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
out.writeDouble(totals.get(it.key()));
}
}
static final TLongLongHashMap EMPTY_LONG_LONG_MAP = new TLongLongHashMap();
static final TLongDoubleHashMap EMPTY_LONG_DOUBLE_MAP = new TLongDoubleHashMap();
}
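
The reduce above merges per-shard maps with Trove's adjustOrPutValue(key, adjustAmount, putAmount), which increments an existing entry or seeds a missing one in a single call. A minimal sketch of the primitive:

    TLongLongHashMap merged = new TLongLongHashMap();
    merged.adjustOrPutValue(1000L, 3L, 3L); // key absent:  put 3
    merged.adjustOrPutValue(1000L, 2L, 2L); // key present: adjust, 3 + 2 = 5

Passing it.value() as both the adjust and put amount is what makes the merge loop branch-free, and reusing the first non-empty shard map as the accumulator avoids one full copy.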

View File: InternalCountHistogramFacet.java (new)

@@ -0,0 +1,312 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
*/
public class InternalCountHistogramFacet extends InternalHistogramFacet {
private static final String STREAM_TYPE = "cHistogram";
public static void registerStreams() {
Streams.registerStream(STREAM, STREAM_TYPE);
}
static Stream STREAM = new Stream() {
@Override public Facet readFacet(String type, StreamInput in) throws IOException {
return readHistogramFacet(in);
}
};
@Override public String streamType() {
return STREAM_TYPE;
}
/**
* A histogram entry representing a single bucket within the result of a histogram facet.
*/
public class CountEntry implements Entry {
private final long key;
private final long count;
public CountEntry(long key, long count) {
this.key = key;
this.count = count;
}
/**
* The key value of the histogram.
*/
public long key() {
return key;
}
/**
* The key value of the histogram.
*/
public long getKey() {
return key();
}
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long count() {
return count;
}
/**
* The number of hits that fall within that key "range" or "interval".
*/
public long getCount() {
return count();
}
/**
* The sum / total of the value field. Always -1 here, since a counts-only histogram collects no values.
*/
public double total() {
return -1;
}
/**
* The sum / total of the value field. Always -1 here, since a counts-only histogram collects no values.
*/
public double getTotal() {
return total();
}
/**
* The mean of this facet interval. Always -1 here, since a counts-only histogram collects no values.
*/
public double mean() {
return -1;
}
/**
* The mean of this facet interval. Always -1 here, since a counts-only histogram collects no values.
*/
public double getMean() {
return mean();
}
}
private String name;
private String keyFieldName;
private String valueFieldName;
private long interval;
private ComparatorType comparatorType;
TLongLongHashMap counts;
Collection<CountEntry> entries = null;
private InternalCountHistogramFacet() {
}
public InternalCountHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.counts = counts;
}
@Override public String name() {
return this.name;
}
@Override public String getName() {
return name();
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public String type() {
return TYPE;
}
@Override public String getType() {
return type();
}
@Override public List<CountEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountEntry>) entries;
}
@Override public List<CountEntry> getEntries() {
return entries();
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
}
private Collection<CountEntry> computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountEntry> set = new TreeSet<CountEntry>(comparatorType.comparator());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountEntry(it.key(), it.value()));
}
entries = set;
return entries;
}
@Override public Facet reduce(String name, List<Facet> facets) {
if (facets.size() == 1) {
return facets.get(0);
}
TLongLongHashMap counts = null;
InternalCountHistogramFacet firstHistoFacet = (InternalCountHistogramFacet) facets.get(0);
for (Facet facet : facets) {
InternalCountHistogramFacet histoFacet = (InternalCountHistogramFacet) facet;
if (!histoFacet.counts.isEmpty()) {
if (counts == null) {
counts = histoFacet.counts;
} else {
for (TLongLongIterator it = histoFacet.counts.iterator(); it.hasNext();) {
it.advance();
counts.adjustOrPutValue(it.key(), it.value(), it.value());
}
}
}
}
if (counts == null) {
counts = InternalCountHistogramFacet.EMPTY_LONG_LONG_MAP;
}
firstHistoFacet.counts = counts;
return firstHistoFacet;
}
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _COMPARATOR = new XContentBuilderString("_comparator");
static final XContentBuilderString _INTERVAL = new XContentBuilderString("_interval");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._COMPARATOR, comparatorType.description());
builder.field(Fields._INTERVAL, interval);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
builder.field(Fields.KEY, entry.key());
builder.field(Fields.COUNT, entry.count());
builder.endObject();
}
builder.endArray();
builder.endObject();
}
public static InternalCountHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
InternalCountHistogramFacet facet = new InternalCountHistogramFacet();
facet.readFrom(in);
return facet;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
interval = in.readVLong();
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
if (size == 0) {
counts = EMPTY_LONG_LONG_MAP;
} else {
counts = new TLongLongHashMap(size);
for (int i = 0; i < size; i++) {
long key = in.readLong();
counts.put(key, in.readVLong());
}
}
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVLong(interval);
out.writeByte(comparatorType.id());
// write out each bucket key and its count
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
}
}
static final TLongLongHashMap EMPTY_LONG_LONG_MAP = new TLongLongHashMap();
}
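
Because CountEntry reports -1 from total() and mean() instead of throwing, callers that may receive either facet flavor should treat negative values as "not collected". A hedged sketch (the facet name is hypothetical):

    HistogramFacet facet = searchResponse.facets().facet("histo");
    for (HistogramFacet.Entry entry : facet) {
        long count = entry.count();     // always meaningful
        double total = entry.total();   // -1 when the facet was counts-only
        if (total >= 0) {
            double mean = entry.mean(); // only valid alongside a real total
        }
    }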

View File: InternalHistogramFacet.java

@@ -7,7 +7,7 @@
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
@@ -19,210 +19,20 @@
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.InternalFacet;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
*/
public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
private static final String STREAM_TYPE = "histogram";
public abstract class InternalHistogramFacet implements HistogramFacet, InternalFacet {
public static void registerStreams() {
Streams.registerStream(STREAM, STREAM_TYPE);
InternalCountAndTotalHistogramFacet.registerStreams();
InternalCountHistogramFacet.registerStreams();
}
static Stream STREAM = new Stream() {
@Override public Facet readFacet(String type, StreamInput in) throws IOException {
return readHistogramFacet(in);
}
};
@Override public String streamType() {
return STREAM_TYPE;
}
private String name;
private String keyFieldName;
private String valueFieldName;
private long interval;
private ComparatorType comparatorType;
TLongLongHashMap counts;
TLongDoubleHashMap totals;
Collection<Entry> entries = null;
private InternalHistogramFacet() {
}
public InternalHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
this.name = name;
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.counts = counts;
this.totals = totals;
}
@Override public String name() {
return this.name;
}
@Override public String getName() {
return name();
}
@Override public String keyFieldName() {
return this.keyFieldName;
}
@Override public String getKeyFieldName() {
return keyFieldName();
}
@Override public String valueFieldName() {
return this.valueFieldName;
}
@Override public String getValueFieldName() {
return valueFieldName();
}
@Override public String type() {
return TYPE;
}
@Override public String getType() {
return type();
}
@Override public List<Entry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<Entry>) entries;
}
@Override public List<Entry> getEntries() {
return entries();
}
@Override public Iterator<Entry> iterator() {
return computeEntries().iterator();
}
private Collection<Entry> computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<Entry> set = new TreeSet<Entry>(comparatorType.comparator());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new Entry(it.key(), it.value(), totals.get(it.key())));
}
entries = set;
return entries;
}
static final class Fields {
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _KEY_FIELD = new XContentBuilderString("_key_field");
static final XContentBuilderString _VALUE_FIELD = new XContentBuilderString("_value_field");
static final XContentBuilderString _COMPARATOR = new XContentBuilderString("_comparator");
static final XContentBuilderString _INTERVAL = new XContentBuilderString("_interval");
static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
static final XContentBuilderString KEY = new XContentBuilderString("key");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString MEAN = new XContentBuilderString("mean");
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field(Fields._TYPE, HistogramFacet.TYPE);
builder.field(Fields._KEY_FIELD, keyFieldName);
builder.field(Fields._VALUE_FIELD, valueFieldName);
builder.field(Fields._COMPARATOR, comparatorType.description());
builder.field(Fields._INTERVAL, interval);
builder.startArray(Fields.ENTRIES);
for (Entry entry : computeEntries()) {
builder.startObject();
builder.field(Fields.KEY, entry.key());
builder.field(Fields.COUNT, entry.count());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.MEAN, entry.mean());
builder.endObject();
}
builder.endArray();
builder.endObject();
}
public static InternalHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
InternalHistogramFacet facet = new InternalHistogramFacet();
facet.readFrom(in);
return facet;
}
@Override public void readFrom(StreamInput in) throws IOException {
name = in.readUTF();
keyFieldName = in.readUTF();
valueFieldName = in.readUTF();
interval = in.readVLong();
comparatorType = ComparatorType.fromId(in.readByte());
int size = in.readVInt();
if (size == 0) {
counts = EMPTY_LONG_LONG_MAP;
totals = EMPTY_LONG_DOUBLE_MAP;
} else {
counts = new TLongLongHashMap(size);
totals = new TLongDoubleHashMap(size);
for (int i = 0; i < size; i++) {
long key = in.readLong();
counts.put(key, in.readVLong());
totals.put(key, in.readDouble());
}
}
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeUTF(name);
out.writeUTF(keyFieldName);
out.writeUTF(valueFieldName);
out.writeVLong(interval);
out.writeByte(comparatorType.id());
// optimize the write, since we know we have the same buckets as keys
out.writeVInt(counts.size());
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
out.writeLong(it.key());
out.writeVLong(it.value());
out.writeDouble(totals.get(it.key()));
}
}
static final TLongLongHashMap EMPTY_LONG_LONG_MAP = new TLongLongHashMap();
static final TLongDoubleHashMap EMPTY_LONG_DOUBLE_MAP = new TLongDoubleHashMap();
}
public abstract Facet reduce(String name, List<Facet> facets);
}
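
With the base class now abstract, deserialization picks the concrete implementation from the stream type each subclass registered. Roughly (the registry shape is assumed from registerStreams above):

// the wire carries streamType() ahead of the facet body, and the reader
// looks up the Stream registered for that type:
//   "ctHistogram" -> InternalCountAndTotalHistogramFacet.readHistogramFacet(in)
//   "cHistogram"  -> InternalCountHistogramFacet.readHistogramFacet(in)
// so each subclass reads back exactly the maps it wrote in writeTo()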

View File: KeyValueHistogramFacetCollector.java

@@ -99,7 +99,7 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
double[] values = valueFieldData.doubleValues(doc);
int size = Math.min(keys.length, values.length);
for (int i = 0; i < size; i++) {
long bucket = HistogramFacetCollector.bucket(keys[i], interval);
long bucket = CountAndTotalHistogramFacetCollector.bucket(keys[i], interval);
counts.adjustOrPutValue(bucket, 1, 1);
totals.adjustOrPutValue(bucket, values[i], values[i]);
}
@@ -107,14 +107,14 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
// key multi valued, value is a single value
double value = valueFieldData.doubleValue(doc);
for (double key : keyFieldData.doubleValues(doc)) {
long bucket = HistogramFacetCollector.bucket(key, interval);
long bucket = CountAndTotalHistogramFacetCollector.bucket(key, interval);
counts.adjustOrPutValue(bucket, 1, 1);
totals.adjustOrPutValue(bucket, value, value);
}
}
} else {
// single key value, compute the bucket once
long bucket = HistogramFacetCollector.bucket(keyFieldData.doubleValue(doc), interval);
long bucket = CountAndTotalHistogramFacetCollector.bucket(keyFieldData.doubleValue(doc), interval);
if (valueFieldData.multiValued()) {
for (double value : valueFieldData.doubleValues(doc)) {
counts.adjustOrPutValue(bucket, 1, 1);
@@ -135,6 +135,6 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalHistogramFacet(facetName, keyFieldName, valueFieldName, interval, comparatorType, counts, totals);
return new InternalCountAndTotalHistogramFacet(facetName, keyFieldName, valueFieldName, interval, comparatorType, counts, totals);
}
}

View File: KeyValueScriptHistogramFacetCollector.java

@@ -99,7 +99,7 @@ public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollecto
}
@Override public Facet facet() {
return new InternalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
return new InternalCountAndTotalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
}
public static long bucket(double value, long interval) {

View File: ScriptHistogramFacetCollector.java

@@ -74,7 +74,7 @@ public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
}
@Override public Facet facet() {
return new InternalHistogramFacet(facetName, "_na", "_na", -1, comparatorType, counts, totals);
return new InternalCountAndTotalHistogramFacet(facetName, "_na", "_na", -1, comparatorType, counts, totals);
}
public static long bucket(double value, long interval) {

View File: SimpleFacetsTests.java

@@ -665,12 +665,13 @@ public class SimpleFacetsTests extends AbstractNodesTests {
SearchResponse searchResponse = client.prepareSearch()
.setQuery(matchAllQuery())
.addFacet(histogramFacet("stats1").field("num").interval(100))
.addFacet(histogramFacet("stats2").field("multi_num").interval(10))
.addFacet(histogramFacet("stats1").field("num").valueField("num").interval(100))
.addFacet(histogramFacet("stats2").field("multi_num").valueField("multi_num").interval(10))
.addFacet(histogramFacet("stats3").keyField("num").valueField("multi_num").interval(100))
.addFacet(histogramScriptFacet("stats4").keyScript("doc['date'].date.minuteOfHour").valueScript("doc['num'].value"))
.addFacet(histogramFacet("stats5").field("date").interval(1, TimeUnit.MINUTES))
.addFacet(histogramScriptFacet("stats6").keyField("num").valueScript("doc['num'].value").interval(100))
.addFacet(histogramFacet("stats7").field("num").interval(100))
.execute().actionGet();
if (searchResponse.failedShards() > 0) {
@@ -752,6 +753,18 @@ public class SimpleFacetsTests extends AbstractNodesTests {
assertThat(facet.entries().get(1).count(), equalTo(1l));
assertThat(facet.entries().get(1).total(), equalTo(1175d));
assertThat(facet.entries().get(1).mean(), equalTo(1175d));
facet = searchResponse.facets().facet("stats7");
assertThat(facet.name(), equalTo("stats7"));
assertThat(facet.entries().size(), equalTo(2));
assertThat(facet.entries().get(0).key(), equalTo(1000l));
assertThat(facet.entries().get(0).count(), equalTo(2l));
assertThat(facet.entries().get(0).total(), equalTo(-1d));
assertThat(facet.entries().get(0).mean(), equalTo(-1d));
assertThat(facet.entries().get(1).key(), equalTo(1100l));
assertThat(facet.entries().get(1).count(), equalTo(1l));
assertThat(facet.entries().get(1).total(), equalTo(-1d));
assertThat(facet.entries().get(1).mean(), equalTo(-1d));
}
@Test public void testRangeFacets() throws Exception {