Search Facets: Histogram facets allow specifying different key and value fields

commit 64479a11c3
parent 5ca050ffcb
@@ -362,43 +362,85 @@ public class SearchRequestBuilder {
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogram(String name, String keyFieldName, String valueFieldName, long interval) {
+        facetsBuilder().histogramFacet(name, keyFieldName, valueFieldName, interval);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogram(String name, String fieldName, long interval, @Nullable XContentFilterBuilder filter) {
         facetsBuilder().histogramFacet(name, fieldName, interval, filter);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogram(String name, String keyFieldName, String valueFieldName, long interval, @Nullable XContentFilterBuilder filter) {
+        facetsBuilder().histogramFacet(name, keyFieldName, valueFieldName, interval, filter);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogram(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
         facetsBuilder().histogramFacet(name, fieldName, interval, comparatorType);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogram(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
+        facetsBuilder().histogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogram(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType,
                                                   @Nullable XContentFilterBuilder filter) {
         facetsBuilder().histogramFacet(name, fieldName, interval, comparatorType, filter);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogram(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+                                                  @Nullable XContentFilterBuilder filter) {
+        facetsBuilder().histogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType, filter);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogramGlobal(String name, String fieldName, long interval) {
         facetsBuilder().histogramFacetGlobal(name, fieldName, interval);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogramGlobal(String name, String keyFieldName, String valueFieldName, long interval) {
+        facetsBuilder().histogramFacetGlobal(name, keyFieldName, valueFieldName, interval);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogramGlobal(String name, String fieldName, long interval, @Nullable XContentFilterBuilder filter) {
         facetsBuilder().histogramFacetGlobal(name, fieldName, interval, filter);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogramGlobal(String name, String keyFieldName, String valueFieldName, long interval, @Nullable XContentFilterBuilder filter) {
+        facetsBuilder().histogramFacetGlobal(name, keyFieldName, valueFieldName, interval, filter);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogramGlobal(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
         facetsBuilder().histogramFacetGlobal(name, fieldName, interval, comparatorType);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogramGlobal(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
+        facetsBuilder().histogramFacetGlobal(name, keyFieldName, valueFieldName, interval, comparatorType);
+        return this;
+    }
+
     public SearchRequestBuilder addFacetHistogramGlobal(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType,
                                                         @Nullable XContentFilterBuilder filter) {
         facetsBuilder().histogramFacetGlobal(name, fieldName, interval, comparatorType, filter);
         return this;
     }
 
+    public SearchRequestBuilder addFacetHistogramGlobal(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+                                                        @Nullable XContentFilterBuilder filter) {
+        facetsBuilder().histogramFacetGlobal(name, keyFieldName, valueFieldName, interval, comparatorType, filter);
+        return this;
+    }
+
     /**
      * Adds a field to be highlighted with default fragment size of 100 characters, and
      * default number of fragments of 5.
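The new overloads mirror the existing single-field ones but take a separate key field (used to compute the histogram buckets) and value field (aggregated inside each bucket). A minimal usage sketch, assuming an already constructed SearchRequestBuilder named builder and illustrative field names; only the addFacetHistogram overloads themselves come from this commit:

    // same field drives both the bucket key and the aggregated value (existing form)
    builder.addFacetHistogram("by_price", "price", 100);
    // new form: bucket on "timestamp", aggregate a different field, "price"
    builder.addFacetHistogram("price_per_hour", "timestamp", "price", 3600 * 1000L);

Each call returns the builder, so both forms chain like the other facet helpers.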
@@ -86,6 +86,8 @@ public abstract class NumericFieldData extends FieldData {
         return (short) intValue(docId);
     }
 
+    public abstract double[] doubleValues(int docId);
+
     public abstract void forEachValueInDoc(int docId, DoubleValueInDocProc proc);
 
     public static interface DoubleValueInDocProc {
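The new abstract doubleValues(int docId) gives every numeric field data implementation a way to hand back a document's values as doubles, which is what the histogram collectors consume. A hedged sketch of a caller, assuming fieldData is any concrete NumericFieldData:

    // sum one document's values through the new accessor; the implementations
    // below may return a shared thread-local array, so the result should be
    // consumed immediately rather than retained
    double sum = 0;
    for (double v : fieldData.doubleValues(docId)) {
        sum += v;
    }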
@@ -29,11 +29,11 @@ import org.elasticsearch.util.gnu.trove.TDoubleArrayList;
 import java.io.IOException;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public abstract class DoubleFieldData extends NumericFieldData {
 
-    static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
+    public static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
 
     protected final double[] values;
     protected final int[] freqs;
@@ -23,13 +23,13 @@ import org.elasticsearch.index.field.FieldDataOptions;
 import org.elasticsearch.util.ThreadLocals;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class MultiValueDoubleFieldData extends DoubleFieldData {
 
-    private static final int VALUE_CACHE_SIZE = 100;
+    private static final int VALUE_CACHE_SIZE = 10;
 
-    private static ThreadLocal<ThreadLocals.CleanableValue<double[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
+    private ThreadLocal<ThreadLocals.CleanableValue<double[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
         @Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
             double[][] value = new double[VALUE_CACHE_SIZE][];
             for (int i = 0; i < value.length; i++) {
@@ -75,6 +75,10 @@ public class MultiValueDoubleFieldData extends DoubleFieldData {
         }
     }
 
+    @Override public double[] doubleValues(int docId) {
+        return values(docId);
+    }
+
     @Override public double value(int docId) {
         int[] docOrders = order[docId];
         if (docOrders == null) {
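The multi-valued implementations avoid allocating a fresh array per document by keeping one reusable array per possible length in a ThreadLocal; this commit also turns those caches into instance fields and shrinks VALUE_CACHE_SIZE from 100 to 10. A simplified sketch of the pattern, with ThreadLocals.CleanableValue omitted for brevity:

    private static final int VALUE_CACHE_SIZE = 10;

    private final ThreadLocal<double[][]> doublesCache = new ThreadLocal<double[][]>() {
        @Override protected double[][] initialValue() {
            double[][] value = new double[VALUE_CACHE_SIZE][];
            for (int i = 0; i < value.length; i++) {
                value[i] = new double[i]; // one reusable array per length 0..9
            }
            return value;
        }
    };

    double[] arrayFor(int length) {
        // short documents reuse a cached array, longer ones get a fresh allocation
        return length < VALUE_CACHE_SIZE ? doublesCache.get()[length] : new double[length];
    }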
@@ -22,11 +22,11 @@ package org.elasticsearch.index.field.doubles;
 import org.elasticsearch.index.field.FieldDataOptions;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class SingleValueDoubleFieldData extends DoubleFieldData {
 
-    private static ThreadLocal<double[]> valuesCache = new ThreadLocal<double[]>() {
+    private ThreadLocal<double[]> valuesCache = new ThreadLocal<double[]>() {
         @Override protected double[] initialValue() {
             return new double[1];
         }
@@ -64,6 +64,10 @@ public class SingleValueDoubleFieldData extends DoubleFieldData {
         proc.onValue(docId, values[loc]);
     }
 
+    @Override public double[] doubleValues(int docId) {
+        return values(docId);
+    }
+
     @Override public double value(int docId) {
         return values[order[docId]];
     }
@@ -20,16 +20,27 @@
 package org.elasticsearch.index.field.floats;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 import org.elasticsearch.util.ThreadLocals;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class MultiValueFloatFieldData extends FloatFieldData {
 
-    private static final int VALUE_CACHE_SIZE = 100;
+    private static final int VALUE_CACHE_SIZE = 10;
 
-    private static ThreadLocal<ThreadLocals.CleanableValue<float[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<float[][]>>() {
+    private ThreadLocal<ThreadLocals.CleanableValue<double[][]>> doublesValuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
+        @Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
+            double[][] value = new double[VALUE_CACHE_SIZE][];
+            for (int i = 0; i < value.length; i++) {
+                value[i] = new double[i];
+            }
+            return new ThreadLocals.CleanableValue<double[][]>(value);
+        }
+    };
+
+    private ThreadLocal<ThreadLocals.CleanableValue<float[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<float[][]>>() {
         @Override protected ThreadLocals.CleanableValue<float[][]> initialValue() {
             float[][] value = new float[VALUE_CACHE_SIZE][];
             for (int i = 0; i < value.length; i++) {
@@ -75,6 +86,23 @@ public class MultiValueFloatFieldData extends FloatFieldData {
         }
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int[] docOrders = order[docId];
+        if (docOrders == null) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] doubles;
+        if (docOrders.length < VALUE_CACHE_SIZE) {
+            doubles = doublesValuesCache.get().get()[docOrders.length];
+        } else {
+            doubles = new double[docOrders.length];
+        }
+        for (int i = 0; i < docOrders.length; i++) {
+            doubles[i] = values[docOrders[i]];
+        }
+        return doubles;
+    }
+
     @Override public float value(int docId) {
         int[] docOrders = order[docId];
         if (docOrders == null) {
@@ -20,13 +20,20 @@
 package org.elasticsearch.index.field.floats;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class SingleValueFloatFieldData extends FloatFieldData {
 
-    private static ThreadLocal<float[]> valuesCache = new ThreadLocal<float[]>() {
+    private ThreadLocal<double[]> doublesValuesCache = new ThreadLocal<double[]>() {
+        @Override protected double[] initialValue() {
+            return new double[1];
+        }
+    };
+
+    private ThreadLocal<float[]> valuesCache = new ThreadLocal<float[]>() {
         @Override protected float[] initialValue() {
             return new float[1];
         }
@@ -64,6 +71,16 @@ public class SingleValueFloatFieldData extends FloatFieldData {
         proc.onValue(docId, values[loc]);
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int loc = order[docId];
+        if (loc == 0) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] ret = doublesValuesCache.get();
+        ret[0] = values[loc];
+        return ret;
+    }
+
     @Override public float value(int docId) {
         return values[order[docId]];
     }
@@ -20,16 +20,27 @@
 package org.elasticsearch.index.field.ints;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 import org.elasticsearch.util.ThreadLocals;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class MultiValueIntFieldData extends IntFieldData {
 
-    private static final int VALUE_CACHE_SIZE = 100;
+    private static final int VALUE_CACHE_SIZE = 10;
 
-    private static ThreadLocal<ThreadLocals.CleanableValue<int[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<int[][]>>() {
+    private ThreadLocal<ThreadLocals.CleanableValue<double[][]>> doublesValuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
+        @Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
+            double[][] value = new double[VALUE_CACHE_SIZE][];
+            for (int i = 0; i < value.length; i++) {
+                value[i] = new double[i];
+            }
+            return new ThreadLocals.CleanableValue<double[][]>(value);
+        }
+    };
+
+    private ThreadLocal<ThreadLocals.CleanableValue<int[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<int[][]>>() {
         @Override protected ThreadLocals.CleanableValue<int[][]> initialValue() {
             int[][] value = new int[VALUE_CACHE_SIZE][];
             for (int i = 0; i < value.length; i++) {
@@ -75,6 +86,23 @@ public class MultiValueIntFieldData extends IntFieldData {
         }
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int[] docOrders = order[docId];
+        if (docOrders == null) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] doubles;
+        if (docOrders.length < VALUE_CACHE_SIZE) {
+            doubles = doublesValuesCache.get().get()[docOrders.length];
+        } else {
+            doubles = new double[docOrders.length];
+        }
+        for (int i = 0; i < docOrders.length; i++) {
+            doubles[i] = values[docOrders[i]];
+        }
+        return doubles;
+    }
+
     @Override public int value(int docId) {
         int[] docOrders = order[docId];
         if (docOrders == null) {
@@ -20,13 +20,20 @@
 package org.elasticsearch.index.field.ints;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class SingleValueIntFieldData extends IntFieldData {
 
-    private static ThreadLocal<int[]> valuesCache = new ThreadLocal<int[]>() {
+    private ThreadLocal<double[]> doublesValuesCache = new ThreadLocal<double[]>() {
+        @Override protected double[] initialValue() {
+            return new double[1];
+        }
+    };
+
+    private ThreadLocal<int[]> valuesCache = new ThreadLocal<int[]>() {
         @Override protected int[] initialValue() {
             return new int[1];
         }
@@ -64,6 +71,16 @@ public class SingleValueIntFieldData extends IntFieldData {
         proc.onValue(docId, values[loc]);
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int loc = order[docId];
+        if (loc == 0) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] ret = doublesValuesCache.get();
+        ret[0] = values[loc];
+        return ret;
+    }
+
     @Override public int value(int docId) {
         return values[order[docId]];
     }
@@ -20,16 +20,27 @@
 package org.elasticsearch.index.field.longs;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 import org.elasticsearch.util.ThreadLocals;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class MultiValueLongFieldData extends LongFieldData {
 
-    private static final int VALUE_CACHE_SIZE = 100;
+    private static final int VALUE_CACHE_SIZE = 10;
 
-    private static ThreadLocal<ThreadLocals.CleanableValue<long[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<long[][]>>() {
+    private ThreadLocal<ThreadLocals.CleanableValue<double[][]>> doublesValuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
+        @Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
+            double[][] value = new double[VALUE_CACHE_SIZE][];
+            for (int i = 0; i < value.length; i++) {
+                value[i] = new double[i];
+            }
+            return new ThreadLocals.CleanableValue<double[][]>(value);
+        }
+    };
+
+    private ThreadLocal<ThreadLocals.CleanableValue<long[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<long[][]>>() {
         @Override protected ThreadLocals.CleanableValue<long[][]> initialValue() {
             long[][] value = new long[VALUE_CACHE_SIZE][];
             for (int i = 0; i < value.length; i++) {
@@ -75,6 +86,23 @@ public class MultiValueLongFieldData extends LongFieldData {
         }
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int[] docOrders = order[docId];
+        if (docOrders == null) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] doubles;
+        if (docOrders.length < VALUE_CACHE_SIZE) {
+            doubles = doublesValuesCache.get().get()[docOrders.length];
+        } else {
+            doubles = new double[docOrders.length];
+        }
+        for (int i = 0; i < docOrders.length; i++) {
+            doubles[i] = values[docOrders[i]];
+        }
+        return doubles;
+    }
+
     @Override public long value(int docId) {
         int[] docOrders = order[docId];
         if (docOrders == null) {
@@ -20,13 +20,20 @@
 package org.elasticsearch.index.field.longs;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class SingleValueLongFieldData extends LongFieldData {
 
-    private static ThreadLocal<long[]> valuesCache = new ThreadLocal<long[]>() {
+    private ThreadLocal<double[]> doublesValuesCache = new ThreadLocal<double[]>() {
+        @Override protected double[] initialValue() {
+            return new double[1];
+        }
+    };
+
+    private ThreadLocal<long[]> valuesCache = new ThreadLocal<long[]>() {
         @Override protected long[] initialValue() {
             return new long[1];
         }
@@ -64,6 +71,16 @@ public class SingleValueLongFieldData extends LongFieldData {
         proc.onValue(docId, values[loc]);
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int loc = order[docId];
+        if (loc == 0) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] ret = doublesValuesCache.get();
+        ret[0] = values[loc];
+        return ret;
+    }
+
     @Override public long value(int docId) {
         return values[order[docId]];
     }
@@ -20,16 +20,27 @@
 package org.elasticsearch.index.field.shorts;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 import org.elasticsearch.util.ThreadLocals;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class MultiValueShortFieldData extends ShortFieldData {
 
-    private static final int VALUE_CACHE_SIZE = 100;
+    private static final int VALUE_CACHE_SIZE = 10;
 
-    private static ThreadLocal<ThreadLocals.CleanableValue<short[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<short[][]>>() {
+    private ThreadLocal<ThreadLocals.CleanableValue<double[][]>> doublesValuesCache = new ThreadLocal<ThreadLocals.CleanableValue<double[][]>>() {
+        @Override protected ThreadLocals.CleanableValue<double[][]> initialValue() {
+            double[][] value = new double[VALUE_CACHE_SIZE][];
+            for (int i = 0; i < value.length; i++) {
+                value[i] = new double[i];
+            }
+            return new ThreadLocals.CleanableValue<double[][]>(value);
+        }
+    };
+
+    private ThreadLocal<ThreadLocals.CleanableValue<short[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<short[][]>>() {
         @Override protected ThreadLocals.CleanableValue<short[][]> initialValue() {
             short[][] value = new short[VALUE_CACHE_SIZE][];
             for (int i = 0; i < value.length; i++) {
@@ -75,6 +86,23 @@ public class MultiValueShortFieldData extends ShortFieldData {
         }
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int[] docOrders = order[docId];
+        if (docOrders == null) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] doubles;
+        if (docOrders.length < VALUE_CACHE_SIZE) {
+            doubles = doublesValuesCache.get().get()[docOrders.length];
+        } else {
+            doubles = new double[docOrders.length];
+        }
+        for (int i = 0; i < docOrders.length; i++) {
+            doubles[i] = values[docOrders[i]];
+        }
+        return doubles;
+    }
+
     @Override public short value(int docId) {
         int[] docOrders = order[docId];
         if (docOrders == null) {
@@ -20,13 +20,20 @@
 package org.elasticsearch.index.field.shorts;
 
 import org.elasticsearch.index.field.FieldDataOptions;
+import org.elasticsearch.index.field.doubles.DoubleFieldData;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class SingleValueShortFieldData extends ShortFieldData {
 
-    private static ThreadLocal<short[]> valuesCache = new ThreadLocal<short[]>() {
+    private ThreadLocal<double[]> doublesValuesCache = new ThreadLocal<double[]>() {
+        @Override protected double[] initialValue() {
+            return new double[1];
+        }
+    };
+
+    private ThreadLocal<short[]> valuesCache = new ThreadLocal<short[]>() {
         @Override protected short[] initialValue() {
             return new short[1];
         }
@@ -68,6 +75,16 @@ public class SingleValueShortFieldData extends ShortFieldData {
         return values[order[docId]];
     }
 
+    @Override public double[] doubleValues(int docId) {
+        int loc = order[docId];
+        if (loc == 0) {
+            return DoubleFieldData.EMPTY_DOUBLE_ARRAY;
+        }
+        double[] ret = doublesValuesCache.get();
+        ret[0] = values[loc];
+        return ret;
+    }
+
     @Override public short[] values(int docId) {
         int loc = order[docId];
         if (loc == 0) {
@@ -134,44 +134,78 @@ public class SearchSourceFacetsBuilder implements ToXContent {
     }
 
     public SearchSourceFacetsBuilder histogramFacet(String name, String fieldName, long interval) {
-        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.VALUE, null);
+        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.KEY, null);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacet(String name, String keyFieldName, String valueFieldName, long interval) {
+        return histogramFacet(name, keyFieldName, valueFieldName, interval, HistogramFacet.ComparatorType.KEY, null);
     }
 
     public SearchSourceFacetsBuilder histogramFacet(String name, String fieldName, long interval, @Nullable XContentFilterBuilder filter) {
-        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.VALUE, filter);
+        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.KEY, filter);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacet(String name, String keyFieldName, String valueFieldName, long interval, @Nullable XContentFilterBuilder filter) {
+        return histogramFacet(name, keyFieldName, valueFieldName, interval, HistogramFacet.ComparatorType.KEY, filter);
     }
 
     public SearchSourceFacetsBuilder histogramFacet(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
         return histogramFacet(name, fieldName, interval, comparatorType, null);
     }
 
-    public SearchSourceFacetsBuilder histogramFacet(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+    public SearchSourceFacetsBuilder histogramFacet(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
+        return histogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType, null);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacet(String name, String keyFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+                                                    @Nullable XContentFilterBuilder filter) {
+        return histogramFacet(name, keyFieldName, null, interval, comparatorType, filter);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacet(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
                                                     @Nullable XContentFilterBuilder filter) {
         if (histogramFacets == null) {
             histogramFacets = newArrayListWithCapacity(2);
         }
-        histogramFacets.add(new BuilderHistogramFacet(name, fieldName, interval, comparatorType, filter, false));
+        histogramFacets.add(new BuilderHistogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType, filter, false));
         return this;
     }
 
     public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String fieldName, long interval) {
-        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.VALUE, null);
+        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.KEY, null);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String keyFieldName, String valueFieldName, long interval) {
+        return histogramFacet(name, keyFieldName, valueFieldName, interval, HistogramFacet.ComparatorType.KEY, null);
    }
 
     public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String fieldName, long interval, @Nullable XContentFilterBuilder filter) {
-        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.VALUE, filter);
+        return histogramFacet(name, fieldName, interval, HistogramFacet.ComparatorType.KEY, filter);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String keyFieldName, String valueFieldName, long interval, @Nullable XContentFilterBuilder filter) {
+        return histogramFacet(name, keyFieldName, valueFieldName, interval, HistogramFacet.ComparatorType.KEY, filter);
     }
 
     public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
         return histogramFacetGlobal(name, fieldName, interval, comparatorType, null);
     }
 
+    public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType) {
+        return histogramFacetGlobal(name, keyFieldName, valueFieldName, interval, comparatorType, null);
+    }
+
     public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType,
                                                           @Nullable XContentFilterBuilder filter) {
+        return histogramFacetGlobal(name, fieldName, null, interval, comparatorType, filter);
+    }
+
+    public SearchSourceFacetsBuilder histogramFacetGlobal(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+                                                          @Nullable XContentFilterBuilder filter) {
         if (histogramFacets == null) {
             histogramFacets = newArrayListWithCapacity(2);
         }
-        histogramFacets.add(new BuilderHistogramFacet(name, fieldName, interval, comparatorType, filter, true));
+        histogramFacets.add(new BuilderHistogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType, filter, true));
         return this;
     }
 
@@ -247,7 +281,12 @@ public class SearchSourceFacetsBuilder implements ToXContent {
             builder.startObject(histogramFacet.name());
 
             builder.startObject(HistogramFacetCollectorParser.NAME);
-            builder.field("field", histogramFacet.fieldName());
+            if (histogramFacet.valueFieldName() != null && !histogramFacet.keyFieldName().equals(histogramFacet.valueFieldName())) {
+                builder.field("key_field", histogramFacet.keyFieldName());
+                builder.field("value_field", histogramFacet.valueFieldName());
+            } else {
+                builder.field("field", histogramFacet.keyFieldName());
+            }
             builder.field("interval", histogramFacet.interval());
             builder.field("comparator", histogramFacet.comparatorType().description());
             builder.endObject();
@@ -366,20 +405,22 @@ public class SearchSourceFacetsBuilder implements ToXContent {
 
     private static class BuilderHistogramFacet {
         private final String name;
-        private final String fieldName;
+        private final String keyFieldName;
+        private final String valueFieldName;
         private final long interval;
         private final HistogramFacet.ComparatorType comparatorType;
         private final XContentFilterBuilder filter;
         private final Boolean global;
 
-        private BuilderHistogramFacet(String name, String fieldName, long interval, XContentFilterBuilder filter, Boolean global) {
-            this(name, fieldName, interval, HistogramFacet.ComparatorType.VALUE, filter, global);
+        private BuilderHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, XContentFilterBuilder filter, Boolean global) {
+            this(name, keyFieldName, valueFieldName, interval, HistogramFacet.ComparatorType.KEY, filter, global);
         }
 
-        private BuilderHistogramFacet(String name, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType,
+        private BuilderHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType,
                                       XContentFilterBuilder filter, Boolean global) {
             this.name = name;
-            this.fieldName = fieldName;
+            this.keyFieldName = keyFieldName;
+            this.valueFieldName = valueFieldName;
             this.interval = interval;
            this.comparatorType = comparatorType;
             this.filter = filter;
@@ -390,8 +431,12 @@ public class SearchSourceFacetsBuilder implements ToXContent {
             return name;
         }
 
-        public String fieldName() {
-            return fieldName;
+        public String keyFieldName() {
+            return keyFieldName;
+        }
+
+        public String valueFieldName() {
+            return valueFieldName;
         }
 
         public long interval() {
@@ -32,20 +32,24 @@ import java.util.List;
  */
 public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
 
-    String fieldName();
+    String keyFieldName();
 
-    String getFieldName();
+    String getKeyFieldName();
+
+    String valueFieldName();
+
+    String getValueFieldName();
 
     List<Entry> entries();
 
     List<Entry> getEntries();
 
     public static enum ComparatorType {
-        VALUE((byte) 0, "value", new Comparator<Entry>() {
+        KEY((byte) 0, "key", new Comparator<Entry>() {
 
             @Override public int compare(Entry o1, Entry o2) {
                 // really, there should not be two entries with the same value
-                int i = (int) (o1.value() - o2.value());
+                int i = (int) (o1.key() - o2.key());
                 if (i == 0) {
                     i = System.identityHashCode(o1) - System.identityHashCode(o2);
                 }
@@ -105,7 +109,7 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
 
         public static ComparatorType fromId(byte id) {
             if (id == 0) {
-                return VALUE;
+                return KEY;
             } else if (id == 1) {
                 return COUNT;
             } else if (id == 2) {
@@ -115,8 +119,8 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
         }
 
         public static ComparatorType fromString(String type) {
-            if ("value".equals(type)) {
-                return VALUE;
+            if ("key".equals(type)) {
+                return KEY;
             } else if ("count".equals(type)) {
                 return COUNT;
             } else if ("total".equals(type)) {
@@ -128,22 +132,22 @@ public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
 
 
     public class Entry {
-        private final long value;
+        private final long key;
         private final long count;
         private final double total;
 
-        public Entry(long value, long count, double total) {
-            this.value = value;
+        public Entry(long key, long count, double total) {
+            this.key = key;
             this.count = count;
             this.total = total;
         }
 
-        public long value() {
-            return value;
+        public long key() {
+            return key;
         }
 
-        public long getValue() {
-            return value();
+        public long getKey() {
+            return key();
         }
 
         public long count() {
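With the rename, each Entry is addressed by its bucket key and carries the document count and the total of the value field; ComparatorType.KEY (formerly VALUE) orders entries by that key. A hedged sketch of consuming the entries, assuming facet is a HistogramFacet taken from a search response:

    for (HistogramFacet.Entry entry : facet) {
        long bucketStart = entry.key();   // lower bound of the bucket
        long docs = entry.count();        // documents that fell into it
        double total = entry.total();     // sum of the value field
        System.out.println(bucketStart + ": " + docs + " docs, mean " + (total / docs));
    }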
@@ -36,6 +36,9 @@ import java.io.IOException;
 import static org.elasticsearch.index.field.FieldDataOptions.*;
 
 /**
+ * A histogram facet collector that uses the same field as the key as well as the
+ * value.
+ *
  * @author kimchy (shay.banon)
  */
 public class HistogramFacetCollector extends AbstractFacetCollector {
@@ -79,7 +82,11 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
     }
 
     @Override public Facet facet() {
-        return new InternalHistogramFacet(facetName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
+        return new InternalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
+    }
+
+    public static long bucket(double value, long interval) {
+        return (((long) (value / interval)) * interval);
     }
 
     public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
@@ -95,7 +102,7 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
         }
 
         @Override public void onValue(int docId, double value) {
-            long bucket = (((long) (value / interval)) * interval);
+            long bucket = bucket(value, interval);
             counts.adjustOrPutValue(bucket, 1, 1);
             totals.adjustOrPutValue(bucket, value, value);
         }
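Factoring the bucket computation into the static bucket(value, interval) helper makes the rounding rule explicit and reusable by the key/value collector: the key is the value truncated to the start of its interval. A few worked examples:

    long a = HistogramFacetCollector.bucket(1251d, 100);  // 1200
    long b = HistogramFacetCollector.bucket(99d, 100);    // 0
    long c = HistogramFacetCollector.bucket(-150d, 100);  // -100, since the long
                                                          // cast truncates toward zero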
@@ -23,6 +23,7 @@ import org.elasticsearch.search.facets.FacetPhaseExecutionException;
 import org.elasticsearch.search.facets.collector.FacetCollector;
 import org.elasticsearch.search.facets.collector.FacetCollectorParser;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.util.TimeValue;
 import org.elasticsearch.util.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -39,26 +40,36 @@ public class HistogramFacetCollectorParser implements FacetCollectorParser {
     }
 
     @Override public FacetCollector parser(String facetName, XContentParser parser, SearchContext context) throws IOException {
-        String field = null;
-        String fieldName = null;
+        String keyField = null;
+        String valueField = null;
         long interval = -1;
-        HistogramFacet.ComparatorType comparatorType = HistogramFacet.ComparatorType.VALUE;
+        HistogramFacet.ComparatorType comparatorType = HistogramFacet.ComparatorType.KEY;
         XContentParser.Token token;
+        String fieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 fieldName = parser.currentName();
             } else if (token.isValue()) {
                 if ("field".equals(fieldName)) {
-                    field = parser.text();
+                    keyField = parser.text();
+                } else if ("key_field".equals(fieldName) || "keyField".equals(fieldName)) {
+                    keyField = parser.text();
+                } else if ("value_field".equals(fieldName) || "valueField".equals(fieldName)) {
+                    valueField = parser.text();
                 } else if ("interval".equals(fieldName)) {
                     interval = parser.longValue();
+                } else if ("time_interval".equals(fieldName)) {
+                    interval = TimeValue.parseTimeValue(parser.text(), null).millis();
                 } else if ("comparator".equals(fieldName)) {
                     comparatorType = HistogramFacet.ComparatorType.fromString(parser.text());
                 }
             }
         }
 
+        if (keyField == null) {
+            throw new FacetPhaseExecutionException(facetName, "key field is required to be set for histogram facet, either using [field] or using [key_field]");
+        }
+
         if (interval == -1) {
             throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
         }
@@ -66,6 +77,12 @@ public class HistogramFacetCollectorParser implements FacetCollectorParser {
         if (interval < 0) {
             throw new FacetPhaseExecutionException(facetName, "[interval] is required to be positive for histogram facet");
         }
-        return new HistogramFacetCollector(facetName, field, interval, comparatorType, context.fieldDataCache(), context.mapperService());
+
+        if (valueField == null || keyField.equals(valueField)) {
+            return new HistogramFacetCollector(facetName, keyField, interval, comparatorType, context.fieldDataCache(), context.mapperService());
+        } else {
+            // we have a value field, and its different than the key
+            return new KeyValueHistogramFacetCollector(facetName, keyField, valueField, interval, comparatorType, context.fieldDataCache(), context.mapperService());
+        }
     }
 }
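Taken together, the parser now accepts either a single field or a key_field/value_field pair, an interval given either numerically or as a time value, and a comparator name; when the value field is missing or equal to the key it falls back to the original collector, otherwise it builds the new KeyValueHistogramFacetCollector. A hedged sketch of a facet source it would accept, assuming the facet type is registered under the name "histogram" and using illustrative field names:

    String facetSource = "{\"facets\":{\"price_per_hour\":{\"histogram\":{"
            + "\"key_field\":\"timestamp\","
            + "\"value_field\":\"price\","
            + "\"time_interval\":\"1h\","   // parsed via TimeValue into milliseconds
            + "\"comparator\":\"key\""      // "key", "count" or "total"
            + "}}}}";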
@@ -46,7 +46,8 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
 
     private String name;
 
-    private String fieldName;
+    private String keyFieldName;
+    private String valueFieldName;
 
     private long interval;
 
@@ -61,9 +62,10 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
     private InternalHistogramFacet() {
     }
 
-    public InternalHistogramFacet(String name, String fieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
+    public InternalHistogramFacet(String name, String keyFieldName, String valueFieldName, long interval, ComparatorType comparatorType, TLongLongHashMap counts, TLongDoubleHashMap totals) {
         this.name = name;
-        this.fieldName = fieldName;
+        this.keyFieldName = keyFieldName;
+        this.valueFieldName = valueFieldName;
         this.interval = interval;
         this.comparatorType = comparatorType;
         this.counts = counts;
@@ -78,12 +80,20 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
         return name();
     }
 
-    @Override public String fieldName() {
-        return this.fieldName;
+    @Override public String keyFieldName() {
+        return this.keyFieldName;
     }
 
-    @Override public String getFieldName() {
-        return fieldName();
+    @Override public String getKeyFieldName() {
+        return keyFieldName();
+    }
+
+    @Override public String valueFieldName() {
+        return this.valueFieldName;
+    }
+
+    @Override public String getValueFieldName() {
+        return valueFieldName();
     }
 
     @Override public Type type() {
@@ -161,19 +171,20 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
             totals = EMPTY_LONG_DOUBLE_MAP;
         }
 
-        return new InternalHistogramFacet(name, fieldName, interval, comparatorType, counts, totals);
+        return new InternalHistogramFacet(name, keyFieldName, valueFieldName, interval, comparatorType, counts, totals);
     }
 
     @Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(name);
         builder.field("_type", "histogram");
-        builder.field("_field", fieldName);
+        builder.field("_key_field", keyFieldName);
+        builder.field("_value_field", valueFieldName);
         builder.field("_comparator", comparatorType.description());
         builder.field("_interval", interval);
         builder.startArray("entries");
         for (Entry entry : computeEntries()) {
             builder.startObject();
-            builder.field("value", entry.value());
+            builder.field("key", entry.key());
             builder.field("count", entry.count());
             builder.field("total", entry.total());
             builder.field("mean", entry.mean());
@@ -191,7 +202,8 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
 
     @Override public void readFrom(StreamInput in) throws IOException {
         name = in.readUTF();
-        fieldName = in.readUTF();
+        keyFieldName = in.readUTF();
+        valueFieldName = in.readUTF();
         interval = in.readVLong();
         comparatorType = ComparatorType.fromId(in.readByte());
 
@@ -212,7 +224,8 @@ public class InternalHistogramFacet implements HistogramFacet, InternalFacet {
 
     @Override public void writeTo(StreamOutput out) throws IOException {
         out.writeUTF(name);
-        out.writeUTF(fieldName);
+        out.writeUTF(keyFieldName);
+        out.writeUTF(valueFieldName);
         out.writeVLong(interval);
         out.writeByte(comparatorType.id());
         // optimize the write, since we know we have the same buckets as keys
@@ -0,0 +1,131 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.facets.histogram;
+
+import org.apache.lucene.index.IndexReader;
+import org.elasticsearch.index.cache.field.FieldDataCache;
+import org.elasticsearch.index.field.FieldData;
+import org.elasticsearch.index.field.NumericFieldData;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.search.facets.Facet;
+import org.elasticsearch.search.facets.FacetPhaseExecutionException;
+import org.elasticsearch.search.facets.support.AbstractFacetCollector;
+import org.elasticsearch.util.gnu.trove.TLongDoubleHashMap;
+import org.elasticsearch.util.gnu.trove.TLongLongHashMap;
+
+import java.io.IOException;
+
+import static org.elasticsearch.index.field.FieldDataOptions.*;
+
+/**
+ * A histogram facet collector that uses different fields for the key and the value.
+ *
+ * @author kimchy (shay.banon)
+ */
+public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
+
+    private final String keyFieldName;
+
+    private final String valueFieldName;
+
+    private final long interval;
+
+    private final HistogramFacet.ComparatorType comparatorType;
+
+    private final FieldDataCache fieldDataCache;
+
+    private final FieldData.Type keyFieldDataType;
+    private NumericFieldData keyFieldData;
+
+    private final FieldData.Type valueFieldDataType;
+    private NumericFieldData valueFieldData;
+
+    private final TLongLongHashMap counts = new TLongLongHashMap();
+    private final TLongDoubleHashMap totals = new TLongDoubleHashMap();
+
+    public KeyValueHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType, FieldDataCache fieldDataCache, MapperService mapperService) {
+        super(facetName);
+        this.keyFieldName = keyFieldName;
+        this.valueFieldName = valueFieldName;
+        this.interval = interval;
+        this.comparatorType = comparatorType;
+        this.fieldDataCache = fieldDataCache;
+
+        FieldMapper mapper = mapperService.smartNameFieldMapper(keyFieldName);
+        if (mapper == null) {
+            throw new FacetPhaseExecutionException(facetName, "No mapping found for key_field [" + keyFieldName + "]");
+        }
+        keyFieldDataType = mapper.fieldDataType();
+
+        mapper = mapperService.smartNameFieldMapper(valueFieldName);
|
if (mapper == null) {
|
||||||
|
throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
|
||||||
|
}
|
||||||
|
valueFieldDataType = mapper.fieldDataType();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override protected void doCollect(int doc) throws IOException {
|
||||||
|
if (keyFieldData.multiValued()) {
|
||||||
|
if (valueFieldData.multiValued()) {
|
||||||
|
// both multi valued, intersect based on the minimum size
|
||||||
|
double[] keys = keyFieldData.doubleValues(doc);
|
||||||
|
double[] values = valueFieldData.doubleValues(doc);
|
||||||
|
int size = Math.min(keys.length, values.length);
|
||||||
|
for (int i = 0; i < size; i++) {
|
||||||
|
long bucket = HistogramFacetCollector.bucket(keys[i], interval);
|
||||||
|
counts.adjustOrPutValue(bucket, 1, 1);
|
||||||
|
totals.adjustOrPutValue(bucket, values[i], values[i]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// key multi valued, value is a single value
|
||||||
|
double value = valueFieldData.doubleValue(doc);
|
||||||
|
for (double key : keyFieldData.doubleValues(doc)) {
|
||||||
|
long bucket = HistogramFacetCollector.bucket(key, interval);
|
||||||
|
counts.adjustOrPutValue(bucket, 1, 1);
|
||||||
|
totals.adjustOrPutValue(bucket, value, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// single key value, compute the bucket once
|
||||||
|
long bucket = HistogramFacetCollector.bucket(keyFieldData.doubleValue(doc), interval);
|
||||||
|
if (valueFieldData.multiValued()) {
|
||||||
|
for (double value : valueFieldData.doubleValues(doc)) {
|
||||||
|
counts.adjustOrPutValue(bucket, 1, 1);
|
||||||
|
totals.adjustOrPutValue(bucket, value, value);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// both key and value are not multi valued
|
||||||
|
double value = valueFieldData.doubleValue(doc);
|
||||||
|
counts.adjustOrPutValue(bucket, 1, 1);
|
||||||
|
totals.adjustOrPutValue(bucket, value, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
|
||||||
|
keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, reader, keyFieldName, fieldDataOptions().withFreqs(false));
|
||||||
|
valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, reader, valueFieldName, fieldDataOptions().withFreqs(false));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override public Facet facet() {
|
||||||
|
return new InternalHistogramFacet(facetName, keyFieldName, valueFieldName, interval, comparatorType, counts, totals);
|
||||||
|
}
|
||||||
|
}
|
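The collector delegates the key computation to HistogramFacetCollector.bucket(key, interval), whose body is not part of this diff. The sketch below assumes the usual floor-to-the-nearest-interval rounding and shows, with made-up numbers, how the counts and totals maps combine into the per-bucket mean (total / count) that the facet reports; the class name and the sample values are illustrative only:

import org.elasticsearch.util.gnu.trove.TLongDoubleHashMap;
import org.elasticsearch.util.gnu.trove.TLongLongHashMap;

public class KeyValueBucketingSketch {

    // Assumed behaviour of HistogramFacetCollector.bucket(...): round the key
    // down to the nearest multiple of the interval. The real implementation is
    // not shown in this commit, so treat this as an approximation.
    static long bucket(double value, long interval) {
        return ((long) (value / interval)) * interval;
    }

    public static void main(String[] args) {
        long interval = 100;
        TLongLongHashMap counts = new TLongLongHashMap();
        TLongDoubleHashMap totals = new TLongDoubleHashMap();

        // Hypothetical (key, value) pairs, one per collected document value.
        double[][] pairs = {{1055, 14}, {1065, 27}, {1175, 42}};
        for (double[] pair : pairs) {
            long b = bucket(pair[0], interval);            // key field picks the bucket
            counts.adjustOrPutValue(b, 1, 1);              // one more value in the bucket
            totals.adjustOrPutValue(b, pair[1], pair[1]);  // value field feeds the total
        }

        // mean per bucket = total / count, which is what entry.mean() exposes
        for (long b : counts.keys()) {
            System.out.println(b + ": count=" + counts.get(b)
                    + " total=" + totals.get(b)
                    + " mean=" + totals.get(b) / counts.get(b));
        }
    }
}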
@@ -186,16 +186,17 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                .setQuery(QueryBuilders.matchAllQuery())
                .addFacetHistogram("stats1", "num", 100)
                .addFacetHistogram("stats2", "multi_num", 10)
+                .addFacetHistogram("stats3", "num", "multi_num", 100)
                .execute().actionGet();

        HistogramFacet facet = searchResponse.facets().facet(HistogramFacet.class, "stats1");
        assertThat(facet.name(), equalTo("stats1"));
        assertThat(facet.entries().size(), equalTo(2));
-        assertThat(facet.entries().get(0).value(), equalTo(1000l));
+        assertThat(facet.entries().get(0).key(), equalTo(1000l));
        assertThat(facet.entries().get(0).count(), equalTo(2l));
        assertThat(facet.entries().get(0).total(), equalTo(2120d));
        assertThat(facet.entries().get(0).mean(), equalTo(1060d));
-        assertThat(facet.entries().get(1).value(), equalTo(1100l));
+        assertThat(facet.entries().get(1).key(), equalTo(1100l));
        assertThat(facet.entries().get(1).count(), equalTo(1l));
        assertThat(facet.entries().get(1).total(), equalTo(1175d));
        assertThat(facet.entries().get(1).mean(), equalTo(1175d));
@@ -203,17 +204,29 @@ public class SimpleFacetsTests extends AbstractNodesTests {
        facet = searchResponse.facets().facet(HistogramFacet.class, "stats2");
        assertThat(facet.name(), equalTo("stats2"));
        assertThat(facet.entries().size(), equalTo(3));
-        assertThat(facet.entries().get(0).value(), equalTo(10l));
+        assertThat(facet.entries().get(0).key(), equalTo(10l));
        assertThat(facet.entries().get(0).count(), equalTo(3l));
        assertThat(facet.entries().get(0).total(), equalTo(45d));
        assertThat(facet.entries().get(0).mean(), equalTo(15d));
-        assertThat(facet.entries().get(1).value(), equalTo(20l));
+        assertThat(facet.entries().get(1).key(), equalTo(20l));
        assertThat(facet.entries().get(1).count(), equalTo(2l));
        assertThat(facet.entries().get(1).total(), equalTo(48d));
        assertThat(facet.entries().get(1).mean(), equalTo(24d));
-        assertThat(facet.entries().get(2).value(), equalTo(30l));
+        assertThat(facet.entries().get(2).key(), equalTo(30l));
        assertThat(facet.entries().get(2).count(), equalTo(1l));
        assertThat(facet.entries().get(2).total(), equalTo(31d));
        assertThat(facet.entries().get(2).mean(), equalTo(31d));
+
+        facet = searchResponse.facets().facet(HistogramFacet.class, "stats3");
+        assertThat(facet.name(), equalTo("stats3"));
+        assertThat(facet.entries().size(), equalTo(2));
+        assertThat(facet.entries().get(0).key(), equalTo(1000l));
+        assertThat(facet.entries().get(0).count(), equalTo(4l));
+        assertThat(facet.entries().get(0).total(), equalTo(82d));
+        assertThat(facet.entries().get(0).mean(), equalTo(20.5d));
+        assertThat(facet.entries().get(1).key(), equalTo(1100l));
+        assertThat(facet.entries().get(1).count(), equalTo(2l));
+        assertThat(facet.entries().get(1).total(), equalTo(42d));
+        assertThat(facet.entries().get(1).mean(), equalTo(21d));
    }
}
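On the request side, the builder overload exercised by the stats3 facet above takes a key field and a value field in addition to the interval. A minimal sketch of how application code would use it; the index name, facet name, and field names are hypothetical placeholders, not part of the commit:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.facets.histogram.HistogramFacet;

public class KeyValueHistogramExample {

    // Buckets come from "key_field", totals and means from "value_field",
    // mirroring the "stats3" facet added to SimpleFacetsTests above.
    public static HistogramFacet keyValueHistogram(Client client) {
        SearchResponse response = client.prepareSearch("my_index")
                .addFacetHistogram("stats", "key_field", "value_field", 100)
                .execute().actionGet();
        return response.facets().facet(HistogramFacet.class, "stats");
    }
}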