move histogram facet to use new field data
commit 699ff2782e
parent 8c7e0f5ca1

IndexFieldDataService.java

@@ -87,6 +87,10 @@ public class IndexFieldDataService extends AbstractIndexComponent {
         }
     }
 
+    public <IFD extends IndexFieldData> IFD getForField(FieldMapper mapper) {
+        return getForField(mapper.names(), mapper.fieldDataType2());
+    }
+
     public <IFD extends IndexFieldData> IFD getForField(FieldMapper.Names fieldNames, FieldDataType type) {
         IndexFieldData fieldData = loadedFieldData.get(type.getType());
         if (fieldData == null) {
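The overload added here just delegates to the existing getForField(Names, FieldDataType) lookup, so callers that already hold a FieldMapper no longer unpack it themselves. An illustrative call site (the field name "price" and the surrounding variables are made up; IndexNumericFieldData is what the facet code below expects for numeric fields):

    // illustrative only -- `context` is a SearchContext as used by the facet processors below
    FieldMapper mapper = context.smartNameFieldMapper("price");   // "price" is a hypothetical field
    IndexNumericFieldData fieldData = context.fieldData().getForField(mapper);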
HistogramFacetProcessor.java

@@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetCollector;

@@ -115,32 +116,43 @@ public class HistogramFacetProcessor extends AbstractComponent implements FacetProcessor {
             throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
         }
 
-        if (sFrom != null && sTo != null && keyField != null) {
-            FieldMapper mapper = context.smartNameFieldMapper(keyField);
-            if (mapper == null) {
-                throw new FacetPhaseExecutionException(facetName, "No mapping found for key_field [" + keyField + "]");
+        FieldMapper keyMapper = context.smartNameFieldMapper(keyField);
+        if (keyMapper == null) {
+            throw new FacetPhaseExecutionException(facetName, "No mapping found for key_field [" + keyField + "]");
+        }
+        IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
+
+        IndexNumericFieldData valueIndexFieldData = null;
+        if (valueField != null) {
+            FieldMapper valueMapper = context.smartNameFieldMapper(valueField);
+            if (valueMapper == null) {
+                throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueField + "]");
             }
-            long from = ((Number) mapper.value(sFrom)).longValue();
-            long to = ((Number) mapper.value(sTo)).longValue();
+            valueIndexFieldData = context.fieldData().getForField(valueMapper);
+        }
+
+        if (sFrom != null && sTo != null && keyField != null) {
+            long from = ((Number) keyMapper.value(sFrom)).longValue();
+            long to = ((Number) keyMapper.value(sTo)).longValue();
 
             if (valueField != null) {
-                return new BoundedValueHistogramFacetCollector(facetName, keyField, valueField, interval, from, to, comparatorType, context);
+                return new BoundedValueHistogramFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, interval, from, to, comparatorType, context);
             } else if (valueScript != null) {
-                return new BoundedValueScriptHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, interval, from, to, comparatorType, context);
+                return new BoundedValueScriptHistogramFacetCollector(facetName, keyIndexFieldData, scriptLang, valueScript, params, interval, from, to, comparatorType, context);
             } else {
-                return new BoundedCountHistogramFacetCollector(facetName, keyField, interval, from, to, comparatorType, context);
+                return new BoundedCountHistogramFacetCollector(facetName, keyIndexFieldData, interval, from, to, comparatorType, context);
             }
         }
 
         if (valueScript != null) {
-            return new ValueScriptHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, interval, comparatorType, context);
+            return new ValueScriptHistogramFacetCollector(facetName, keyIndexFieldData, scriptLang, valueScript, params, interval, comparatorType, context);
         } else if (valueField == null) {
-            return new CountHistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
+            return new CountHistogramFacetCollector(facetName, keyIndexFieldData, interval, comparatorType, context);
        } else if (keyField.equals(valueField)) {
-            return new FullHistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
+            return new FullHistogramFacetCollector(facetName, keyIndexFieldData, interval, comparatorType, context);
         } else {
             // we have a value field, and its different than the key
-            return new ValueHistogramFacetCollector(facetName, keyField, valueField, interval, comparatorType, context);
+            return new ValueHistogramFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, interval, comparatorType, context);
         }
     }
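Taken together, the processor now resolves the key and value mappings once, turns them into IndexNumericFieldData, and hands those to the collectors; the mapping checks and FieldDataCache plumbing disappear from the collectors in the files that follow. The net effect on the collector constructors, copied from the hunks in this commit (BoundedCountHistogramFacetCollector as the example):

    // before: the collector received a field name and resolved everything itself
    public BoundedCountHistogramFacetCollector(String facetName, String fieldName, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context)

    // after: the processor passes pre-resolved field data
    public BoundedCountHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context)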
BoundedCountHistogramFacetCollector.java

@@ -21,14 +21,10 @@ package org.elasticsearch.search.facet.histogram.bounded;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.LongValues;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -36,38 +32,18 @@ import java.io.IOException;
 
 public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
+    private final IndexNumericFieldData indexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
+    private LongValues values;
 
     private final HistogramProc histoProc;
 
-    public BoundedCountHistogramFacetCollector(String facetName, String fieldName, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public BoundedCountHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        FieldMapper mapper = smartMappers.mapper();
-
-        indexFieldName = mapper.names().indexName();
-        fieldDataType = mapper.fieldDataType();
-
+        this.indexFieldData = indexFieldData;
         long normalizedFrom = (((long) ((double) from / interval)) * interval);
         long normalizedTo = (((long) ((double) to / interval)) * interval);
         if ((to % interval) != 0) {

@@ -81,12 +57,12 @@ public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        fieldData.forEachValueInDoc(doc, histoProc);
+        values.forEachValueInDoc(doc, histoProc);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        values = indexFieldData.load(context).getLongValues();
     }
 
     @Override

@@ -94,7 +70,7 @@ public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector
         return new InternalBoundedCountHistogramFacet(facetName, comparatorType, histoProc.interval, -histoProc.offset, histoProc.size, histoProc.counts, true);
     }
 
-    public static class HistogramProc implements NumericFieldData.LongValueInDocProc {
+    public static class HistogramProc implements LongValues.ValueInDocProc {
 
         final long from;
         final long to;

@@ -116,6 +92,10 @@ public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector
             this.counts = CacheRecycler.popIntArray(size);
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, long value) {
             if (value <= from || value > to) { // bounds check
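This collector shows the pattern every histogram collector in this commit follows: the per-segment field data is loaded in doSetNextReader(), iterated per document in doCollect(), and the proc gains an empty onMissing() because the new LongValues.ValueInDocProc / DoubleValues.ValueInDocProc interfaces report missing values explicitly. A condensed, non-compiling sketch of that flow using the types from the hunks above (only the field-data plumbing is shown):

    // condensed sketch -- not the full collector
    private final IndexNumericFieldData indexFieldData;   // resolved once by the facet processor
    private LongValues values;                            // per-segment view of the field
    private final HistogramProc histoProc = new HistogramProc();

    protected void doSetNextReader(AtomicReaderContext context) throws IOException {
        values = indexFieldData.load(context).getLongValues();   // (re)load for the new segment
    }

    protected void doCollect(int doc) throws IOException {
        values.forEachValueInDoc(doc, histoProc);                // push each value of the doc into the proc
    }

    static class HistogramProc implements LongValues.ValueInDocProc {
        @Override
        public void onMissing(int docId) {
            // docs without a value for the field are simply skipped
        }

        @Override
        public void onValue(int docId, long value) {
            // bounds-check and bucket the value, as in the collector above
        }
    }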
BoundedValueHistogramFacetCollector.java

@@ -21,13 +21,11 @@ package org.elasticsearch.search.facet.histogram.bounded;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.LongValues;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -38,48 +36,23 @@ import java.io.IOException;
 */
 public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String keyIndexFieldName;
-
-    private final String valueIndexFieldName;
+    private final IndexNumericFieldData keyIndexFieldData;
+    private final IndexNumericFieldData valueIndexFieldData;
 
     private final long interval;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType keyFieldDataType;
-    private NumericFieldData keyFieldData;
-
-    private final FieldDataType valueFieldDataType;
+    private LongValues keyValues;
 
     private final HistogramProc histoProc;
 
-    public BoundedValueHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public BoundedValueHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.interval = interval;
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(keyFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + keyFieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        keyIndexFieldName = smartMappers.mapper().names().indexName();
-        keyFieldDataType = smartMappers.mapper().fieldDataType();
-
-        smartMappers = context.smartFieldMappers(valueFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
-        }
-        valueIndexFieldName = smartMappers.mapper().names().indexName();
-        valueFieldDataType = smartMappers.mapper().fieldDataType();
+        this.keyIndexFieldData = keyIndexFieldData;
+        this.valueIndexFieldData = valueIndexFieldData;
 
         long normalizedFrom = (((long) ((double) from / interval)) * interval);
         long normalizedTo = (((long) ((double) to / interval)) * interval);

@@ -94,13 +67,13 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        keyFieldData.forEachValueInDoc(doc, histoProc);
+        keyValues.forEachValueInDoc(doc, histoProc);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, context.reader(), keyIndexFieldName);
-        histoProc.valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, context.reader(), valueIndexFieldName);
+        keyValues = keyIndexFieldData.load(context).getLongValues();
+        histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
     }
 
     @Override

@@ -108,7 +81,7 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
         return new InternalBoundedFullHistogramFacet(facetName, comparatorType, interval, -histoProc.offset, histoProc.size, histoProc.entries, true);
     }
 
-    public static class HistogramProc implements NumericFieldData.LongValueInDocProc {
+    public static class HistogramProc implements LongValues.ValueInDocProc {
 
         final long from;
         final long to;

@@ -121,7 +94,7 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
 
         final Object[] entries;
 
-        NumericFieldData valueFieldData;
+        DoubleValues valueValues;
 
         final ValueAggregator valueAggregator = new ValueAggregator();
 

@@ -134,6 +107,10 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
             this.entries = CacheRecycler.popObjectArray(size);
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, long value) {
             if (value <= from || value > to) { // bounds check

@@ -147,14 +124,18 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
             }
             entry.count++;
             valueAggregator.entry = entry;
-            valueFieldData.forEachValueInDoc(docId, valueAggregator);
+            valueValues.forEachValueInDoc(docId, valueAggregator);
         }
 
 
-        public static class ValueAggregator implements NumericFieldData.DoubleValueInDocProc {
+        public static class ValueAggregator implements DoubleValues.ValueInDocProc {
 
             InternalBoundedFullHistogramFacet.FullEntry entry;
 
+            @Override
+            public void onMissing(int docId) {
+            }
+
             @Override
             public void onValue(int docId, double value) {
                 entry.totalCount++;
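In the key/value collectors the same per-segment load happens twice: a LongValues view of the key field drives the bucketing, and a DoubleValues view of the value field is folded into the chosen bucket through the nested ValueAggregator. A trimmed sketch, using the names from the hunks above:

    // trimmed sketch -- both views are refreshed on every segment change
    protected void doSetNextReader(AtomicReaderContext context) throws IOException {
        keyValues = keyIndexFieldData.load(context).getLongValues();                   // key field: bucketing
        histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();   // value field: aggregation
    }

    // inside HistogramProc.onValue(docId, keyValue): select or create the bucket entry, then
    //     valueAggregator.entry = entry;
    //     valueValues.forEachValueInDoc(docId, valueAggregator);
    // ValueAggregator.onValue(docId, value) then updates that entry's totals.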
BoundedValueScriptHistogramFacetCollector.java

@@ -22,15 +22,11 @@ package org.elasticsearch.search.facet.histogram.bounded;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.LongValues;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -42,42 +38,23 @@ import java.util.Map;
 */
 public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
+    private final IndexNumericFieldData indexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
+    private LongValues keyValues;
 
     private final SearchScript valueScript;
 
     private final HistogramProc histoProc;
 
-    public BoundedValueScriptHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public BoundedValueScriptHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map<String, Object> params, long interval, long from, long to, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
+        this.indexFieldData = indexFieldData;
 
         this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
 
-        FieldMapper mapper = smartMappers.mapper();
-
-        indexFieldName = mapper.names().indexName();
-        fieldDataType = mapper.fieldDataType();
-
         long normalizedFrom = (((long) ((double) from / interval)) * interval);
         long normalizedTo = (((long) ((double) to / interval)) * interval);
         if ((to % interval) != 0) {

@@ -91,7 +68,7 @@ public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        fieldData.forEachValueInDoc(doc, histoProc);
+        keyValues.forEachValueInDoc(doc, histoProc);
     }
 
     @Override

@@ -101,7 +78,7 @@ public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        keyValues = indexFieldData.load(context).getLongValues();
         valueScript.setNextReader(context);
     }
 

@@ -114,7 +91,7 @@ public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector
         return (((long) (value / interval)) * interval);
     }
 
-    public static class HistogramProc implements NumericFieldData.LongValueInDocProc {
+    public static class HistogramProc implements LongValues.ValueInDocProc {
 
         final long from;
         final long to;

@@ -139,6 +116,10 @@ public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetCollector
             this.valueScript = valueScript;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, long value) {
             if (value <= from || value > to) { // bounds check
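The script variants keep the key side on field data but take the per-document value from a SearchScript, so the only extra per-segment work besides loading the key values is pointing the script at the new reader; the proc then positions the script per document before reading its value. A sketch, with the lines taken from the hunk above and the unbounded script collector later in this commit:

    protected void doSetNextReader(AtomicReaderContext context) throws IOException {
        keyValues = indexFieldData.load(context).getLongValues();
        valueScript.setNextReader(context);
    }

    // in HistogramProc.onValue(docId, key):
    //     valueScript.setNextDocId(docId);
    //     ... the script's numeric result is then added to the bucket (not shown in this hunk)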
CountHistogramFacetCollector.java

@@ -22,14 +22,10 @@ package org.elasticsearch.search.facet.histogram.unbounded;
 import gnu.trove.map.hash.TLongLongHashMap;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -41,49 +37,28 @@ import java.io.IOException;
 */
 public class CountHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
+    private final IndexNumericFieldData indexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
-
+    private DoubleValues values;
     private final HistogramProc histoProc;
 
-    public CountHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public CountHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        FieldMapper mapper = smartMappers.mapper();
-
-        indexFieldName = mapper.names().indexName();
-        fieldDataType = mapper.fieldDataType();
-
+        this.indexFieldData = indexFieldData;
         histoProc = new HistogramProc(interval);
     }
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        fieldData.forEachValueInDoc(doc, histoProc);
+        values.forEachValueInDoc(doc, histoProc);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        values = indexFieldData.load(context).getDoubleValues();
     }
 
     @Override

@@ -95,7 +70,7 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
         return (((long) (value / interval)) * interval);
     }
 
-    public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class HistogramProc implements DoubleValues.ValueInDocProc {
 
         private final long interval;
 

@@ -105,6 +80,10 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
             this.interval = interval;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             long bucket = bucket(value, interval);
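The unbounded collectors bucket through the same rounding helper shown above, return (((long) (value / interval)) * interval); with values now arriving as double, the divide-truncate-multiply keeps every bucket key on the interval grid (truncation toward zero). A worked example with interval = 10:

    // bucket(value, interval) = ((long) (value / interval)) * interval
    // bucket(27.4, 10) -> ((long) 2.74) * 10 -> 20
    // bucket(9.9, 10)  -> ((long) 0.99) * 10 -> 0
    // bucket(30.0, 10) -> ((long) 3.0)  * 10 -> 30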
FullHistogramFacetCollector.java

@@ -22,14 +22,10 @@ package org.elasticsearch.search.facet.histogram.unbounded;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -41,49 +37,28 @@ import java.io.IOException;
 */
 public class FullHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
+    private final IndexNumericFieldData indexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
-
+    private DoubleValues values;
     private final HistogramProc histoProc;
 
-    public FullHistogramFacetCollector(String facetName, String fieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public FullHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        FieldMapper mapper = smartMappers.mapper();
-
-        indexFieldName = mapper.names().indexName();
-        fieldDataType = mapper.fieldDataType();
-
+        this.indexFieldData = indexFieldData;
         histoProc = new HistogramProc(interval);
     }
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        fieldData.forEachValueInDoc(doc, histoProc);
+        values.forEachValueInDoc(doc, histoProc);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        values = indexFieldData.load(context).getDoubleValues();
     }
 
     @Override

@@ -95,7 +70,7 @@ public class FullHistogramFacetCollector extends AbstractFacetCollector {
         return (((long) (value / interval)) * interval);
     }
 
-    public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class HistogramProc implements DoubleValues.ValueInDocProc {
 
         final long interval;
 

@@ -105,6 +80,10 @@ public class FullHistogramFacetCollector extends AbstractFacetCollector {
             this.interval = interval;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             long bucket = bucket(value, interval);
ValueHistogramFacetCollector.java

@@ -22,13 +22,10 @@ package org.elasticsearch.search.facet.histogram.unbounded;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -39,61 +36,32 @@ import java.io.IOException;
 */
 public class ValueHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String keyIndexFieldName;
-
-    private final String valueIndexFieldName;
-
     private final long interval;
+    private final IndexNumericFieldData keyIndexFieldData;
+    private final IndexNumericFieldData valueIndexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType keyFieldDataType;
-    private NumericFieldData keyFieldData;
-
-    private final FieldDataType valueFieldDataType;
+    private DoubleValues keyValues;
 
     private final HistogramProc histoProc;
 
-    public ValueHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public ValueHistogramFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.interval = interval;
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(keyFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + keyFieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        keyIndexFieldName = smartMappers.mapper().names().indexName();
-        keyFieldDataType = smartMappers.mapper().fieldDataType();
-
-        smartMappers = context.smartFieldMappers(valueFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
-        }
-        valueIndexFieldName = smartMappers.mapper().names().indexName();
-        valueFieldDataType = smartMappers.mapper().fieldDataType();
 
+        this.keyIndexFieldData = keyIndexFieldData;
+        this.valueIndexFieldData = valueIndexFieldData;
         histoProc = new HistogramProc(interval);
     }
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        keyFieldData.forEachValueInDoc(doc, histoProc);
+        keyValues.forEachValueInDoc(doc, histoProc);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, context.reader(), keyIndexFieldName);
-        histoProc.valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, context.reader(), valueIndexFieldName);
+        keyValues = keyIndexFieldData.load(context).getDoubleValues();
+        histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
     }
 
     @Override

@@ -101,13 +69,13 @@ public class ValueHistogramFacetCollector extends AbstractFacetCollector {
         return new InternalFullHistogramFacet(facetName, comparatorType, histoProc.entries, true);
     }
 
-    public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class HistogramProc implements DoubleValues.ValueInDocProc {
 
         final long interval;
 
         final ExtTLongObjectHashMap<InternalFullHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
 
-        NumericFieldData valueFieldData;
+        DoubleValues valueValues;
 
         final ValueAggregator valueAggregator = new ValueAggregator();
 

@@ -115,6 +83,10 @@ public class ValueHistogramFacetCollector extends AbstractFacetCollector {
             this.interval = interval;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             long bucket = FullHistogramFacetCollector.bucket(value, interval);

@@ -125,13 +97,17 @@ public class ValueHistogramFacetCollector extends AbstractFacetCollector {
             }
             entry.count++;
             valueAggregator.entry = entry;
-            valueFieldData.forEachValueInDoc(docId, valueAggregator);
+            valueValues.forEachValueInDoc(docId, valueAggregator);
         }
 
-        public static class ValueAggregator implements NumericFieldData.DoubleValueInDocProc {
+        public static class ValueAggregator implements DoubleValues.ValueInDocProc {
 
             InternalFullHistogramFacet.FullEntry entry;
 
+            @Override
+            public void onMissing(int docId) {
+            }
+
             @Override
             public void onValue(int docId, double value) {
                 entry.totalCount++;
ValueScriptHistogramFacetCollector.java

@@ -23,15 +23,11 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.histogram.HistogramFacet;
 import org.elasticsearch.search.internal.SearchContext;
 

@@ -44,48 +40,25 @@ import java.util.Map;
 */
 public class ValueScriptHistogramFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
+    private final IndexNumericFieldData indexFieldData;
 
     private final HistogramFacet.ComparatorType comparatorType;
 
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
-
+    private DoubleValues values;
     private final SearchScript valueScript;
 
     private final HistogramProc histoProc;
 
-    public ValueScriptHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
+    public ValueScriptHistogramFacetCollector(String facetName, IndexNumericFieldData indexFieldData, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
         super(facetName);
         this.comparatorType = comparatorType;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
+        this.indexFieldData = indexFieldData;
         this.valueScript = context.scriptService().search(context.lookup(), scriptLang, valueScript, params);
 
-        FieldMapper mapper = smartMappers.mapper();
-
-        indexFieldName = mapper.names().indexName();
-        fieldDataType = mapper.fieldDataType();
-
         histoProc = new HistogramProc(interval, this.valueScript);
     }
 
     @Override
     protected void doCollect(int doc) throws IOException {
-        fieldData.forEachValueInDoc(doc, histoProc);
+        values.forEachValueInDoc(doc, histoProc);
     }
 
     @Override

@@ -95,7 +68,7 @@ public class ValueScriptHistogramFacetCollector extends AbstractFacetCollector {
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        values = indexFieldData.load(context).getDoubleValues();
         valueScript.setNextReader(context);
     }
 

@@ -108,7 +81,7 @@ public class ValueScriptHistogramFacetCollector extends AbstractFacetCollector {
         return (((long) (value / interval)) * interval);
     }
 
-    public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class HistogramProc implements DoubleValues.ValueInDocProc {
 
         private final long interval;
 

@@ -121,6 +94,10 @@ public class ValueScriptHistogramFacetCollector extends AbstractFacetCollector {
             this.valueScript = valueScript;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             valueScript.setNextDocId(docId);
RangeFacetProcessor.java

@@ -142,7 +142,7 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProcessor {
             }
         }
 
-        IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper.names(), keyFieldMapper.fieldDataType2());
+        IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper);
 
         if (valueField == null || keyField.equals(valueField)) {
             return new RangeFacetCollector(facetName, keyIndexFieldData, rangeEntries, context);

@@ -151,7 +151,7 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProcessor {
         if (valueFieldMapper == null) {
             throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + keyField + "]");
         }
-        IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper.names(), valueFieldMapper.fieldDataType2());
+        IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper);
         // we have a value field, and its different than the key
         return new KeyValueRangeFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, rangeEntries, context);
     }
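RangeFacetProcessor was already on the new field data; these two hunks only pick up the getForField(FieldMapper) overload added at the top of this commit, dropping the names()/fieldDataType2() unpacking at both call sites:

    // before
    context.fieldData().getForField(keyFieldMapper.names(), keyFieldMapper.fieldDataType2());
    // after
    context.fieldData().getForField(keyFieldMapper);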