move range facet to use new field data abstraction

This commit is contained in:
Shay Banon 2013-01-18 14:49:47 +01:00
parent 692413862a
commit fa363b2dca
4 changed files with 50 additions and 89 deletions

View File

@ -172,6 +172,12 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
return Defaults.FIELD_TYPE;
}
@Override
public org.elasticsearch.index.fielddata.FieldDataType fieldDataType2() {
    // Dates are stored as epoch millis, so back them with long field data
    // for now; revisit once scripts need a DateTime view of the values.
    final org.elasticsearch.index.fielddata.FieldDataType longType =
            new org.elasticsearch.index.fielddata.FieldDataType("long");
    return longType;
}
@Override
protected double parseFuzzyFactor(String fuzzyFactor) {
if (fuzzyFactor == null) {
@ -402,11 +408,6 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
return FieldDataType.DefaultTypes.LONG;
}
@Override
public org.elasticsearch.index.fielddata.FieldDataType fieldDataType2() {
    // NOTE(review): the new field data abstraction is intentionally not wired
    // up for this mapper yet — any caller requesting it fails fast here.
    throw new ElasticSearchIllegalArgumentException("not implemented");
}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -20,13 +20,10 @@
package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -36,53 +33,26 @@ import java.io.IOException;
*/
public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
private final String keyIndexFieldName;
private final String valueIndexFieldName;
private final FieldDataCache fieldDataCache;
private final FieldDataType keyFieldDataType;
private NumericFieldData keyFieldData;
private final FieldDataType valueFieldDataType;
private final IndexNumericFieldData keyIndexFieldData;
private final IndexNumericFieldData valueIndexFieldData;
private final RangeFacet.Entry[] entries;
private final RangeProc rangeProc;
public KeyValueRangeFacetCollector(String facetName, String keyFieldName, String valueFieldName, RangeFacet.Entry[] entries, SearchContext context) {
private DoubleValues keyValues;
public KeyValueRangeFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.entries = entries;
this.fieldDataCache = context.fieldDataCache();
MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(keyFieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + keyFieldName + "]");
}
// add type filter if there is exact doc mapper associated with it
if (smartMappers.explicitTypeInNameWithDocMapper()) {
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
}
keyIndexFieldName = smartMappers.mapper().names().indexName();
keyFieldDataType = smartMappers.mapper().fieldDataType();
smartMappers = context.smartFieldMappers(valueFieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
}
valueIndexFieldName = smartMappers.mapper().names().indexName();
valueFieldDataType = smartMappers.mapper().fieldDataType();
this.keyIndexFieldData = keyIndexFieldData;
this.valueIndexFieldData = valueIndexFieldData;
this.rangeProc = new RangeProc(entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, context.reader(), keyIndexFieldName);
rangeProc.valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, context.reader(), valueIndexFieldName);
keyValues = keyIndexFieldData.load(context).getDoubleValues();
rangeProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
}
@Override
@ -90,7 +60,7 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
keyFieldData.forEachValueInDoc(doc, rangeProc);
keyValues.forEachValueInDoc(doc, rangeProc);
}
@Override
@ -98,16 +68,20 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
return new InternalRangeFacet(facetName, entries);
}
public static class RangeProc implements NumericFieldData.DoubleValueInDocProc {
public static class RangeProc implements DoubleValues.ValueInDocProc {
private final RangeFacet.Entry[] entries;
NumericFieldData valueFieldData;
DoubleValues valueValues;
public RangeProc(RangeFacet.Entry[] entries) {
this.entries = entries;
}
@Override
public void onMissing(int docId) {
    // A document with no value in the key field cannot fall into any range
    // entry, so there is nothing to accumulate for it.
}
@Override
public void onValue(int docId, double value) {
for (RangeFacet.Entry entry : entries) {
@ -117,10 +91,9 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
if (value >= entry.getFrom() && value < entry.getTo()) {
entry.foundInDoc = true;
entry.count++;
if (valueFieldData.multiValued()) {
double[] valuesValues = valueFieldData.doubleValues(docId);
entry.totalCount += valuesValues.length;
for (double valueValue : valuesValues) {
if (valueValues.isMultiValued()) {
for (DoubleValues.Iter iter = valueValues.getIter(docId); iter.hasNext(); ) {
double valueValue = iter.next();
entry.total += valueValue;
if (valueValue < entry.min) {
entry.min = valueValue;
@ -128,9 +101,10 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
if (valueValue > entry.max) {
entry.max = valueValue;
}
entry.totalCount++;
}
} else {
double valueValue = valueFieldData.doubleValue(docId);
} else if (valueValues.hasValue(docId)) {
double valueValue = valueValues.getValue(docId);
entry.totalCount++;
entry.total += valueValue;
if (valueValue < entry.min) {

View File

@ -20,13 +20,10 @@
package org.elasticsearch.search.facet.range;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -36,42 +33,22 @@ import java.io.IOException;
*/
public class RangeFacetCollector extends AbstractFacetCollector {
private final String indexFieldName;
private final FieldDataCache fieldDataCache;
private final FieldDataType fieldDataType;
private NumericFieldData fieldData;
private final IndexNumericFieldData indexFieldData;
private DoubleValues values;
private final RangeFacet.Entry[] entries;
private final RangeProc rangeProc;
public RangeFacetCollector(String facetName, String fieldName, RangeFacet.Entry[] entries, SearchContext context) {
public RangeFacetCollector(String facetName, IndexNumericFieldData indexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.fieldDataCache = context.fieldDataCache();
this.indexFieldData = indexFieldData;
this.entries = entries;
MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
}
// add type filter if there is exact doc mapper associated with it
if (smartMappers.explicitTypeInNameWithDocMapper()) {
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
}
indexFieldName = smartMappers.mapper().names().indexName();
fieldDataType = smartMappers.mapper().fieldDataType();
rangeProc = new RangeProc(entries);
}
@Override
protected void doSetNextReader(AtomicReaderContext context) throws IOException {
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
values = indexFieldData.load(context).getDoubleValues();
}
@Override
@ -79,7 +56,7 @@ public class RangeFacetCollector extends AbstractFacetCollector {
for (RangeFacet.Entry entry : entries) {
entry.foundInDoc = false;
}
fieldData.forEachValueInDoc(doc, rangeProc);
values.forEachValueInDoc(doc, rangeProc);
}
@Override
@ -87,7 +64,7 @@ public class RangeFacetCollector extends AbstractFacetCollector {
return new InternalRangeFacet(facetName, entries);
}
public static class RangeProc implements NumericFieldData.DoubleValueInDocProc {
public static class RangeProc implements DoubleValues.ValueInDocProc {
private final RangeFacet.Entry[] entries;
@ -95,6 +72,10 @@ public class RangeFacetCollector extends AbstractFacetCollector {
this.entries = entries;
}
@Override
public void onMissing(int docId) {
    // Documents missing the faceted field contribute to no range bucket;
    // intentionally a no-op.
}
@Override
public void onValue(int docId, double value) {
for (RangeFacet.Entry entry : entries) {

View File

@ -145,10 +145,15 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProce
IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper.names(), keyFieldMapper.fieldDataType2());
if (valueField == null || keyField.equals(valueField)) {
return new RangeFacetCollector(facetName, keyField, rangeEntries, context);
return new RangeFacetCollector(facetName, keyIndexFieldData, rangeEntries, context);
} else {
FieldMapper valueFieldMapper = context.smartNameFieldMapper(valueField);
if (valueFieldMapper == null) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueField + "]");
}
IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper.names(), valueFieldMapper.fieldDataType2());
// we have a value field, and its different than the key
return new KeyValueRangeFacetCollector(facetName, keyField, valueField, rangeEntries, context);
return new KeyValueRangeFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, rangeEntries, context);
}
}