Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-03 17:39:15 +00:00)
move range facet to use new field data abstraction

commit fa363b2dca
parent 692413862a
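At its core, the change replaces the old per-segment lookup through FieldDataCache (keyed on the mapper's FieldDataType and index field name) with the new IndexNumericFieldData / DoubleValues abstraction. A minimal before/after sketch of the access path, built only from calls that appear in this diff (the surrounding collector fields are assumed, so this is illustrative rather than compilable on its own):

    // Before: the collector resolved the mapper itself and pulled NumericFieldData
    // out of the FieldDataCache for every new segment.
    fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
    fieldData.forEachValueInDoc(doc, rangeProc);

    // After: the facet processor hands the collector an IndexNumericFieldData up
    // front; per segment, the collector just loads DoubleValues from it.
    values = indexFieldData.load(context).getDoubleValues();
    values.forEachValueInDoc(doc, rangeProc);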
DateFieldMapper.java

@@ -172,6 +172,12 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
         return Defaults.FIELD_TYPE;
     }
 
+    @Override
+    public org.elasticsearch.index.fielddata.FieldDataType fieldDataType2() {
+        // long for now, need to think about scripts and getting a DateTime back?
+        return new org.elasticsearch.index.fielddata.FieldDataType("long");
+    }
+
     @Override
     protected double parseFuzzyFactor(String fuzzyFactor) {
         if (fuzzyFactor == null) {
@@ -402,11 +408,6 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
         return FieldDataType.DefaultTypes.LONG;
     }
 
-    @Override
-    public org.elasticsearch.index.fielddata.FieldDataType fieldDataType2() {
-        throw new ElasticSearchIllegalArgumentException("not implemented");
-    }
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;
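The new fieldDataType2() hook is what the facet layer keys the new-style field data lookup on; RangeFacetProcessor (the last hunk of this diff) consumes it roughly like this (excerpted from that hunk, not standalone code):

    IndexNumericFieldData keyIndexFieldData =
            context.fieldData().getForField(keyFieldMapper.names(), keyFieldMapper.fieldDataType2());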
KeyValueRangeFacetCollector.java

@@ -20,13 +20,10 @@
 package org.elasticsearch.search.facet.range;
 
 import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
@@ -36,53 +33,26 @@ import java.io.IOException;
  */
 public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
 
-    private final String keyIndexFieldName;
-    private final String valueIndexFieldName;
-
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType keyFieldDataType;
-    private NumericFieldData keyFieldData;
-
-    private final FieldDataType valueFieldDataType;
+    private final IndexNumericFieldData keyIndexFieldData;
+    private final IndexNumericFieldData valueIndexFieldData;
 
     private final RangeFacet.Entry[] entries;
 
     private final RangeProc rangeProc;
 
-    public KeyValueRangeFacetCollector(String facetName, String keyFieldName, String valueFieldName, RangeFacet.Entry[] entries, SearchContext context) {
+    private DoubleValues keyValues;
+
+    public KeyValueRangeFacetCollector(String facetName, IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
         super(facetName);
         this.entries = entries;
-        this.fieldDataCache = context.fieldDataCache();
-
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(keyFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + keyFieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        keyIndexFieldName = smartMappers.mapper().names().indexName();
-        keyFieldDataType = smartMappers.mapper().fieldDataType();
-
-        smartMappers = context.smartFieldMappers(valueFieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
-        }
-        valueIndexFieldName = smartMappers.mapper().names().indexName();
-        valueFieldDataType = smartMappers.mapper().fieldDataType();
-
+        this.keyIndexFieldData = keyIndexFieldData;
+        this.valueIndexFieldData = valueIndexFieldData;
         this.rangeProc = new RangeProc(entries);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, context.reader(), keyIndexFieldName);
-        rangeProc.valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, context.reader(), valueIndexFieldName);
+        keyValues = keyIndexFieldData.load(context).getDoubleValues();
+        rangeProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
     }
 
     @Override
@@ -90,7 +60,7 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
         for (RangeFacet.Entry entry : entries) {
             entry.foundInDoc = false;
         }
-        keyFieldData.forEachValueInDoc(doc, rangeProc);
+        keyValues.forEachValueInDoc(doc, rangeProc);
     }
 
     @Override
@@ -98,16 +68,20 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
         return new InternalRangeFacet(facetName, entries);
     }
 
-    public static class RangeProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class RangeProc implements DoubleValues.ValueInDocProc {
 
         private final RangeFacet.Entry[] entries;
 
-        NumericFieldData valueFieldData;
+        DoubleValues valueValues;
 
         public RangeProc(RangeFacet.Entry[] entries) {
             this.entries = entries;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             for (RangeFacet.Entry entry : entries) {
@@ -117,10 +91,9 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
                 if (value >= entry.getFrom() && value < entry.getTo()) {
                     entry.foundInDoc = true;
                     entry.count++;
-                    if (valueFieldData.multiValued()) {
-                        double[] valuesValues = valueFieldData.doubleValues(docId);
-                        entry.totalCount += valuesValues.length;
-                        for (double valueValue : valuesValues) {
+                    if (valueValues.isMultiValued()) {
+                        for (DoubleValues.Iter iter = valueValues.getIter(docId); iter.hasNext(); ) {
+                            double valueValue = iter.next();
                             entry.total += valueValue;
                             if (valueValue < entry.min) {
                                 entry.min = valueValue;
@@ -128,9 +101,10 @@ public class KeyValueRangeFacetCollector extends AbstractFacetCollector {
                             if (valueValue > entry.max) {
                                 entry.max = valueValue;
                             }
+                            entry.totalCount++;
                         }
-                    } else {
-                        double valueValue = valueFieldData.doubleValue(docId);
+                    } else if (valueValues.hasValue(docId)) {
+                        double valueValue = valueValues.getValue(docId);
                         entry.totalCount++;
                         entry.total += valueValue;
                         if (valueValue < entry.min) {
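The per-document accumulation inside RangeProc is easy to lose in the diff noise. Below is a small, self-contained rendition of the same bucketing rule (half-open [from, to) key ranges; count, totalCount, total, min and max per entry). The class and method names here are mine, and the foundInDoc/missing-value bookkeeping is omitted, so treat it as an illustration of the logic rather than Elasticsearch code:

    import java.util.List;

    // Illustrative only: mirrors what RangeProc.onValue accumulates for one key
    // value and the doc's value-field values, using plain Java types.
    class RangeBucketingDemo {

        static final class Entry {
            final double from;
            final double to;                 // half-open range [from, to)
            long count;
            long totalCount;
            double total;
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;

            Entry(double from, double to) {
                this.from = from;
                this.to = to;
            }
        }

        // A key value that falls into an entry bumps its count once and folds
        // every value-field value of that doc into totalCount/total/min/max.
        static void onValue(List<Entry> entries, double keyValue, double[] docValues) {
            for (Entry entry : entries) {
                if (keyValue >= entry.from && keyValue < entry.to) {
                    entry.count++;
                    for (double v : docValues) {
                        entry.totalCount++;
                        entry.total += v;
                        entry.min = Math.min(entry.min, v);
                        entry.max = Math.max(entry.max, v);
                    }
                }
            }
        }

        public static void main(String[] args) {
            List<Entry> entries = List.of(new Entry(0, 10), new Entry(10, 20));
            onValue(entries, 7.0, new double[]{3.0, 15.0}); // one doc: key=7, values 3 and 15
            Entry e = entries.get(0);
            System.out.printf("count=%d totalCount=%d total=%.1f min=%.1f max=%.1f%n",
                    e.count, e.totalCount, e.total, e.min, e.max);
            // -> count=1 totalCount=2 total=18.0 min=3.0 max=15.0
        }
    }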
RangeFacetCollector.java

@@ -20,13 +20,10 @@
 package org.elasticsearch.search.facet.range;
 
 import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.NumericFieldData;
-import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.facet.AbstractFacetCollector;
 import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
@@ -36,42 +33,22 @@ import java.io.IOException;
  */
 public class RangeFacetCollector extends AbstractFacetCollector {
 
-    private final String indexFieldName;
-
-    private final FieldDataCache fieldDataCache;
-
-    private final FieldDataType fieldDataType;
-
-    private NumericFieldData fieldData;
+    private final IndexNumericFieldData indexFieldData;
+
+    private DoubleValues values;
 
     private final RangeFacet.Entry[] entries;
 
     private final RangeProc rangeProc;
 
-    public RangeFacetCollector(String facetName, String fieldName, RangeFacet.Entry[] entries, SearchContext context) {
+    public RangeFacetCollector(String facetName, IndexNumericFieldData indexFieldData, RangeFacet.Entry[] entries, SearchContext context) {
         super(facetName);
-        this.fieldDataCache = context.fieldDataCache();
+        this.indexFieldData = indexFieldData;
         this.entries = entries;
 
-        MapperService.SmartNameFieldMappers smartMappers = context.smartFieldMappers(fieldName);
-        if (smartMappers == null || !smartMappers.hasMapper()) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
-        }
-
-        // add type filter if there is exact doc mapper associated with it
-        if (smartMappers.explicitTypeInNameWithDocMapper()) {
-            setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
-        }
-
-        indexFieldName = smartMappers.mapper().names().indexName();
-        fieldDataType = smartMappers.mapper().fieldDataType();
-
         rangeProc = new RangeProc(entries);
     }
 
     @Override
     protected void doSetNextReader(AtomicReaderContext context) throws IOException {
-        fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, context.reader(), indexFieldName);
+        values = indexFieldData.load(context).getDoubleValues();
     }
 
     @Override
@@ -79,7 +56,7 @@ public class RangeFacetCollector extends AbstractFacetCollector {
         for (RangeFacet.Entry entry : entries) {
             entry.foundInDoc = false;
         }
-        fieldData.forEachValueInDoc(doc, rangeProc);
+        values.forEachValueInDoc(doc, rangeProc);
     }
 
     @Override
@@ -87,7 +64,7 @@ public class RangeFacetCollector extends AbstractFacetCollector {
         return new InternalRangeFacet(facetName, entries);
     }
 
-    public static class RangeProc implements NumericFieldData.DoubleValueInDocProc {
+    public static class RangeProc implements DoubleValues.ValueInDocProc {
 
         private final RangeFacet.Entry[] entries;
 
@@ -95,6 +72,10 @@ public class RangeFacetCollector extends AbstractFacetCollector {
             this.entries = entries;
         }
 
+        @Override
+        public void onMissing(int docId) {
+        }
+
         @Override
         public void onValue(int docId, double value) {
             for (RangeFacet.Entry entry : entries) {
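Both RangeProc implementations now also provide an onMissing callback, because the new value-in-doc procedure distinguishes documents that have no value for the field. Judging only from the usage in this diff, the contract the collectors code against looks roughly like this (a reconstruction for illustration; the real nested interface lives in org.elasticsearch.index.fielddata.DoubleValues and is not shown in this commit):

    // Reconstructed from the @Override methods seen above, not copied from source.
    interface ValueInDocProc {
        void onValue(int docId, double value); // invoked once per value of docId
        void onMissing(int docId);             // invoked when docId has no value for the field
    }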
RangeFacetProcessor.java

@@ -145,10 +145,15 @@ public class RangeFacetProcessor extends AbstractComponent implements FacetProcessor {
         IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper.names(), keyFieldMapper.fieldDataType2());
 
         if (valueField == null || keyField.equals(valueField)) {
-            return new RangeFacetCollector(facetName, keyField, rangeEntries, context);
+            return new RangeFacetCollector(facetName, keyIndexFieldData, rangeEntries, context);
         } else {
+            FieldMapper valueFieldMapper = context.smartNameFieldMapper(valueField);
+            if (valueFieldMapper == null) {
+                throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + keyField + "]");
+            }
+            IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper.names(), valueFieldMapper.fieldDataType2());
             // we have a value field, and its different than the key
-            return new KeyValueRangeFacetCollector(facetName, keyField, valueField, rangeEntries, context);
+            return new KeyValueRangeFacetCollector(facetName, keyIndexFieldData, valueIndexFieldData, rangeEntries, context);
         }
     }