move geo filters and numeric range to use new field data

parent be1e5becbb
commit de013babf8
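For orientation, the hunks below all follow the same migration: values that used to be pulled from the old FieldDataCache by field-data type and field name are now obtained through the new IndexFieldData API. A rough sketch of the new access pattern, assembled only from calls that appear in this diff (the variable names mapper, parseContext, ctx and doc are illustrative placeholders, not part of the commit):

    // old pattern (removed below): look values up in the cache by type and field name
    // ByteFieldData fieldData = (ByteFieldData) fieldDataCache.cache(FieldDataType.DefaultTypes.BYTE, ctx.reader(), field);

    // new pattern (added below): resolve the per-field data once, then load per-segment values
    IndexNumericFieldData indexFieldData = (IndexNumericFieldData) parseContext.fieldData().getForField(mapper);
    ByteValues values = indexFieldData.load(ctx).getByteValues();
    if (values.hasValue(doc)) {
        // single- and multi-valued access as used by the filters below
    }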
@@ -35,6 +35,10 @@ public class FieldDataType {
         this(type, null, ImmutableMap.<String, String>of());
     }
 
+    public FieldDataType(String type, String format) {
+        this(type, format, ImmutableMap.<String, String>of());
+    }
+
     public FieldDataType(String type, @Nullable String format, ImmutableMap<String, String> options) {
         this.type = type;
         this.format = format;
@ -37,9 +37,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
|
||||
import org.elasticsearch.index.cache.field.data.FieldDataCache;
|
||||
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
|
||||
import org.elasticsearch.index.field.data.FieldDataType;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.mapper.*;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
|
||||
|
@ -229,8 +230,8 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
|
||||
return NumericRangeFieldDataFilter.newByteRange(fieldDataCache, names.indexName(),
|
||||
public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
|
||||
return NumericRangeFieldDataFilter.newByteRange((IndexNumericFieldData) fieldData.getForField(this),
|
||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||
upperTerm == null ? null : parseValue(upperTerm),
|
||||
includeLower, includeUpper);
|
||||
|
|
|
@@ -40,9 +40,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericDateAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -310,9 +311,9 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
         long now = context == null ? System.currentTimeMillis() : context.nowInMillis();
-        return NumericRangeFieldDataFilter.newLongRange(fieldDataCache, names.indexName(),
+        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : dateMathParser.parse(convertToString(lowerTerm), now),
                 upperTerm == null ? null : (includeUpper && parseUpperInclusive) ? dateMathParser.parseUpperInclusive(convertToString(upperTerm), now) : dateMathParser.parse(convertToString(upperTerm), now),
                 includeLower, includeUpper);
@@ -37,9 +37,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -228,8 +229,8 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newDoubleRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newDoubleRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -38,9 +38,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -222,8 +223,8 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newFloatRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -38,9 +38,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -228,8 +229,8 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newIntRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newIntRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -38,9 +38,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericLongAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -228,8 +229,8 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newLongRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -30,9 +30,9 @@ import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -242,7 +242,7 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
     /**
      * A range filter based on the field data cache.
      */
-    public abstract Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
+    public abstract Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
 
     /**
      * Override the default behavior (to return the string, and return the actual Number instance).
@@ -38,9 +38,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
@@ -232,8 +233,8 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newShortRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newShortRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -287,6 +287,10 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
         return lonMapper;
     }
 
+    public GeoStringFieldMapper stringMapper() {
+        return this.geoStringMapper;
+    }
+
     public boolean isEnableLatLon() {
         return enableLatLon;
     }
@@ -35,9 +35,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.FloatFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@@ -211,8 +212,8 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements Intern
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newFloatRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -129,7 +129,7 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements Internal
 
     @Override
     public org.elasticsearch.index.fielddata.FieldDataType fieldDataType2() {
-        return new FieldDataType("string");
+        return new FieldDataType("string", "paged_bytes");
     }
 
     @Override
@@ -35,9 +35,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericAnalyzer;
 import org.elasticsearch.index.analysis.NumericTokenizer;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.field.data.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@@ -249,8 +250,8 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(FieldDataCache fieldDataCache, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newLongRange(fieldDataCache, names.indexName(),
+    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) fieldData.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -23,6 +23,7 @@ import org.apache.lucene.search.Filter;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
@@ -174,13 +175,12 @@ public class GeoBoundingBoxFilterParser implements FilterParser {
         }
         GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
 
-        fieldName = mapper.names().indexName();
-
         Filter filter;
         if ("indexed".equals(type)) {
             filter = IndexedGeoBoundingBoxFilter.create(topLeft, bottomRight, geoMapper);
         } else if ("memory".equals(type)) {
-            filter = new InMemoryGeoBoundingBoxFilter(topLeft, bottomRight, fieldName, parseContext.indexCache().fieldData());
+            IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
+            filter = new InMemoryGeoBoundingBoxFilter(topLeft, bottomRight, indexFieldData);
         } else {
             throw new QueryParsingException(parseContext.index(), "geo bounding box type [" + type + "] not supported, either 'indexed' or 'memory' are allowed");
         }
@@ -24,9 +24,9 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.*;
 
@@ -177,13 +177,14 @@ public class GeoDistanceFilterParser implements FilterParser {
             throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
         }
         FieldMapper mapper = smartMappers.mapper();
-        if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
+        if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
             throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
         }
         GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
         fieldName = mapper.names().indexName();
-
-        Filter filter = new GeoDistanceFilter(lat, lon, distance, geoDistance, fieldName, geoMapper, parseContext.indexCache().fieldData(), optimizeBbox);
+
+        IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
+        Filter filter = new GeoDistanceFilter(lat, lon, distance, geoDistance, indexFieldData, geoMapper, optimizeBbox);
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
         }
@@ -24,9 +24,9 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.*;
 
@@ -233,13 +233,13 @@ public class GeoDistanceRangeFilterParser implements FilterParser {
             throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
         }
         FieldMapper mapper = smartMappers.mapper();
-        if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
+        if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
             throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
         }
         GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
         fieldName = mapper.names().indexName();
-
-        Filter filter = new GeoDistanceRangeFilter(lat, lon, from, to, includeLower, includeUpper, geoDistance, fieldName, geoMapper, parseContext.indexCache().fieldData(), optimizeBbox);
+        IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
+        Filter filter = new GeoDistanceRangeFilter(lat, lon, from, to, includeLower, includeUpper, geoDistance, geoMapper, indexFieldData, optimizeBbox);
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
         }
@@ -24,9 +24,9 @@ import org.apache.lucene.search.Filter;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoHashUtils;
 import org.elasticsearch.index.search.geo.GeoPolygonFilter;
@@ -172,12 +172,12 @@ public class GeoPolygonFilterParser implements FilterParser {
             throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
         }
         FieldMapper mapper = smartMappers.mapper();
-        if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
+        if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
             throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
         }
         fieldName = mapper.names().indexName();
-
-        Filter filter = new GeoPolygonFilter(points.toArray(new Point[points.size()]), fieldName, parseContext.indexCache().fieldData());
+        IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
+        Filter filter = new GeoPolygonFilter(points.toArray(new Point[points.size()]), indexFieldData);
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
         }
@@ -119,7 +119,7 @@ public class NumericRangeFilterParser implements FilterParser {
         if (!(mapper instanceof NumberFieldMapper)) {
             throw new QueryParsingException(parseContext.index(), "Field [" + fieldName + "] is not a numeric type");
         }
-        Filter filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.indexCache().fieldData(), from, to, includeLower, includeUpper, parseContext);
+        Filter filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext);
 
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
@@ -269,6 +269,10 @@ public class QueryParseContext {
         return indexQueryParser.mapperService.smartName(name, getTypes());
     }
 
+    public FieldMapper smartNameFieldMapper(String name) {
+        return indexQueryParser.mapperService.smartNameFieldMapper(name, getTypes());
+    }
+
     public MapperService.SmartNameObjectMapper smartObjectMapper(String name) {
         return indexQueryParser.mapperService.smartNameObjectMapper(name, getTypes());
    }
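A hedged sketch of how the smartNameFieldMapper helper added above could be combined with the new fieldData() lookup from a filter parser; the pairing is illustrative and reuses only calls that appear elsewhere in this commit:

    FieldMapper mapper = parseContext.smartNameFieldMapper(fieldName);
    if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
        throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
    }
    // resolve geo point field data for the mapped field and hand it straight to a filter
    IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
    Filter filter = new InMemoryGeoBoundingBoxFilter(topLeft, bottomRight, indexFieldData);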
@@ -25,14 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.field.data.bytes.ByteFieldData;
-import org.elasticsearch.index.field.data.doubles.DoubleFieldData;
-import org.elasticsearch.index.field.data.floats.FloatFieldData;
-import org.elasticsearch.index.field.data.ints.IntFieldData;
-import org.elasticsearch.index.field.data.longs.LongFieldData;
-import org.elasticsearch.index.field.data.shorts.ShortFieldData;
+import org.elasticsearch.index.fielddata.*;
 
 import java.io.IOException;
 
@@ -41,16 +34,14 @@ import java.io.IOException;
  * expense of loading numeric values of the field to memory using {@link org.elasticsearch.index.cache.field.data.FieldDataCache}.
  */
 public abstract class NumericRangeFieldDataFilter<T> extends Filter {
-    // LUCENE 4 UPGRADE: this filter doesn't respect acceptDocs yet!
-    final FieldDataCache fieldDataCache;
-    final String field;
+    final IndexNumericFieldData indexFieldData;
     final T lowerVal;
     final T upperVal;
     final boolean includeLower;
     final boolean includeUpper;
 
     public String getField() {
-        return field;
+        return indexFieldData.getFieldNames().indexName();
     }
 
     public T getLowerVal() {
@@ -69,9 +60,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
         return includeUpper;
     }
 
-    protected NumericRangeFieldDataFilter(FieldDataCache fieldDataCache, String field, T lowerVal, T upperVal, boolean includeLower, boolean includeUpper) {
-        this.fieldDataCache = fieldDataCache;
-        this.field = field;
+    protected NumericRangeFieldDataFilter(IndexNumericFieldData indexFieldData, T lowerVal, T upperVal, boolean includeLower, boolean includeUpper) {
+        this.indexFieldData = indexFieldData;
         this.lowerVal = lowerVal;
         this.upperVal = upperVal;
         this.includeLower = includeLower;
@@ -80,7 +70,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
     @Override
     public final String toString() {
-        final StringBuilder sb = new StringBuilder(field).append(":");
+        final StringBuilder sb = new StringBuilder(indexFieldData.getFieldNames().indexName()).append(":");
         return sb.append(includeLower ? '[' : '{')
                 .append((lowerVal == null) ? "*" : lowerVal.toString())
                 .append(" TO ")
@@ -95,7 +85,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
         if (!(o instanceof NumericRangeFieldDataFilter)) return false;
         NumericRangeFieldDataFilter other = (NumericRangeFieldDataFilter) o;
 
-        if (!this.field.equals(other.field)
+        if (!this.indexFieldData.getFieldNames().indexName().equals(other.indexFieldData.getFieldNames().indexName())
                 || this.includeLower != other.includeLower
                 || this.includeUpper != other.includeUpper
                 ) {
@@ -108,7 +98,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
     @Override
     public final int hashCode() {
-        int h = field.hashCode();
+        int h = indexFieldData.getFieldNames().indexName().hashCode();
         h ^= (lowerVal != null) ? lowerVal.hashCode() : 550356204;
         h = (h << 1) | (h >>> 31); // rotate to distinguish lower from upper
         h ^= (upperVal != null) ? upperVal.hashCode() : -1674416163;
@@ -116,8 +106,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
         return h;
     }
 
-    public static NumericRangeFieldDataFilter<Byte> newByteRange(FieldDataCache fieldDataCache, String field, Byte lowerVal, Byte upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Byte>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Byte> newByteRange(IndexNumericFieldData indexFieldData, Byte lowerVal, Byte upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Byte>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
             @Override
             public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                 final byte inclusiveLowerPoint, inclusiveUpperPoint;
@@ -141,7 +131,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final ByteFieldData fieldData = (ByteFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.BYTE, ctx.reader(), field);
+                final ByteValues values = indexFieldData.load(ctx).getByteValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -151,21 +141,14 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            byte[] values = fieldData.values(doc);
-                            for (byte value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            byte value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        ByteValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            byte value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
@@ -173,8 +156,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
     }
 
 
-    public static NumericRangeFieldDataFilter<Short> newShortRange(FieldDataCache fieldDataCache, String field, Short lowerVal, Short upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Short>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Short> newShortRange(IndexNumericFieldData indexFieldData, Short lowerVal, Short upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Short>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
             @Override
             public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                 final short inclusiveLowerPoint, inclusiveUpperPoint;
@@ -198,7 +181,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final ShortFieldData fieldData = (ShortFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.SHORT, ctx.reader(), field);
+                final ShortValues values = indexFieldData.load(ctx).getShortValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -208,29 +191,22 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            short[] values = fieldData.values(doc);
-                            for (short value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            short value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        ShortValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            short value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
         };
     }
 
-    public static NumericRangeFieldDataFilter<Integer> newIntRange(FieldDataCache fieldDataCache, String field, Integer lowerVal, Integer upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Integer>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Integer> newIntRange(IndexNumericFieldData indexFieldData, Integer lowerVal, Integer upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Integer>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
             @Override
             public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                 final int inclusiveLowerPoint, inclusiveUpperPoint;
@@ -254,7 +230,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final IntFieldData fieldData = (IntFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.INT, ctx.reader(), field);
+                final IntValues values = indexFieldData.load(ctx).getIntValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -264,29 +240,22 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            int[] values = fieldData.values(doc);
-                            for (int value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            int value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        IntValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            int value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
         };
     }
 
-    public static NumericRangeFieldDataFilter<Long> newLongRange(FieldDataCache fieldDataCache, String field, Long lowerVal, Long upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Long>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Long> newLongRange(IndexNumericFieldData indexFieldData, Long lowerVal, Long upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Long>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
             @Override
             public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                 final long inclusiveLowerPoint, inclusiveUpperPoint;
@@ -310,7 +279,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final LongFieldData fieldData = (LongFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.LONG, ctx.reader(), field);
+                final LongValues values = indexFieldData.load(ctx).getLongValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -320,29 +289,22 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            long[] values = fieldData.values(doc);
-                            for (long value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            long value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        LongValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            long value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
         };
     }
 
-    public static NumericRangeFieldDataFilter<Float> newFloatRange(FieldDataCache fieldDataCache, String field, Float lowerVal, Float upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Float>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Float> newFloatRange(IndexNumericFieldData indexFieldData, Float lowerVal, Float upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Float>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
             @Override
             public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                 // we transform the floating point numbers to sortable integers
@@ -370,7 +332,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final FloatFieldData fieldData = (FloatFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.FLOAT, ctx.reader(), field);
+                final FloatValues values = indexFieldData.load(ctx).getFloatValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -380,29 +342,22 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            float[] values = fieldData.values(doc);
-                            for (float value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            float value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        FloatValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            float value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
         };
     }
 
-    public static NumericRangeFieldDataFilter<Double> newDoubleRange(FieldDataCache fieldDataCache, String field, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper) {
-        return new NumericRangeFieldDataFilter<Double>(fieldDataCache, field, lowerVal, upperVal, includeLower, includeUpper) {
+    public static NumericRangeFieldDataFilter<Double> newDoubleRange(IndexNumericFieldData indexFieldData, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper) {
+        return new NumericRangeFieldDataFilter<Double>(indexFieldData, lowerVal, upperVal, includeLower, includeUpper) {
            @Override
            public DocIdSet getDocIdSet(AtomicReaderContext ctx, Bits acceptedDocs) throws IOException {
                // we transform the floating point numbers to sortable integers
@@ -430,7 +385,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                 if (inclusiveLowerPoint > inclusiveUpperPoint)
                     return null;
 
-                final DoubleFieldData fieldData = (DoubleFieldData) this.fieldDataCache.cache(FieldDataType.DefaultTypes.DOUBLE, ctx.reader(), field);
+                final DoubleValues values = indexFieldData.load(ctx).getDoubleValues();
                 return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
 
                     @Override
@@ -440,21 +395,14 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
 
                     @Override
                     protected boolean matchDoc(int doc) {
-                        if (!fieldData.hasValue(doc)) {
-                            return false;
-                        }
-                        if (fieldData.multiValued()) {
-                            double[] values = fieldData.values(doc);
-                            for (double value : values) {
-                                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                    return true;
-                                }
-                            }
-                            return false;
-                        } else {
-                            double value = fieldData.value(doc);
-                            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-                        }
+                        DoubleValues.Iter iter = values.getIter(doc);
+                        while (iter.hasNext()) {
+                            double value = iter.next();
+                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                                return true;
+                            }
+                        }
+                        return false;
                     }
                 };
             }
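As a minimal usage sketch of the reworked factories above (assuming an IndexNumericFieldData has already been resolved the way the mapper hunks earlier in this commit do; lowerVal, upperVal, includeLower and includeUpper are illustrative placeholders):

    IndexNumericFieldData longFieldData = (IndexNumericFieldData) fieldData.getForField(this); // as in LongFieldMapper.rangeFilter
    Filter rangeFilter = NumericRangeFieldDataFilter.newLongRange(longFieldData, lowerVal, upperVal, includeLower, includeUpper);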
@@ -29,9 +29,9 @@ import org.elasticsearch.common.lucene.docset.AndDocIdSet;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
+import org.elasticsearch.index.fielddata.GeoPointValues;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
+import org.elasticsearch.index.mapper.geo.GeoPoint;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 
 import java.io.IOException;
@@ -48,22 +48,19 @@ public class GeoDistanceFilter extends Filter {
 
     private final GeoDistance geoDistance;
 
-    private final String fieldName;
-
-    private final FieldDataCache fieldDataCache;
+    private final IndexGeoPointFieldData indexFieldData;
 
     private final GeoDistance.FixedSourceDistance fixedSourceDistance;
     private GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
     private final Filter boundingBoxFilter;
 
-    public GeoDistanceFilter(double lat, double lon, double distance, GeoDistance geoDistance, String fieldName, GeoPointFieldMapper mapper, FieldDataCache fieldDataCache,
+    public GeoDistanceFilter(double lat, double lon, double distance, GeoDistance geoDistance, IndexGeoPointFieldData indexFieldData, GeoPointFieldMapper mapper,
                              String optimizeBbox) {
         this.lat = lat;
         this.lon = lon;
         this.distance = distance;
         this.geoDistance = geoDistance;
-        this.fieldName = fieldName;
-        this.fieldDataCache = fieldDataCache;
+        this.indexFieldData = indexFieldData;
 
         this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, DistanceUnit.MILES);
         if (optimizeBbox != null && !"none".equals(optimizeBbox)) {
@@ -99,7 +96,7 @@ public class GeoDistanceFilter extends Filter {
     }
 
     public String fieldName() {
-        return fieldName;
+        return indexFieldData.getFieldNames().indexName();
     }
 
     @Override
@@ -111,8 +108,8 @@ public class GeoDistanceFilter extends Filter {
                 return null;
             }
         }
-        final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, context.reader(), fieldName);
-        GeoDistanceDocSet distDocSet = new GeoDistanceDocSet(context.reader().maxDoc(), acceptedDocs, fieldData, fixedSourceDistance, distanceBoundingCheck, distance);
+        final GeoPointValues values = indexFieldData.load(context).getGeoPointValues();
+        GeoDistanceDocSet distDocSet = new GeoDistanceDocSet(context.reader().maxDoc(), acceptedDocs, values, fixedSourceDistance, distanceBoundingCheck, distance);
         if (boundingBoxDocSet == null) {
             return distDocSet;
         } else {
@@ -130,7 +127,8 @@ public class GeoDistanceFilter extends Filter {
         if (Double.compare(filter.distance, distance) != 0) return false;
         if (Double.compare(filter.lat, lat) != 0) return false;
         if (Double.compare(filter.lon, lon) != 0) return false;
-        if (fieldName != null ? !fieldName.equals(filter.fieldName) : filter.fieldName != null) return false;
+        if (!indexFieldData.getFieldNames().indexName().equals(filter.indexFieldData.getFieldNames().indexName()))
+            return false;
         if (geoDistance != filter.geoDistance) return false;
 
         return true;
@@ -138,7 +136,7 @@ public class GeoDistanceFilter extends Filter {
 
     @Override
     public String toString() {
-        return "GeoDistanceFilter(" + fieldName + ", " + geoDistance + ", " + distance + ", " + lat + ", " + lon + ")";
+        return "GeoDistanceFilter(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", " + distance + ", " + lat + ", " + lon + ")";
     }
 
     @Override
@@ -152,20 +150,20 @@ public class GeoDistanceFilter extends Filter {
         temp = distance != +0.0d ? Double.doubleToLongBits(distance) : 0L;
         result = 31 * result + (int) (temp ^ (temp >>> 32));
         result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0);
-        result = 31 * result + (fieldName != null ? fieldName.hashCode() : 0);
+        result = 31 * result + indexFieldData.getFieldNames().indexName().hashCode();
         return result;
     }
 
     public static class GeoDistanceDocSet extends MatchDocIdSet {
         private final double distance; // in miles
-        private final GeoPointFieldData fieldData;
+        private final GeoPointValues values;
         private final GeoDistance.FixedSourceDistance fixedSourceDistance;
         private final GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
 
-        public GeoDistanceDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointFieldData fieldData, GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistance.DistanceBoundingCheck distanceBoundingCheck,
+        public GeoDistanceDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistance.DistanceBoundingCheck distanceBoundingCheck,
                                  double distance) {
             super(maxDoc, acceptDocs);
-            this.fieldData = fieldData;
+            this.values = values;
             this.fixedSourceDistance = fixedSourceDistance;
             this.distanceBoundingCheck = distanceBoundingCheck;
             this.distance = distance;
@@ -178,18 +176,16 @@ public class GeoDistanceFilter extends Filter {
 
         @Override
         protected boolean matchDoc(int doc) {
-            if (!fieldData.hasValue(doc)) {
+            if (!values.hasValue(doc)) {
                 return false;
             }
 
-            if (fieldData.multiValued()) {
-                double[] lats = fieldData.latValues(doc);
-                double[] lons = fieldData.lonValues(doc);
-                for (int i = 0; i < lats.length; i++) {
-                    double lat = lats[i];
-                    double lon = lons[i];
-                    if (distanceBoundingCheck.isWithin(lat, lon)) {
-                        double d = fixedSourceDistance.calculate(lat, lon);
+            if (values.isMultiValued()) {
+                GeoPointValues.Iter iter = values.getIter(doc);
+                while (iter.hasNext()) {
+                    GeoPoint point = iter.next();
+                    if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
+                        double d = fixedSourceDistance.calculate(point.lat(), point.lon());
                         if (d < distance) {
                             return true;
                         }
@@ -197,10 +193,9 @@ public class GeoDistanceFilter extends Filter {
                 }
                 return false;
             } else {
-                double lat = fieldData.latValue(doc);
-                double lon = fieldData.lonValue(doc);
-                if (distanceBoundingCheck.isWithin(lat, lon)) {
-                    double d = fixedSourceDistance.calculate(lat, lon);
+                GeoPoint point = values.getValue(doc);
+                if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
+                    double d = fixedSourceDistance.calculate(point.lat(), point.lon());
                     return d < distance;
                 }
             }
@@ -30,9 +30,9 @@ import org.elasticsearch.common.lucene.docset.AndDocIdSet;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
+import org.elasticsearch.index.fielddata.GeoPointValues;
+import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
+import org.elasticsearch.index.mapper.geo.GeoPoint;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 
 import java.io.IOException;
@@ -53,17 +53,14 @@ public class GeoDistanceRangeFilter extends Filter {
     private GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
     private final Filter boundingBoxFilter;
 
-    private final String fieldName;
+    private final IndexGeoPointFieldData indexFieldData;
 
-    private final FieldDataCache fieldDataCache;
-
-    public GeoDistanceRangeFilter(double lat, double lon, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, String fieldName, GeoPointFieldMapper mapper, FieldDataCache fieldDataCache,
+    public GeoDistanceRangeFilter(double lat, double lon, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper mapper, IndexGeoPointFieldData indexFieldData,
                                   String optimizeBbox) {
         this.lat = lat;
         this.lon = lon;
         this.geoDistance = geoDistance;
-        this.fieldName = fieldName;
-        this.fieldDataCache = fieldDataCache;
+        this.indexFieldData = indexFieldData;
 
         this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, DistanceUnit.MILES);
 
@@ -114,10 +111,6 @@ public class GeoDistanceRangeFilter extends Filter {
         return geoDistance;
     }
 
-    public String fieldName() {
-        return fieldName;
-    }
-
     @Override
     public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptedDocs) throws IOException {
         DocIdSet boundingBoxDocSet = null;
@@ -127,8 +120,8 @@ public class GeoDistanceRangeFilter extends Filter {
                 return null;
             }
         }
-        final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, context.reader(), fieldName);
-        GeoDistanceRangeDocSet distDocSet = new GeoDistanceRangeDocSet(context.reader().maxDoc(), acceptedDocs, fieldData, fixedSourceDistance, distanceBoundingCheck, inclusiveLowerPoint, inclusiveUpperPoint);
+        GeoPointValues values = indexFieldData.load(context).getGeoPointValues();
+        GeoDistanceRangeDocSet distDocSet = new GeoDistanceRangeDocSet(context.reader().maxDoc(), acceptedDocs, values, fixedSourceDistance, distanceBoundingCheck, inclusiveLowerPoint, inclusiveUpperPoint);
         if (boundingBoxDocSet == null) {
             return distDocSet;
         } else {
@@ -147,7 +140,8 @@ public class GeoDistanceRangeFilter extends Filter {
         if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false;
         if (Double.compare(filter.lat, lat) != 0) return false;
         if (Double.compare(filter.lon, lon) != 0) return false;
-        if (fieldName != null ? !fieldName.equals(filter.fieldName) : filter.fieldName != null) return false;
+        if (!indexFieldData.getFieldNames().indexName().equals(filter.indexFieldData.getFieldNames().indexName()))
+            return false;
         if (geoDistance != filter.geoDistance) return false;
 
         return true;
@@ -155,7 +149,7 @@ public class GeoDistanceRangeFilter extends Filter {
 
     @Override
     public String toString() {
-        return "GeoDistanceRangeFilter(" + fieldName + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
+        return "GeoDistanceRangeFilter(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
     }
 
     @Override
@ -171,22 +165,22 @@ public class GeoDistanceRangeFilter extends Filter {
|
|||
temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L;
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0);
|
||||
result = 31 * result + (fieldName != null ? fieldName.hashCode() : 0);
|
||||
result = 31 * result + indexFieldData.getFieldNames().indexName().hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
public static class GeoDistanceRangeDocSet extends MatchDocIdSet {
|
||||
|
||||
private final GeoPointFieldData fieldData;
|
||||
private final GeoPointValues values;
|
||||
private final GeoDistance.FixedSourceDistance fixedSourceDistance;
|
||||
private final GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
|
||||
private final double inclusiveLowerPoint; // in miles
|
||||
private final double inclusiveUpperPoint; // in miles
|
||||
|
||||
public GeoDistanceRangeDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointFieldData fieldData, GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistance.DistanceBoundingCheck distanceBoundingCheck,
|
||||
public GeoDistanceRangeDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistance.DistanceBoundingCheck distanceBoundingCheck,
|
||||
double inclusiveLowerPoint, double inclusiveUpperPoint) {
|
||||
super(maxDoc, acceptDocs);
|
||||
this.fieldData = fieldData;
|
||||
this.values = values;
|
||||
this.fixedSourceDistance = fixedSourceDistance;
|
||||
this.distanceBoundingCheck = distanceBoundingCheck;
|
||||
this.inclusiveLowerPoint = inclusiveLowerPoint;
|
||||
|
@ -200,18 +194,16 @@ public class GeoDistanceRangeFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!fieldData.hasValue(doc)) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fieldData.multiValued()) {
|
||||
double[] lats = fieldData.latValues(doc);
|
||||
double[] lons = fieldData.lonValues(doc);
|
||||
for (int i = 0; i < lats.length; i++) {
|
||||
double lat = lats[i];
|
||||
double lon = lons[i];
|
||||
if (distanceBoundingCheck.isWithin(lat, lon)) {
|
||||
double d = fixedSourceDistance.calculate(lat, lon);
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
|
@ -219,10 +211,9 @@ public class GeoDistanceRangeFilter extends Filter {
|
|||
}
|
||||
return false;
|
||||
} else {
|
||||
double lat = fieldData.latValue(doc);
|
||||
double lon = fieldData.lonValue(doc);
|
||||
if (distanceBoundingCheck.isWithin(lat, lon)) {
|
||||
double d = fixedSourceDistance.calculate(lat, lon);
|
||||
GeoPoint point = values.getValue(doc);
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
|
|
|
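All three geo filters touched by this commit converge on the same per-segment read pattern: load GeoPointValues from the IndexGeoPointFieldData once per segment, then branch on isMultiValued() per document. The following is a minimal sketch of that shared pattern using only the calls visible in the diff; pointMatches() is a placeholder for the filter-specific test (distance range, polygon, or bounding box) and is not a method in the codebase.

import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPoint;

// Sketch of the per-segment matching loop shared by the geo filters after this change.
// pointMatches() stands in for the filter-specific test and is not part of the codebase.
abstract class GeoValuesMatchSketch {

    protected abstract boolean pointMatches(double lat, double lon);

    // Called once per segment, mirroring Filter.getDocIdSet() in the classes above.
    GeoPointValues loadValues(IndexGeoPointFieldData indexFieldData, AtomicReaderContext context) {
        return indexFieldData.load(context).getGeoPointValues();
    }

    // Called per document, mirroring MatchDocIdSet.matchDoc() in the classes above.
    boolean matchDoc(GeoPointValues values, int doc) {
        if (!values.hasValue(doc)) {
            return false;                      // document has no geo point
        }
        if (values.isMultiValued()) {
            GeoPointValues.Iter iter = values.getIter(doc);
            while (iter.hasNext()) {           // any matching point accepts the document
                GeoPoint point = iter.next();
                if (pointMatches(point.lat(), point.lon())) {
                    return true;
                }
            }
            return false;
        }
        GeoPoint point = values.getValue(doc); // single-valued fast path
        return pointMatches(point.lat(), point.lon());
    }
}

Keeping the load in the per-segment step and only the per-document checks in matchDoc mirrors how the doc sets above are wired.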
@@ -25,9 +25,9 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPoint;

import java.io.IOException;
import java.util.Arrays;

@@ -39,14 +39,11 @@ public class GeoPolygonFilter extends Filter {
private final Point[] points;

private final String fieldName;
private final IndexGeoPointFieldData indexFieldData;

private final FieldDataCache fieldDataCache;

public GeoPolygonFilter(Point[] points, String fieldName, FieldDataCache fieldDataCache) {
public GeoPolygonFilter(Point[] points, IndexGeoPointFieldData indexFieldData) {
this.points = points;
this.fieldName = fieldName;
this.fieldDataCache = fieldDataCache;
this.indexFieldData = indexFieldData;
}

public Point[] points() {

@@ -54,27 +51,27 @@ public class GeoPolygonFilter extends Filter {
}

public String fieldName() {
return this.fieldName;
return indexFieldData.getFieldNames().indexName();
}

@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptedDocs) throws IOException {
final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, context.reader(), fieldName);
return new GeoPolygonDocIdSet(context.reader().maxDoc(), acceptedDocs, fieldData, points);
final GeoPointValues values = indexFieldData.load(context).getGeoPointValues();
return new GeoPolygonDocIdSet(context.reader().maxDoc(), acceptedDocs, values, points);
}

@Override
public String toString() {
return "GeoPolygonFilter(" + fieldName + ", " + Arrays.toString(points) + ")";
return "GeoPolygonFilter(" + indexFieldData.getFieldNames().indexName() + ", " + Arrays.toString(points) + ")";
}

public static class GeoPolygonDocIdSet extends MatchDocIdSet {
private final GeoPointFieldData fieldData;
private final GeoPointValues values;
private final Point[] points;

public GeoPolygonDocIdSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointFieldData fieldData, Point[] points) {
public GeoPolygonDocIdSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, Point[] points) {
super(maxDoc, acceptDocs);
this.fieldData = fieldData;
this.values = values;
this.points = points;
}

@@ -85,22 +82,21 @@ public class GeoPolygonFilter extends Filter {
@Override
protected boolean matchDoc(int doc) {
if (!fieldData.hasValue(doc)) {
if (!values.hasValue(doc)) {
return false;
}

if (fieldData.multiValued()) {
double[] lats = fieldData.latValues(doc);
double[] lons = fieldData.lonValues(doc);
for (int i = 0; i < lats.length; i++) {
if (pointInPolygon(points, lats[i], lons[i])) {
if (values.isMultiValued()) {
GeoPointValues.Iter iter = values.getIter(doc);
while (iter.hasNext()) {
GeoPoint point = iter.next();
if (pointInPolygon(points, point.lat(), point.lon())) {
return true;
}
}
} else {
double lat = fieldData.latValue(doc);
double lon = fieldData.lonValue(doc);
return pointInPolygon(points, lat, lon);
GeoPoint point = values.getValue(doc);
return pointInPolygon(points, point.lat(), point.lon());
}
return false;
}
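The constructor change is representative of the whole commit: the filter no longer receives a field name plus a FieldDataCache, it takes the IndexGeoPointFieldData directly and derives the field name from it on demand. A caller-side sketch under that assumption; the parser change that produces these arguments is not part of this excerpt.

// Illustrative only: polygonPoints and geoFieldData would be supplied by the query parser,
// which is not shown here, so this helper is a sketch rather than actual parser code.
static Filter buildPolygonFilter(Point[] polygonPoints, IndexGeoPointFieldData geoFieldData) {
    GeoPolygonFilter filter = new GeoPolygonFilter(polygonPoints, geoFieldData);
    // The field name is now derived from the field data rather than stored separately.
    assert filter.fieldName().equals(geoFieldData.getFieldNames().indexName());
    return filter;
}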
@@ -25,9 +25,9 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPoint;

import java.io.IOException;

@@ -37,18 +37,14 @@ import java.io.IOException;
public class InMemoryGeoBoundingBoxFilter extends Filter {

private final Point topLeft;

private final Point bottomRight;

private final String fieldName;
private final IndexGeoPointFieldData indexFieldData;

private final FieldDataCache fieldDataCache;

public InMemoryGeoBoundingBoxFilter(Point topLeft, Point bottomRight, String fieldName, FieldDataCache fieldDataCache) {
public InMemoryGeoBoundingBoxFilter(Point topLeft, Point bottomRight, IndexGeoPointFieldData indexFieldData) {
this.topLeft = topLeft;
this.bottomRight = bottomRight;
this.fieldName = fieldName;
this.fieldDataCache = fieldDataCache;
this.indexFieldData = indexFieldData;
}

public Point topLeft() {

@@ -60,34 +56,34 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
}

public String fieldName() {
return fieldName;
return indexFieldData.getFieldNames().indexName();
}

@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptedDocs) throws IOException {
final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, context.reader(), fieldName);
final GeoPointValues values = indexFieldData.load(context).getGeoPointValues();

//checks to see if bounding box crosses 180 degrees
if (topLeft.lon > bottomRight.lon) {
return new Meridian180GeoBoundingBoxDocSet(context.reader().maxDoc(), acceptedDocs, fieldData, topLeft, bottomRight);
return new Meridian180GeoBoundingBoxDocSet(context.reader().maxDoc(), acceptedDocs, values, topLeft, bottomRight);
} else {
return new GeoBoundingBoxDocSet(context.reader().maxDoc(), acceptedDocs, fieldData, topLeft, bottomRight);
return new GeoBoundingBoxDocSet(context.reader().maxDoc(), acceptedDocs, values, topLeft, bottomRight);
}
}

@Override
public String toString() {
return "GeoBoundingBoxFilter(" + fieldName + ", " + topLeft + ", " + bottomRight + ")";
return "GeoBoundingBoxFilter(" + indexFieldData.getFieldNames().indexName() + ", " + topLeft + ", " + bottomRight + ")";
}

public static class Meridian180GeoBoundingBoxDocSet extends MatchDocIdSet {
private final GeoPointFieldData fieldData;
private final GeoPointValues values;
private final Point topLeft;
private final Point bottomRight;

public Meridian180GeoBoundingBoxDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointFieldData fieldData, Point topLeft, Point bottomRight) {
public Meridian180GeoBoundingBoxDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, Point topLeft, Point bottomRight) {
super(maxDoc, acceptDocs);
this.fieldData = fieldData;
this.values = values;
this.topLeft = topLeft;
this.bottomRight = bottomRight;
}

@@ -99,27 +95,24 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
@Override
protected boolean matchDoc(int doc) {
if (!fieldData.hasValue(doc)) {
if (!values.hasValue(doc)) {
return false;
}

if (fieldData.multiValued()) {
double[] lats = fieldData.latValues(doc);
double[] lons = fieldData.lonValues(doc);
for (int i = 0; i < lats.length; i++) {
double lat = lats[i];
double lon = lons[i];
if (((topLeft.lon <= lon || bottomRight.lon >= lon)) &&
(topLeft.lat >= lat && bottomRight.lat <= lat)) {
if (values.isMultiValued()) {
GeoPointValues.Iter iter = values.getIter(doc);
while (iter.hasNext()) {
GeoPoint point = iter.next();
if (((topLeft.lon <= point.lon() || bottomRight.lon >= point.lon())) &&
(topLeft.lat >= point.lat() && bottomRight.lat <= point.lat())) {
return true;
}
}
} else {
double lat = fieldData.latValue(doc);
double lon = fieldData.lonValue(doc);
GeoPoint point = values.getValue(doc);

if (((topLeft.lon <= lon || bottomRight.lon >= lon)) &&
(topLeft.lat >= lat && bottomRight.lat <= lat)) {
if (((topLeft.lon <= point.lon() || bottomRight.lon >= point.lon())) &&
(topLeft.lat >= point.lat() && bottomRight.lat <= point.lat())) {
return true;
}
}

@@ -128,13 +121,13 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
}

public static class GeoBoundingBoxDocSet extends MatchDocIdSet {
private final GeoPointFieldData fieldData;
private final GeoPointValues values;
private final Point topLeft;
private final Point bottomRight;

public GeoBoundingBoxDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointFieldData fieldData, Point topLeft, Point bottomRight) {
public GeoBoundingBoxDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, Point topLeft, Point bottomRight) {
super(maxDoc, acceptDocs);
this.fieldData = fieldData;
this.values = values;
this.topLeft = topLeft;
this.bottomRight = bottomRight;
}

@@ -146,25 +139,23 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
@Override
protected boolean matchDoc(int doc) {
if (!fieldData.hasValue(doc)) {
if (!values.hasValue(doc)) {
return false;
}

if (fieldData.multiValued()) {
double[] lats = fieldData.latValues(doc);
double[] lons = fieldData.lonValues(doc);
for (int i = 0; i < lats.length; i++) {
if (topLeft.lon <= lons[i] && bottomRight.lon >= lons[i]
&& topLeft.lat >= lats[i] && bottomRight.lat <= lats[i]) {
if (values.isMultiValued()) {
GeoPointValues.Iter iter = values.getIter(doc);
while (iter.hasNext()) {
GeoPoint point = iter.next();
if (topLeft.lon <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
} else {
double lat = fieldData.latValue(doc);
double lon = fieldData.lonValue(doc);

if (topLeft.lon <= lon && bottomRight.lon >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
GeoPoint point = values.getValue(doc);
if (topLeft.lon <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
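As in getDocIdSet above, the filter picks Meridian180GeoBoundingBoxDocSet only when topLeft.lon > bottomRight.lon, and the sole difference between the two doc sets is the longitude test. A condensed sketch of the two containment checks, assuming Point exposes public lat/lon fields and GeoPoint exposes lat()/lon() accessors as shown in the diff:

// Regular box: the point must lie between the left and right edges and between the
// top and bottom latitudes (topLeft.lat is the larger latitude).
static boolean inRegularBox(GeoPoint p, Point topLeft, Point bottomRight) {
    return topLeft.lon <= p.lon() && bottomRight.lon >= p.lon()
            && topLeft.lat >= p.lat() && bottomRight.lat <= p.lat();
}

// Box crossing the 180 degree meridian (topLeft.lon > bottomRight.lon): the longitude
// test becomes a disjunction, since the box wraps around the date line.
static boolean inMeridian180Box(GeoPoint p, Point topLeft, Point bottomRight) {
    return (topLeft.lon <= p.lon() || bottomRight.lon >= p.lon())
            && topLeft.lat >= p.lat() && bottomRight.lat <= p.lat();
}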
@@ -34,7 +34,6 @@ import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.engine.Engine;

@@ -356,10 +355,6 @@ public class SearchContext implements Releasable {
return indexService.cache().filter();
}

public FieldDataCache fieldDataCache() {
return indexService.cache().fieldData();
}

public IndexFieldDataService fieldData() {
return indexService.fieldData();
}
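With fieldDataCache() removed, consumers resolve per-field data through the IndexFieldDataService that SearchContext now exposes. A hedged sketch of the lookup follows; getForField(mapper) and the IndexGeoPointFieldData cast are assumptions about the new service's API rather than something visible in this excerpt.

// Sketch only: getForField() and the cast target are assumed; the concrete mapper and the
// bounding-box corners would come from the (not shown) query parser.
IndexFieldDataService fieldDataService = searchContext.fieldData();
IndexGeoPointFieldData geoFieldData =
        (IndexGeoPointFieldData) fieldDataService.getForField(geoPointFieldMapper);
Filter boundingBox = new InMemoryGeoBoundingBoxFilter(topLeft, bottomRight, geoFieldData);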