Core: Prevent non-segment readers from entering the filter cache and the field data caches.
Percolator: Never cache filters and field data for the percolator query parsing part. Closes #6553
This commit is contained in:
parent
0a93956d9a
commit
ec74a7e76f
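Not part of the commit: a minimal, self-contained sketch of the pattern the change introduces. A parse context created with caching disabled hands filters back uncached and loads field data directly instead of going through the per-index caches, which is what the percolator now does while parsing registered queries. All names below (ParseContext, FieldDataSource, etc.) are illustrative stand-ins, not the Elasticsearch API.

// Illustrative sketch only -- not part of this commit.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

interface Filter {}

class FieldData {}

class FieldDataSource {
    private final Map<String, FieldData> cache = new ConcurrentHashMap<>();

    FieldData getForField(String field) {            // cached load
        return cache.computeIfAbsent(field, f -> loadDirect(f));
    }

    FieldData loadDirect(String field) {             // bypasses the cache entirely
        return new FieldData();
    }
}

class ParseContext {
    private final boolean disableCaching;
    private final Map<Filter, Filter> filterCache = new ConcurrentHashMap<>();
    private final FieldDataSource fieldData = new FieldDataSource();

    ParseContext(boolean disableCaching) {
        this.disableCaching = disableCaching;
    }

    Filter cacheFilter(Filter filter) {
        if (disableCaching) {
            return filter;                           // never enters the cache
        }
        return filterCache.computeIfAbsent(filter, f -> f);
    }

    FieldData getForField(String field) {
        return disableCaching ? fieldData.loadDirect(field) : fieldData.getForField(field);
    }
}

public class PercolatorCachingSketch {
    public static void main(String[] args) {
        ParseContext percolatorContext = new ParseContext(true);   // percolator-style parsing
        ParseContext searchContext = new ParseContext(false);      // regular search parsing
        Filter f = new Filter() {};
        System.out.println(percolatorContext.cacheFilter(f) == f); // true: returned uncached
        System.out.println(searchContext.getForField("field") != null);
    }
}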
@@ -19,7 +19,6 @@
 package org.elasticsearch.index.cache;
 
-import org.apache.lucene.index.IndexReader;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterService;

@@ -84,11 +83,6 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
         }
     }
 
-    public void clear(IndexReader reader) {
-        filterCache.clear(reader);
-        docSetCache.clear(reader);
-    }
-
     public void clear(String reason) {
         filterCache.clear(reason);
         queryParserCache.clear();
@@ -23,6 +23,7 @@ import com.google.common.cache.Cache;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.Weigher;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.SegmentReader;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;

@@ -32,6 +33,7 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.SegmentReaderUtils;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.search.CachedFilter;
 import org.elasticsearch.common.lucene.search.NoCacheFilter;

@@ -51,7 +53,7 @@ import org.elasticsearch.indices.cache.filter.IndicesFilterCache;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentMap;
 
-public class WeightedFilterCache extends AbstractIndexComponent implements FilterCache, SegmentReader.CoreClosedListener {
+public class WeightedFilterCache extends AbstractIndexComponent implements FilterCache, SegmentReader.CoreClosedListener, IndexReader.ReaderClosedListener {
 
     final IndicesFilterCache indicesFilterCache;
     IndexService indexService;

@@ -79,6 +81,12 @@ public class WeightedFilterCache extends AbstractIndexComponent implements Filte
         clear("close");
     }
 
+    @Override
+    public void onClose(IndexReader reader) {
+        clear(reader.getCoreCacheKey());
+    }
+
     @Override
     public void clear(String reason) {
         logger.debug("full cache clear, reason [{}]", reason);

@@ -160,9 +168,7 @@ public class WeightedFilterCache extends AbstractIndexComponent implements Filte
                 Boolean previous = cache.seenReaders.putIfAbsent(context.reader().getCoreCacheKey(), Boolean.TRUE);
                 if (previous == null) {
                     // we add a core closed listener only, for non core IndexReaders we rely on clear being called (percolator for example)
-                    if (context.reader() instanceof SegmentReader) {
-                        ((SegmentReader) context.reader()).addCoreClosedListener(cache);
-                    }
+                    SegmentReaderUtils.registerCoreListener(context.reader(), cache);
                 }
             }
             // we can't pass down acceptedDocs provided, because we are caching the result, and acceptedDocs
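Not part of the commit: a minimal, self-contained sketch of the eviction idea behind WeightedFilterCache also acting as a reader-close listener — cached entries are keyed by the owning reader's cache key and dropped when that reader is closed. Reader, CloseListener and FilterCacheSketch are hypothetical types, not Lucene or Elasticsearch API.

// Illustrative sketch only -- not part of this commit.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

interface CloseListener {
    void onClose(Object cacheKey);
}

class Reader {
    private final Object cacheKey = new Object();
    private CloseListener listener;

    Object getCacheKey() { return cacheKey; }
    void addCloseListener(CloseListener listener) { this.listener = listener; }
    void close() { if (listener != null) listener.onClose(cacheKey); }
}

public class FilterCacheSketch implements CloseListener {
    // one entry map per reader cache key
    private final Map<Object, Map<String, Object>> cache = new ConcurrentHashMap<>();

    Object getOrCompute(Reader reader, String filterName) {
        cache.computeIfAbsent(reader.getCacheKey(), k -> {
            reader.addCloseListener(this);           // register the eviction hook once per reader
            return new ConcurrentHashMap<>();
        });
        return cache.get(reader.getCacheKey()).computeIfAbsent(filterName, n -> new Object());
    }

    @Override
    public void onClose(Object cacheKey) {
        cache.remove(cacheKey);                      // evict everything owned by the closed reader
    }

    public static void main(String[] args) {
        FilterCacheSketch filterCache = new FilterCacheSketch();
        Reader reader = new Reader();
        filterCache.getOrCompute(reader, "term:id=1");
        reader.close();                              // all entries for this reader are gone now
    }
}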
@@ -69,6 +69,33 @@ public interface IndexFieldDataCache {
         void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
     }
 
+    class None implements IndexFieldDataCache {
+
+        @Override
+        public <FD extends AtomicFieldData, IFD extends IndexFieldData<FD>> FD load(AtomicReaderContext context, IFD indexFieldData) throws Exception {
+            return indexFieldData.loadDirect(context);
+        }
+
+        @Override
+        @SuppressWarnings("unchecked")
+        public <IFD extends IndexFieldData.WithOrdinals<?>> IFD load(IndexReader indexReader, IFD indexFieldData) throws Exception {
+            return (IFD) indexFieldData.localGlobalDirect(indexReader);
+        }
+
+        @Override
+        public void clear() {
+        }
+
+        @Override
+        public void clear(String fieldName) {
+        }
+
+        @Override
+        public void clear(Object coreCacheKey) {
+
+        }
+    }
+
     /**
      * The resident field data cache is a *per field* cache that keeps all the values in memory.
      */
@@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-import org.apache.lucene.index.IndexReader;
 import org.elasticsearch.ElasticsearchIllegalArgumentException;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.collect.Tuple;

@@ -40,6 +39,7 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.service.IndexService;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
+import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;

@@ -187,17 +187,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
         }
     }
 
-    public void clear(IndexReader reader) {
-        synchronized (loadedFieldData) {
-            for (IndexFieldData<?> indexFieldData : loadedFieldData.values()) {
-                indexFieldData.clear(reader);
-            }
-            for (IndexFieldDataCache cache : fieldDataCaches.values()) {
-                cache.clear(reader);
-            }
-        }
-    }
-
     public void onMappingUpdate() {
         // synchronize to make sure to not miss field data instances that are being loaded
         synchronized (loadedFieldData) {

@@ -206,6 +195,7 @@ public class IndexFieldDataService extends AbstractIndexComponent {
         }
     }
 
+    @SuppressWarnings("unchecked")
     public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper<?> mapper) {
         final FieldMapper.Names fieldNames = mapper.names();
         final FieldDataType type = mapper.fieldDataType();

@@ -251,6 +241,8 @@ public class IndexFieldDataService extends AbstractIndexComponent {
             cache = new IndexFieldDataCache.Soft(logger, indexService, fieldNames, type, indicesFieldDataCacheListener);
         } else if ("node".equals(cacheType)) {
             cache = indicesFieldDataCache.buildIndexFieldDataCache(indexService, index, fieldNames, type);
+        } else if ("none".equals(cacheType)){
+            cache = new IndexFieldDataCache.None();
         } else {
             throw new ElasticsearchIllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]");
         }

@@ -266,4 +258,41 @@ public class IndexFieldDataService extends AbstractIndexComponent {
         return (IFD) fieldData;
     }
 
+    public <IFD extends IndexFieldData<?>> IFD getForFieldDirect(FieldMapper<?> mapper) {
+        final FieldMapper.Names fieldNames = mapper.names();
+        final FieldDataType type = mapper.fieldDataType();
+        if (type == null) {
+            throw new ElasticsearchIllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
+        }
+        final boolean docValues = mapper.hasDocValues();
+
+        IndexFieldData.Builder builder = null;
+        String format = type.getFormat(indexSettings);
+        if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
+            logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
+            format = null;
+        }
+        if (format != null) {
+            builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
+            if (builder == null) {
+                logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
+            }
+        }
+        if (builder == null && docValues) {
+            builder = docValuesBuildersByType.get(type.getType());
+        }
+        if (builder == null) {
+            builder = buildersByType.get(type.getType());
+        }
+        if (builder == null) {
+            throw new ElasticsearchIllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType());
+        }
+
+        CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
+        GlobalOrdinalsBuilder globalOrdinalBuilder = new InternalGlobalOrdinalsBuilder(index(), indexSettings);
+        @SuppressWarnings("unchecked")
+        IFD ifd = (IFD) builder.build(index, indexSettings, mapper, new IndexFieldDataCache.None(), circuitBreakerService, indexService.mapperService(), globalOrdinalBuilder);
+        return ifd;
+    }
+
 }
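Not part of the commit: a small self-contained sketch of the null-object idea behind the new IndexFieldDataCache.None and getForFieldDirect() — a "cache" whose load() simply delegates to the loader, so nothing is ever retained. FieldDataCache, MapBackedCache, NoneCache and Loader are hypothetical names, not the Elasticsearch API.

// Illustrative sketch only -- not part of this commit.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

interface Loader<T> {
    T loadDirect(String field);
}

interface FieldDataCache {
    <T> T load(String field, Loader<T> loader);
}

// real cache: remembers what it loaded
class MapBackedCache implements FieldDataCache {
    private final Map<String, Object> entries = new ConcurrentHashMap<>();

    @Override
    @SuppressWarnings("unchecked")
    public <T> T load(String field, Loader<T> loader) {
        return (T) entries.computeIfAbsent(field, loader::loadDirect);
    }
}

// null-object cache: every call goes straight to the loader, nothing is kept
class NoneCache implements FieldDataCache {
    @Override
    public <T> T load(String field, Loader<T> loader) {
        return loader.loadDirect(field);
    }
}

public class DirectFieldDataSketch {
    public static void main(String[] args) {
        Loader<String> loader = field -> "fielddata(" + field + ")";
        FieldDataCache cached = new MapBackedCache();
        FieldDataCache direct = new NoneCache();
        System.out.println(cached.load("user", loader));   // stored for reuse
        System.out.println(direct.load("user", loader));   // recomputed every time
    }
}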
@@ -32,7 +32,6 @@ import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;

@@ -249,7 +248,7 @@ public interface FieldMapper<T> extends Mapper {
 
     Filter termsFilter(List values, @Nullable QueryParseContext context);
 
-    Filter termsFilter(IndexFieldDataService fieldData, List values, @Nullable QueryParseContext context);
+    Filter termsFilter(QueryParseContext parseContext, List values, @Nullable QueryParseContext context);
 
     Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
@@ -50,7 +50,6 @@ import org.elasticsearch.index.codec.postingsformat.PostingFormats;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatService;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;

@@ -491,7 +490,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
      * A terms filter based on the field data cache
      */
     @Override
-    public Filter termsFilter(IndexFieldDataService fieldDataService, List values, @Nullable QueryParseContext context) {
+    public Filter termsFilter(QueryParseContext fieldDataService, List values, @Nullable QueryParseContext context) {
         // create with initial size large enough to avoid rehashing
         ObjectOpenHashSet<BytesRef> terms =
                 new ObjectOpenHashSet<>((int) (values.size() * (1 + ObjectOpenHashSet.DEFAULT_LOAD_FACTOR)));
@@ -41,7 +41,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -223,8 +222,8 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newByteRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newByteRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);

@@ -46,7 +46,6 @@ import org.elasticsearch.index.analysis.NumericDateAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;

@@ -369,11 +368,11 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return rangeFilter(fieldData, lowerTerm, upperTerm, includeLower, includeUpper, context, false);
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return rangeFilter(parseContext, lowerTerm, upperTerm, includeLower, includeUpper, context, false);
     }
 
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context, boolean explicitCaching) {
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context, boolean explicitCaching) {
         boolean cache = explicitCaching;
         Long lowerVal = null;
         Long upperVal = null;

@@ -397,7 +396,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
         }
 
         Filter filter = NumericRangeFieldDataFilter.newLongRange(
-                (IndexNumericFieldData<?>) fieldData.getForField(this), lowerVal,upperVal, includeLower, includeUpper
+                (IndexNumericFieldData<?>) parseContext.getForField(this), lowerVal,upperVal, includeLower, includeUpper
         );
         if (!cache) {
             // We don't cache range filter if `now` date expression is used and also when a compound filter wraps

@@ -45,7 +45,6 @@ import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -218,8 +217,8 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newDoubleRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newDoubleRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseDoubleValue(lowerTerm),
                 upperTerm == null ? null : parseDoubleValue(upperTerm),
                 includeLower, includeUpper);

@@ -46,7 +46,6 @@ import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -223,8 +222,8 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);

@@ -42,7 +42,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -218,8 +217,8 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newIntRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newIntRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);

@@ -42,7 +42,6 @@ import org.elasticsearch.index.analysis.NumericLongAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -208,8 +207,8 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseLongValue(lowerTerm),
                 upperTerm == null ? null : parseLongValue(upperTerm),
                 includeLower, includeUpper);

@@ -45,7 +45,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;

@@ -283,13 +282,13 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
     /**
      * A range filter based on the field data cache.
     */
-    public abstract Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
+    public abstract Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
 
     /**
      * A terms filter based on the field data cache for numeric fields.
     */
     @Override
-    public Filter termsFilter(IndexFieldDataService fieldDataService, List values, @Nullable QueryParseContext context) {
+    public Filter termsFilter(QueryParseContext fieldDataService, List values, @Nullable QueryParseContext context) {
         IndexNumericFieldData fieldData = fieldDataService.getForField(this);
         if (fieldData.getNumericType().isFloatingPoint()) {
             // create with initial size large enough to avoid rehashing

@@ -43,7 +43,6 @@ import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.query.QueryParseContext;

@@ -224,8 +223,8 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newShortRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newShortRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);

@@ -39,7 +39,6 @@ import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.FloatFieldMapper;

@@ -210,8 +209,8 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements Intern
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);

@@ -43,7 +43,6 @@ import org.elasticsearch.index.analysis.NumericTokenizer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;

@@ -250,8 +249,8 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
     }
 
     @Override
-    public Filter rangeFilter(IndexFieldDataService fieldData, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) fieldData.getForField(this),
+    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
+        return NumericRangeFieldDataFilter.newLongRange((IndexNumericFieldData) parseContext.getForField(this),
                 lowerTerm == null ? null : parseValue(lowerTerm),
                 upperTerm == null ? null : parseValue(upperTerm),
                 includeLower, includeUpper);
@@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CloseableThreadLocal;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;

@@ -81,6 +82,13 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
     private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
     private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false);
 
+    private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
+        @Override
+        protected QueryParseContext initialValue() {
+            return new QueryParseContext(shardId.index(), queryParserService, true);
+        }
+    };
+
     @Inject
     public PercolatorQueriesRegistry(ShardId shardId, @IndexSettings Settings indexSettings, IndexQueryParserService queryParserService,
                                      ShardIndexingService indexingService, IndicesLifecycle indicesLifecycle, MapperService mapperService,

@@ -187,25 +195,22 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
     }
 
     private Query parseQuery(String type, BytesReference querySource, XContentParser parser) {
-        if (type == null) {
-            if (parser != null) {
-                return queryParserService.parse(parser).query();
-            } else {
-                return queryParserService.parse(querySource).query();
-            }
+        String[] previousTypes = null;
+        if (type != null) {
+            QueryParseContext.setTypesWithPrevious(new String[]{type});
         }
-
-        String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{type});
         try {
             if (parser != null) {
-                return queryParserService.parse(parser).query();
+                return queryParserService.parse(cache.get(), parser).query();
             } else {
-                return queryParserService.parse(querySource).query();
+                return queryParserService.parse(cache.get(), querySource).query();
             }
         } finally {
+            if (type != null) {
                 QueryParseContext.setTypes(previousTypes);
+            }
         }
     }
 
     private class PercolateTypeListener implements DocumentTypeListener {
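Not part of the commit: a self-contained sketch of two patterns visible in the PercolatorQueriesRegistry change — a per-thread parse context created once with caching disabled and reused for every percolator query, and a try/finally save/restore of a thread-scoped types value around parsing. ParseContext and QueryTypes are hypothetical stand-ins, not Elasticsearch classes.

// Illustrative sketch only -- not part of this commit.
final class ParseContext {
    final boolean disableCaching;
    ParseContext(boolean disableCaching) { this.disableCaching = disableCaching; }
}

final class QueryTypes {
    private static final ThreadLocal<String[]> TYPES = new ThreadLocal<>();

    static String[] setWithPrevious(String[] types) {
        String[] previous = TYPES.get();
        TYPES.set(types);
        return previous;
    }

    static void set(String[] types) { TYPES.set(types); }
}

public class PercolatorParseSketch {
    // created lazily, once per thread, with caching disabled
    private final ThreadLocal<ParseContext> contexts =
            ThreadLocal.withInitial(() -> new ParseContext(true));

    Object parseQuery(String type, String querySource) {
        String[] previousTypes = null;
        if (type != null) {
            previousTypes = QueryTypes.setWithPrevious(new String[]{type});
        }
        try {
            ParseContext context = contexts.get();   // reused, caching stays off
            return "parsed[" + querySource + ", disableCaching=" + context.disableCaching + "]";
        } finally {
            if (type != null) {
                QueryTypes.set(previousTypes);       // always restore the previous types
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(new PercolatorParseSketch().parseQuery("doc", "{\"match_all\":{}}"));
    }
}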
@@ -175,7 +175,7 @@ public class GeoBoundingBoxFilterParser implements FilterParser {
         if ("indexed".equals(type)) {
             filter = IndexedGeoBoundingBoxFilter.create(topLeft, bottomRight, geoMapper);
         } else if ("memory".equals(type)) {
-            IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
+            IndexGeoPointFieldData<?> indexFieldData = parseContext.getForField(mapper);
             filter = new InMemoryGeoBoundingBoxFilter(topLeft, bottomRight, indexFieldData);
         } else {
             throw new QueryParsingException(parseContext.index(), "geo bounding box type [" + type + "] not supported, either 'indexed' or 'memory' are allowed");

@@ -163,7 +163,7 @@ public class GeoDistanceFilterParser implements FilterParser {
         GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
 
-        IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
+        IndexGeoPointFieldData<?> indexFieldData = parseContext.getForField(mapper);
         Filter filter = new GeoDistanceFilter(point.lat(), point.lon(), distance, geoDistance, indexFieldData, geoMapper, optimizeBbox);
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);

@@ -205,7 +205,7 @@ public class GeoDistanceRangeFilterParser implements FilterParser {
         }
         GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
 
-        IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
+        IndexGeoPointFieldData<?> indexFieldData = parseContext.getForField(mapper);
         Filter filter = new GeoDistanceRangeFilter(point, from, to, includeLower, includeUpper, geoDistance, geoMapper, indexFieldData, optimizeBbox);
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);

@@ -150,7 +150,7 @@ public class GeoPolygonFilterParser implements FilterParser {
             throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
         }
 
-        IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
+        IndexGeoPointFieldData<?> indexFieldData = parseContext.getForField(mapper);
         Filter filter = new GeoPolygonFilter(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);

@@ -156,7 +156,7 @@ public class HasChildFilterParser implements FilterParser {
         }
 
         Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
-        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
+        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
 
         Query childrenQuery;
         if (minChildren > 1 || maxChildren > 0) {

@@ -159,7 +159,7 @@ public class HasChildQueryParser implements QueryParser {
 
         Query query;
         Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
-        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
+        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
         if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
             query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
                     maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);

@@ -136,7 +136,7 @@ public class HasParentFilterParser implements FilterParser {
             ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
             if (parentFieldMapper.active()) {
                 DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
-                parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
+                parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
                 if (parentTypeDocumentMapper == null) {
                     // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
                     parentTypes.add(parentFieldMapper.type());

@@ -138,7 +138,7 @@ public class HasParentQueryParser implements QueryParser {
         for (DocumentMapper documentMapper : parseContext.mapperService()) {
             ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
             if (parentFieldMapper.active()) {
-                parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
+                parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
                 DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
                 if (parentTypeDocumentMapper == null) {
                     // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
@@ -224,10 +224,14 @@ public class IndexQueryParserService extends AbstractIndexComponent {
     }
 
     public ParsedQuery parse(BytesReference source) throws ElasticsearchException {
+        return parse(cache.get(), source);
+    }
+
+    public ParsedQuery parse(QueryParseContext context, BytesReference source) throws ElasticsearchException {
         XContentParser parser = null;
         try {
             parser = XContentFactory.xContent(source).createParser(source);
-            return parse(cache.get(), parser);
+            return innerParse(context, parser);
         } catch (QueryParsingException e) {
             throw e;
         } catch (Exception e) {

@@ -243,7 +247,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         XContentParser parser = null;
         try {
             parser = XContentFactory.xContent(source).createParser(source);
-            return parse(cache.get(), parser);
+            return innerParse(cache.get(), parser);
         } catch (QueryParsingException e) {
             throw e;
         } catch (Exception e) {

@@ -256,8 +260,12 @@ public class IndexQueryParserService extends AbstractIndexComponent {
     }
 
     public ParsedQuery parse(XContentParser parser) {
+        return parse(cache.get(), parser);
+    }
+
+    public ParsedQuery parse(QueryParseContext context, XContentParser parser) {
         try {
-            return parse(cache.get(), parser);
+            return innerParse(context, parser);
         } catch (IOException e) {
             throw new QueryParsingException(index, "Failed to parse", e);
         }

@@ -325,7 +333,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         throw new QueryParsingException(index(), "Required query is missing");
     }
 
-    private ParsedQuery parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
+    private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
         parseContext.reset(parser);
         try {
             if (strict) {
@@ -120,7 +120,7 @@ public class NumericRangeFilterParser implements FilterParser {
         if (!(mapper instanceof NumberFieldMapper)) {
             throw new QueryParsingException(parseContext.index(), "Field [" + fieldName + "] is not a numeric type");
         }
-        Filter filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext);
+        Filter filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext, from, to, includeLower, includeUpper, parseContext);
 
         if (cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
@@ -34,10 +34,10 @@ import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.cache.IndexCache;
 import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
+import org.elasticsearch.index.cache.query.parser.QueryParserCache;
 import org.elasticsearch.index.engine.IndexEngine;
-import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.FieldMappers;
 import org.elasticsearch.index.mapper.MapperService;

@@ -88,10 +88,17 @@ public class QueryParseContext {
 
     private EnumSet<ParseField.Flag> parseFlags = ParseField.EMPTY_FLAGS;
 
+    private final boolean disableCaching;
+
     public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) {
+        this(index, indexQueryParser, false);
+    }
+
+    public QueryParseContext(Index index, IndexQueryParserService indexQueryParser, boolean disableCaching) {
         this.index = index;
         this.indexQueryParser = indexQueryParser;
+        this.propagateNoCache = disableCaching;
+        this.disableCaching = disableCaching;
     }
 
     public void parseFlags(EnumSet<ParseField.Flag> parseFlags) {

@@ -150,12 +157,8 @@ public class QueryParseContext {
         return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity() : null;
     }
 
-    public IndexCache indexCache() {
-        return indexQueryParser.indexCache;
-    }
-
-    public IndexFieldDataService fieldData() {
-        return indexQueryParser.fieldDataService;
+    public QueryParserCache queryParserCache() {
+        return indexQueryParser.indexCache.queryParserCache();
     }
 
     public String defaultField() {

@@ -175,7 +178,7 @@ public class QueryParseContext {
         if (filter == null) {
             return null;
         }
-        if (this.propagateNoCache || filter instanceof NoCacheFilter) {
+        if (this.disableCaching || this.propagateNoCache || filter instanceof NoCacheFilter) {
             return filter;
         }
         if (cacheKey != null) {

@@ -184,6 +187,14 @@ public class QueryParseContext {
         return indexQueryParser.indexCache.filter().cache(filter);
     }
 
+    public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper<?> mapper) {
+        if (disableCaching) {
+            return indexQueryParser.fieldDataService.getForFieldDirect(mapper);
+        } else {
+            return indexQueryParser.fieldDataService.getForField(mapper);
+        }
+    }
+
     public void addNamedFilter(String name, Filter filter) {
         namedFilters.put(name, filter);
     }

@@ -342,7 +353,7 @@ public class QueryParseContext {
             return current.lookup();
         }
         if (lookup == null) {
-            lookup = new SearchLookup(mapperService(), fieldData(), null);
+            lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null);
         }
         return lookup;
     }
@@ -210,7 +210,7 @@ public class QueryStringQueryParser implements QueryParser {
         }
 
         qpSettings.queryTypes(parseContext.queryTypes());
-        Query query = parseContext.indexCache().queryParserCache().get(qpSettings);
+        Query query = parseContext.queryParserCache().get(qpSettings);
         if (query != null) {
             if (queryName != null) {
                 parseContext.addNamedQuery(queryName, query);

@@ -232,7 +232,7 @@ public class QueryStringQueryParser implements QueryParser {
         if (query instanceof BooleanQuery) {
             Queries.applyMinimumShouldMatch((BooleanQuery) query, qpSettings.minimumShouldMatch());
         }
-        parseContext.indexCache().queryParserCache().put(qpSettings, query);
+        parseContext.queryParserCache().put(qpSettings, query);
         if (queryName != null) {
             parseContext.addNamedQuery(queryName, query);
         }

@@ -143,9 +143,9 @@ public class RangeFilterParser implements FilterParser {
                 throw new QueryParsingException(parseContext.index(), "[range] filter field [" + fieldName + "] is not a numeric type");
             }
             if (mapper instanceof DateFieldMapper) {
-                filter = ((DateFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext, explicitlyCached);
+                filter = ((DateFieldMapper) mapper).rangeFilter(parseContext, from, to, includeLower, includeUpper, parseContext, explicitlyCached);
             } else {
-                filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext);
+                filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext, from, to, includeLower, includeUpper, parseContext);
             }
         } else {
             throw new QueryParsingException(parseContext.index(), "[range] filter doesn't support [" + execution + "] execution");

@@ -214,7 +214,7 @@ public class TermsFilterParser implements FilterParser {
             return Queries.MATCH_NO_FILTER;
         }
 
-        filter = fieldMapper.termsFilter(parseContext.fieldData(), terms, parseContext);
+        filter = fieldMapper.termsFilter(parseContext, terms, parseContext);
         if (cache != null && cache) {
             filter = parseContext.cacheFilter(filter, cacheKey);
         }

@@ -136,7 +136,7 @@ public class TopChildrenQueryParser implements QueryParser {
         innerQuery.setBoost(boost);
         // wrap the query with type query
         innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
-        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
+        ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
         TopChildrenQuery query = new TopChildrenQuery(parentChildIndexFieldData, innerQuery, childType, parentType, scoreType, factor, incrementalFactor, parseContext.cacheRecycler(), nonNestedDocsFilter);
         if (queryName != null) {
             parseContext.addNamedFilter(queryName, new CustomQueryWrappingFilter(query));
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.query.functionscore;
 
-import com.sun.swing.internal.plaf.metal.resources.metal;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.ComplexExplanation;
 import org.apache.lucene.search.Explanation;

@@ -50,7 +49,6 @@ import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
 import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
 import org.elasticsearch.search.MultiValueMode;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;

@@ -205,7 +203,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
                 throw new ElasticsearchParseException("Both " + DecayFunctionBuilder.SCALE + "and " + DecayFunctionBuilder.ORIGIN
                         + " must be set for numeric fields.");
             }
-            IndexNumericFieldData<?> numericFieldData = parseContext.fieldData().getForField(mapper);
+            IndexNumericFieldData<?> numericFieldData = parseContext.getForField(mapper);
             return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
         }

@@ -237,7 +235,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
             }
             double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
             double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
-            IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
+            IndexGeoPointFieldData<?> indexFieldData = parseContext.getForField(mapper);
             return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
 
         }

@@ -277,7 +275,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
             double scale = val.getMillis();
             val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24));
             double offset = val.getMillis();
-            IndexNumericFieldData<?> numericFieldData = parseContext.fieldData().getForField(dateFieldMapper);
+            IndexNumericFieldData<?> numericFieldData = parseContext.getForField(dateFieldMapper);
             return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
         }
@@ -27,13 +27,13 @@ import org.elasticsearch.common.unit.ByteSizeValue;
 /**
  * Class that returns a breaker that never breaks
  */
-public class DummyCircuitBreakerService implements CircuitBreakerService {
+public class NoneCircuitBreakerService implements CircuitBreakerService {
 
-    private final ESLogger logger = Loggers.getLogger(DummyCircuitBreakerService.class);
+    private final ESLogger logger = Loggers.getLogger(NoneCircuitBreakerService.class);
 
     private final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue(Long.MAX_VALUE), 0.0, logger);
 
-    public DummyCircuitBreakerService() {}
+    public NoneCircuitBreakerService() {}
 
     @Override
     public MemoryCircuitBreaker getBreaker() {
@@ -28,7 +28,6 @@ import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.util.BigArrays;

@@ -210,13 +209,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     protected void doClose() {
-        try (Releasable releasable = Releasables.wrap(engineSearcher, docSearcher)) {
-            if (docSearcher != null) {
-                IndexReader indexReader = docSearcher.reader();
-                fieldDataService.clear(indexReader);
-                indexService.cache().clear(indexReader);
-            }
-        }
+        Releasables.close(engineSearcher, docSearcher);
     }
 
     @Override
@@ -37,7 +37,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
-import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
+import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
 
 import java.util.Random;

@@ -145,7 +145,7 @@ public class LongFieldDataBenchmark {
         indexWriter.close();
 
         final DirectoryReader dr = DirectoryReader.open(dir);
-        final IndexFieldDataService fds = new IndexFieldDataService(new Index("dummy"), new DummyCircuitBreakerService());
+        final IndexFieldDataService fds = new IndexFieldDataService(new Index("dummy"), new NoneCircuitBreakerService());
         final LongFieldMapper mapper = new LongFieldMapper.Builder(fieldName).build(new BuilderContext(null, new ContentPath(1)));
         final IndexNumericFieldData<AtomicNumericFieldData> fd = fds.getForField(mapper);
         final long start = System.nanoTime();

@@ -45,8 +45,10 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
 import org.elasticsearch.index.settings.IndexSettingsModule;
 import org.elasticsearch.index.similarity.SimilarityModule;
 import org.elasticsearch.indices.InvalidAliasNameException;
+import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.indices.query.IndicesQueriesModule;
+import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
-import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
 import org.elasticsearch.indices.query.IndicesQueriesModule;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.test.ElasticsearchTestCase;

@@ -87,7 +89,7 @@ public class IndexAliasesServiceTests extends ElasticsearchTestCase {
                     @Override
                     protected void configure() {
                         bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
-                        bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
+                        bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
                     }
                 }
         ).createInjector();
@@ -1,95 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.cache.filter;
-
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.*;
-import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
-import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
-import org.elasticsearch.common.lucene.search.XFilteredQuery;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.cache.filter.none.NoneFilterCache;
-import org.elasticsearch.test.ElasticsearchTestCase;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
-import static org.hamcrest.Matchers.equalTo;
-
-/**
- *
- */
-public class FilterCacheTests extends ElasticsearchTestCase {
-
-    @Test
-    public void testNoCache() throws Exception {
-        verifyCache(new NoneFilterCache(new Index("test"), EMPTY_SETTINGS));
-    }
-
-    private void verifyCache(FilterCache filterCache) throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
-        DirectoryReader reader = DirectoryReader.open(indexWriter, true);
-
-        for (int i = 0; i < 100; i++) {
-            Document document = new Document();
-            document.add(new TextField("id", Integer.toString(i), Field.Store.YES));
-            indexWriter.addDocument(document);
-        }
-
-        reader = refreshReader(reader);
-        IndexSearcher searcher = new IndexSearcher(reader);
-        assertThat(Lucene.count(searcher, new ConstantScoreQuery(filterCache.cache(new TermFilter(new Term("id", "1"))))), equalTo(1l));
-        assertThat(Lucene.count(searcher, new XFilteredQuery(new MatchAllDocsQuery(), filterCache.cache(new TermFilter(new Term("id", "1"))))), equalTo(1l));
-
-        indexWriter.deleteDocuments(new Term("id", "1"));
-        reader = refreshReader(reader);
-        searcher = new IndexSearcher(reader);
-        TermFilter filter = new TermFilter(new Term("id", "1"));
-        Filter cachedFilter = filterCache.cache(filter);
-        long constantScoreCount = filter == cachedFilter ? 0 : 1;
-        // sadly, when caching based on cacheKey with NRT, this fails, that's why we have DeletionAware one
-        assertThat(Lucene.count(searcher, new ConstantScoreQuery(cachedFilter)), equalTo(constantScoreCount));
-        assertThat(Lucene.count(searcher, new XConstantScoreQuery(cachedFilter)), equalTo(0l));
-        assertThat(Lucene.count(searcher, new XFilteredQuery(new MatchAllDocsQuery(), cachedFilter)), equalTo(0l));
-
-        indexWriter.close();
-    }
-
-    private DirectoryReader refreshReader(DirectoryReader reader) throws IOException {
-        IndexReader oldReader = reader;
-        reader = DirectoryReader.openIfChanged(reader);
-        if (reader != oldReader) {
-            oldReader.close();
-        }
-        return reader;
-    }
-}
@ -54,7 +54,7 @@ import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
|
|||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||
import org.elasticsearch.index.similarity.SimilarityModule;
|
||||
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
|
||||
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
|
||||
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
@ -421,7 +421,7 @@ public class CodecTests extends ElasticsearchLuceneTestCase {
|
|||
.add(new AbstractModule() {
|
||||
@Override
|
||||
protected void configure() {
|
||||
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
|
||||
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
|
||||
}
|
||||
})
|
||||
.createInjector();
|
||||
|
|
|
@@ -28,7 +28,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -86,12 +86,12 @@ public abstract class AbstractFieldDataTests extends ElasticsearchTestCase {

@Before
public void setup() throws Exception {
CircuitBreakerService circuitBreakerService = new DummyCircuitBreakerService();
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
indicesFieldDataCache = new IndicesFieldDataCache(
ImmutableSettings.Builder.EMPTY_SETTINGS,
new IndicesFieldDataCacheListener(circuitBreakerService)
);
ifdService = new IndexFieldDataService(new Index("test"), circuitBreakerService, indicesFieldDataCache);
ifdService = new IndexFieldDataService(new Index("test"), ImmutableSettings.builder().put("index.fielddata.cache", "none").build(), indicesFieldDataCache, circuitBreakerService, new IndicesFieldDataCacheListener(circuitBreakerService));
MapperService mapperService = MapperTestUtils.newMapperService(ifdService.index(), ImmutableSettings.Builder.EMPTY_SETTINGS);
ifdService.setIndexService(new StubIndexService(mapperService));
// LogByteSizeMP to preserve doc ID order

@@ -535,7 +535,7 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
public void testGlobalOrdinalsGetRemovedOnceIndexReaderCloses() throws Exception {
fillExtendedMvSet();
refreshReader();
FieldDataType fieldDataType = new FieldDataType("string", ImmutableSettings.builder().put("global_values", "fixed"));
FieldDataType fieldDataType = new FieldDataType("string", ImmutableSettings.builder().put("global_values", "fixed").put("cache", "node"));
IndexFieldData.WithOrdinals ifd = getForField(fieldDataType, "value");
IndexFieldData.WithOrdinals globalOrdinals = ifd.loadGlobal(topLevelReader);
assertThat(ifd.loadGlobal(topLevelReader), sameInstance(globalOrdinals));

@@ -32,7 +32,7 @@ import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.index.service.StubIndexService;
@@ -49,7 +49,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {

@SuppressWarnings("unchecked")
public void testGetForFieldDefaults() {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new NoneCircuitBreakerService());
MapperService mapperService = MapperTestUtils.newMapperService(ifdService.index(), ImmutableSettings.Builder.EMPTY_SETTINGS);
ifdService.setIndexService(new StubIndexService(mapperService));
for (boolean docValues : Arrays.asList(true, false)) {
@@ -100,7 +100,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {

@SuppressWarnings("unchecked")
public void testByPassDocValues() {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new NoneCircuitBreakerService());
MapperService mapperService = MapperTestUtils.newMapperService(ifdService.index(), ImmutableSettings.Builder.EMPTY_SETTINGS);
ifdService.setIndexService(new StubIndexService(mapperService));
final BuilderContext ctx = new BuilderContext(null, new ContentPath(1));
@@ -133,7 +133,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {
}

public void testChangeFieldDataFormat() throws Exception {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new NoneCircuitBreakerService());
MapperService mapperService = MapperTestUtils.newMapperService(ifdService.index(), ImmutableSettings.Builder.EMPTY_SETTINGS);
ifdService.setIndexService(new StubIndexService(mapperService));
final BuilderContext ctx = new BuilderContext(null, new ContentPath(1));

@@ -37,7 +37,7 @@ import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;

/**
 *
@@ -59,7 +59,7 @@ public class MapperTestUtils {
}

public static MapperService newMapperService(Index index, Settings indexSettings) {
return new MapperService(index, indexSettings, new Environment(), newAnalysisService(), new IndexFieldDataService(index, new DummyCircuitBreakerService()),
return new MapperService(index, indexSettings, new Environment(), newAnalysisService(), new IndexFieldDataService(index, new NoneCircuitBreakerService()),
new PostingsFormatService(index), new DocValuesFormatService(index), newSimilarityLookupService());
}

@@ -54,7 +54,7 @@ import org.elasticsearch.index.search.child.TestSearchContext;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.search.internal.SearchContext;
@@ -110,7 +110,7 @@ public class IndexQueryParserFilterCachingTests extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -69,7 +69,7 @@ import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService.Like
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -133,7 +133,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -43,7 +43,7 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -89,7 +89,7 @@ public class TemplateQueryParserTest extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -38,8 +38,10 @@ import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -86,7 +88,7 @@ public class IndexQueryParserModuleTests extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -39,8 +39,10 @@ import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -83,7 +85,7 @@ public class IndexQueryParserPlugin2Tests extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -39,8 +39,10 @@ import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityModule;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -92,7 +94,7 @@ public class IndexQueryParserPluginTests extends ElasticsearchTestCase {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
).createInjector();

@@ -22,6 +22,7 @@ package org.elasticsearch.index.search;
import com.carrotsearch.hppc.DoubleOpenHashSet;
import com.carrotsearch.hppc.LongOpenHashSet;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.google.common.collect.ImmutableSet;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.*;
@@ -39,7 +40,15 @@ import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.index.query.FilterParser;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.index.service.StubIndexService;
@@ -57,6 +66,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
 */
public class FieldDataTermsFilterTests extends ElasticsearchTestCase {

protected QueryParseContext parseContext;
protected IndexFieldDataService ifdService;
protected IndexWriter writer;
protected AtomicReader reader;
@@ -79,9 +89,20 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
.build(new Mapper.BuilderContext(null, new ContentPath(1)));

// create index and fielddata service
ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
Index index = new Index("test");
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(
ImmutableSettings.Builder.EMPTY_SETTINGS,
new IndicesFieldDataCacheListener(circuitBreakerService)
);
ifdService = new IndexFieldDataService(index, ImmutableSettings.builder().put("index.fielddata.cache", "none").build(), indicesFieldDataCache, circuitBreakerService, new IndicesFieldDataCacheListener(circuitBreakerService));
MapperService mapperService = MapperTestUtils.newMapperService(ifdService.index(), ImmutableSettings.Builder.EMPTY_SETTINGS);
ifdService.setIndexService(new StubIndexService(mapperService));
IndexQueryParserService parserService = new IndexQueryParserService(
index, ImmutableSettings.EMPTY, new IndicesQueriesRegistry(ImmutableSettings.EMPTY, ImmutableSet.<QueryParser>of(), ImmutableSet.<FilterParser>of()),
null, null, null, mapperService, null, ifdService, null, null, null, null
);
parseContext = new QueryParseContext(index, parserService);
writer = new IndexWriter(new RAMDirectory(),
new IndexWriterConfig(Lucene.VERSION, new StandardAnalyzer(Lucene.VERSION)));
@@ -142,7 +163,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
// filter from mapper
result.clear(0, size);
assertThat(result.cardinality(), equalTo(0));
result.or(strMapper.termsFilter(ifdService, cTerms, null)
result.or(strMapper.termsFilter(parseContext, cTerms, null)
.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
assertThat(result.cardinality(), equalTo(docs.size()));
for (int i = 0; i < reader.maxDoc(); i++) {
@@ -193,7 +214,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
// filter from mapper
result.clear(0, size);
assertThat(result.cardinality(), equalTo(0));
result.or(lngMapper.termsFilter(ifdService, cTerms, null)
result.or(lngMapper.termsFilter(parseContext, cTerms, null)
.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
assertThat(result.cardinality(), equalTo(docs.size()));
for (int i = 0; i < reader.maxDoc(); i++) {
@@ -232,7 +253,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
// filter from mapper
result.clear(0, size);
assertThat(result.cardinality(), equalTo(0));
result.or(dblMapper.termsFilter(ifdService, cTerms, null)
result.or(dblMapper.termsFilter(parseContext, cTerms, null)
.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
assertThat(result.cardinality(), equalTo(docs.size()));
for (int i = 0; i < reader.maxDoc(); i++) {

@@ -54,7 +54,7 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.cache.filter.IndicesFilterCache;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
@@ -354,7 +354,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
final BigArrays bigArrays = new BigArrays(ImmutableSettings.EMPTY, pageCacheRecycler);
Settings settings = ImmutableSettings.EMPTY;
MapperService mapperService = MapperTestUtils.newMapperService(index, settings);
IndexFieldDataService indexFieldDataService = new IndexFieldDataService(index, new DummyCircuitBreakerService());
IndexFieldDataService indexFieldDataService = new IndexFieldDataService(index, new NoneCircuitBreakerService());
final IndexService indexService = new StubIndexService(mapperService);
indexFieldDataService.setIndexService(indexService);
// Id_cache is now registered as document type listener, so we can add mappings.

@@ -35,7 +35,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperServiceModule;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
@@ -209,7 +209,7 @@ public class SimilarityTests extends ElasticsearchTestCase {
.add(new AbstractModule() {
@Override
protected void configure() {
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
})
.createInjector();

@@ -1646,7 +1646,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
.startObject("doc").field("message", "A new bonsai tree ").endObject()
.endObject())
.execute().actionGet();
assertThat(percolate.getFailedShards(), equalTo(0));
assertNoFailures(percolate);
assertMatchCount(percolate, 0l);
}