Refactoring: Rename FieldMapper.termsFilter to fieldDataTermsFilter.

FieldMapper has two `termsFilter` methods:
`Filter termsFilter(List values, @Nullable QueryParseContext context)`, which is supposed
to work on the inverted index, and
`Filter termsFilter(QueryParseContext parseContext, List values, @Nullable QueryParseContext context)`, which is
supposed to work on field data. Let's rename the second one to
`fieldDataTermsFilter` and remove the unused `QueryParseContext` parameter.
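
For illustration, a minimal before/after sketch of the two call shapes (not part of the commit itself; it assumes an Elasticsearch 1.x source tree on the classpath, and the class and variable names `TermsFilterCallSite`, `mapper`, `terms`, and `parseContext` are hypothetical):

    import java.util.List;

    import org.apache.lucene.search.Filter;
    import org.elasticsearch.index.mapper.FieldMapper;
    import org.elasticsearch.index.query.QueryParseContext;

    // Hypothetical helper showing both variants after this commit.
    public class TermsFilterCallSite {

        // Inverted-index variant: signature unchanged by this commit.
        static Filter invertedIndexTerms(FieldMapper<?> mapper, List terms, QueryParseContext parseContext) {
            return mapper.termsFilter(terms, parseContext);
        }

        // Field-data variant: before this commit the call was
        //   mapper.termsFilter(parseContext, terms, null);
        static Filter fieldDataTerms(FieldMapper<?> mapper, List terms, QueryParseContext parseContext) {
            return mapper.fieldDataTermsFilter(terms, parseContext);
        }
    }

Note how the field-data variant now takes the single `QueryParseContext` it actually uses (`context.getForField(...)` in the new implementations below).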

Close #6888
Adrien Grand 2014-07-16 12:36:55 +02:00
parent 7223e9959d
commit 9714dd55c2
5 changed files with 10 additions and 10 deletions


@@ -248,7 +248,7 @@ public interface FieldMapper<T> extends Mapper {
     Filter termsFilter(List values, @Nullable QueryParseContext context);
-    Filter termsFilter(QueryParseContext parseContext, List values, @Nullable QueryParseContext context);
+    Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context);
     Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);


@@ -490,7 +490,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
      * A terms filter based on the field data cache
      */
     @Override
-    public Filter termsFilter(QueryParseContext fieldDataService, List values, @Nullable QueryParseContext context) {
+    public Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context) {
         // create with initial size large enough to avoid rehashing
         ObjectOpenHashSet<BytesRef> terms =
                 new ObjectOpenHashSet<>((int) (values.size() * (1 + ObjectOpenHashSet.DEFAULT_LOAD_FACTOR)));
@@ -498,7 +498,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
             terms.add(indexedValueForSearch(values.get(i)));
         }
-        return FieldDataTermsFilter.newBytes(fieldDataService.getForField(this), terms);
+        return FieldDataTermsFilter.newBytes(context.getForField(this), terms);
     }

     @Override


@@ -288,8 +288,8 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldMapper<T> {
      * A terms filter based on the field data cache for numeric fields.
      */
     @Override
-    public Filter termsFilter(QueryParseContext fieldDataService, List values, @Nullable QueryParseContext context) {
-        IndexNumericFieldData fieldData = fieldDataService.getForField(this);
+    public Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context) {
+        IndexNumericFieldData fieldData = context.getForField(this);
         if (fieldData.getNumericType().isFloatingPoint()) {
             // create with initial size large enough to avoid rehashing
             DoubleOpenHashSet terms =


@@ -214,7 +214,7 @@ public class TermsFilterParser implements FilterParser {
             return Queries.MATCH_NO_FILTER;
         }
-        filter = fieldMapper.termsFilter(parseContext, terms, parseContext);
+        filter = fieldMapper.fieldDataTermsFilter(terms, parseContext);
         if (cache != null && cache) {
            filter = parseContext.cacheFilter(filter, cacheKey);
         }


@@ -40,11 +40,11 @@ import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
-import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
 import org.elasticsearch.index.query.FilterParser;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryParser;
+import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.fielddata.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
@@ -163,7 +163,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
         // filter from mapper
         result.clear(0, size);
         assertThat(result.cardinality(), equalTo(0));
-        result.or(strMapper.termsFilter(parseContext, cTerms, null)
+        result.or(strMapper.fieldDataTermsFilter(cTerms, parseContext)
                 .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
         assertThat(result.cardinality(), equalTo(docs.size()));
         for (int i = 0; i < reader.maxDoc(); i++) {
@@ -214,7 +214,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
         // filter from mapper
         result.clear(0, size);
         assertThat(result.cardinality(), equalTo(0));
-        result.or(lngMapper.termsFilter(parseContext, cTerms, null)
+        result.or(lngMapper.fieldDataTermsFilter(cTerms, parseContext)
                 .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
         assertThat(result.cardinality(), equalTo(docs.size()));
         for (int i = 0; i < reader.maxDoc(); i++) {
@@ -253,7 +253,7 @@ public class FieldDataTermsFilterTests extends ElasticsearchTestCase {
         // filter from mapper
         result.clear(0, size);
         assertThat(result.cardinality(), equalTo(0));
-        result.or(dblMapper.termsFilter(parseContext, cTerms, null)
+        result.or(dblMapper.fieldDataTermsFilter(cTerms, parseContext)
                 .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
         assertThat(result.cardinality(), equalTo(docs.size()));
         for (int i = 0; i < reader.maxDoc(); i++) {