rename fieldQuery/fieldFilter to termQuery/termFilter in mappers

Shay Banon 2012-12-28 13:48:48 -08:00
parent 12239169b1
commit 64a01c28c3
25 changed files with 85 additions and 93 deletions
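In caller terms the change is a pure rename: every fieldQuery/fieldFilter call on a mapper becomes termQuery/termFilter with the same arguments and return types. A minimal sketch of a hypothetical call site (the helper class and method names are invented for illustration; FieldMapper and QueryParseContext are the Elasticsearch types touched by this diff):

import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.query.QueryParseContext;

public class RenameSketch {
    // hypothetical call site; mapper and context come from wherever the parser resolved them
    static Query buildTermQuery(FieldMapper<?> mapper, String value, QueryParseContext context) {
        // before this commit: mapper.fieldQuery(value, context)
        return mapper.termQuery(value, context);
    }

    static Filter buildTermFilter(FieldMapper<?> mapper, String value, QueryParseContext context) {
        // before this commit: mapper.fieldFilter(value, context)
        return mapper.termFilter(value, context);
    }
}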

View File

@ -215,17 +215,17 @@ public class MapperQueryParser extends QueryParser {
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
Query query = null;
if (currentMapper.useFieldQueryWithQueryString()) {
if (currentMapper.useTermQueryWithQueryString()) {
try {
if (fieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{fieldMappers.docMapper().type()});
try {
query = currentMapper.fieldQuery(queryText, parseContext);
query = currentMapper.termQuery(queryText, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
query = currentMapper.fieldQuery(queryText, parseContext);
query = currentMapper.termQuery(queryText, parseContext);
}
} catch (RuntimeException e) {
if (settings.lenient()) {
@ -471,7 +471,7 @@ public class MapperQueryParser extends QueryParser {
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
Query query = null;
if (currentMapper.useFieldQueryWithQueryString()) {
if (currentMapper.useTermQueryWithQueryString()) {
if (fieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{fieldMappers.docMapper().type()});
try {
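The query-string path above keeps the same shape after the rename: mappers that opt in via useTermQueryWithQueryString() get the raw text handed straight to termQuery(), everything else falls through to the usual analysis chain. A condensed sketch of that branch (type and error handling elided; buildAnalyzedQuery is a hypothetical stand-in for the analysis path):

Query query;
if (currentMapper != null && currentMapper.useTermQueryWithQueryString()) {
    // numbers, dates, booleans, _id, _type, _parent, ... build the query themselves
    query = currentMapper.termQuery(queryText, parseContext);
} else {
    // hypothetical: analyze queryText and build a term/phrase query from the tokens
    query = buildAnalyzedQuery(field, queryText);
}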

View File

@ -20,9 +20,7 @@
package org.elasticsearch.common;
/**
* A set of utlities around numbers.
*
*
* A set of utilities for numbers.
*/
public final class Numbers {

View File

@ -297,7 +297,7 @@ public class DocumentMapper implements ToXContent {
this.searchAnalyzer = searchAnalyzer;
this.searchQuoteAnalyzer = searchQuoteAnalyzer != null ? searchQuoteAnalyzer : searchAnalyzer;
this.typeFilter = typeMapper().fieldFilter(type, null);
this.typeFilter = typeMapper().termFilter(type, null);
if (rootMapper(ParentFieldMapper.class) != null) {
// mark the routing field mapper as required

View File

@ -142,11 +142,6 @@ public interface FieldMapper<T> {
*/
SimilarityProvider similarity();
/**
* Returns the value that will be used as a result for search. Can be only of specific types... .
*/
Object valueForSearch(Object value);
/**
* Returns the actual value of the field.
*/
@ -154,6 +149,11 @@ public interface FieldMapper<T> {
T valueFromString(String value);
/**
* Returns the value that will be used as a result for search. Can be only of specific types... .
*/
Object valueForSearch(Object value);
/**
* Returns the actual value of the field as string.
*/
@ -165,17 +165,21 @@ public interface FieldMapper<T> {
BytesRef indexedValue(String value);
/**
* Should the field query {@link #fieldQuery(String, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
* Should the field query {@link #termQuery(String, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
* field in query string.
*/
boolean useFieldQueryWithQueryString();
boolean useTermQueryWithQueryString();
/**
* A field query for the specified value.
*/
Query fieldQuery(String value, @Nullable QueryParseContext context);
Query termQuery(String value, @Nullable QueryParseContext context);
Filter fieldFilter(String value, @Nullable QueryParseContext context);
Filter termFilter(String value, @Nullable QueryParseContext context);
Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
Query fuzzyQuery(String value, String minSim, int prefixLength, int maxExpansions, boolean transpositions);
@ -192,18 +196,9 @@ public interface FieldMapper<T> {
/**
* A term query to use when parsing a query string. Can return <tt>null</tt>.
*/
@Nullable
Query queryStringTermQuery(Term term);
/**
* Constructs a range query based on the mapper.
*/
Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
/**
* Constructs a range query filter based on the mapper.
*/
Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
/**
* Null value filter, returns <tt>null</tt> if there is no null value associated with the field.
*/
@ -213,5 +208,4 @@ public interface FieldMapper<T> {
FieldDataType fieldDataType();
PostingsFormatProvider postingsFormatProvider();
}
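Stripped of javadoc and the unrelated member reshuffling, the renamed entry points on the interface now read roughly as below (a sketch; the @Nullable annotations and the remaining members shown above are elided):

public interface FieldMapper<T> {
    // term-level entry points after this commit (previously fieldQuery/fieldFilter)
    boolean useTermQueryWithQueryString();
    Query termQuery(String value, QueryParseContext context);
    Filter termFilter(String value, QueryParseContext context);
    Query rangeQuery(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, QueryParseContext context);
    Filter rangeFilter(String lowerTerm, String upperTerm, boolean includeLower, boolean includeUpper, QueryParseContext context);
}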

View File

@ -398,17 +398,17 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
}
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return false;
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValue(value)));
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
return new TermFilter(names().createIndexNameTerm(indexedValue(value)));
}
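The defaults above amount to a plain Lucene term query/filter over the field's index name. A rough standalone equivalent (field name and value are invented for illustration; TermFilter is the Elasticsearch helper, the rest is stock Lucene):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class TermQuerySketch {
    public static void main(String[] args) {
        Term t = new Term("user", "kimchy");   // ~ names().createIndexNameTerm(indexedValue(value))
        Query q = new TermQuery(t);            // what the default termQuery(value, context) returns
        System.out.println(q);                 // prints user:kimchy
    }
}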

View File

@ -160,7 +160,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
}
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return true;
}

View File

@ -183,7 +183,7 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
@ -198,7 +198,7 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);

View File

@ -258,7 +258,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
long now = context == null ? System.currentTimeMillis() : context.nowInMillis();
long lValue = dateMathParser.parse(value, now);
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
@ -275,7 +275,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
long now = context == null ? System.currentTimeMillis() : context.nowInMillis();
long lValue = dateMathParser.parse(value, now);
return NumericRangeFilter.newLongRange(names.indexName(), precisionStep,

View File

@ -181,7 +181,7 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
double dValue = Double.parseDouble(value);
return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep,
dValue, dValue, true, true);
@ -196,7 +196,7 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
double dValue = Double.parseDouble(value);
return NumericRangeFilter.newDoubleRange(names.indexName(), precisionStep,
dValue, dValue, true, true);

View File

@ -179,7 +179,7 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
float fValue = Float.parseFloat(value);
return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
fValue, fValue, true, true);
@ -194,7 +194,7 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
float fValue = Float.parseFloat(value);
return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
fValue, fValue, true, true);

View File

@ -185,7 +185,7 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
@ -200,7 +200,7 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);

View File

@ -185,7 +185,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
long iValue = Long.parseLong(value);
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
@ -200,7 +200,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
long iValue = Long.parseLong(value);
return NumericRangeFilter.newLongRange(names.indexName(), precisionStep,
iValue, iValue, true, true);

View File

@ -206,7 +206,7 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
* Use the field query created here when matching on numbers.
*/
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return true;
}
@ -215,7 +215,7 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
* way to execute it.
*/
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
return rangeQuery(value, value, true, true, context);
}
@ -230,7 +230,7 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
* way to execute it.
*/
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
return rangeFilter(value, value, true, true, context);
}
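For numeric fields a "term" lookup cannot be a literal term match (the values are indexed as trie-encoded numerics), which is why termQuery/termFilter above keep delegating to a range with identical, inclusive bounds. A standalone Lucene 4 sketch of what the integer case produces (field name, value and precision step are illustrative):

import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;

public class NumericTermSketch {
    public static void main(String[] args) {
        int v = Integer.parseInt("42");
        // lower == upper and both ends inclusive: matches exactly the value 42
        Query q = NumericRangeQuery.newIntRange("age", 4, v, v, true, true);
        System.out.println(q);
    }
}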

View File

@ -185,7 +185,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);
@ -200,7 +200,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
int iValue = Integer.parseInt(value);
return NumericRangeFilter.newIntRange(names.indexName(), precisionStep,
iValue, iValue, true, true);

View File

@ -253,7 +253,7 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
if (nullValue == null) {
return null;
}
return fieldFilter(nullValue, null);
return termFilter(nullValue, null);
}
@Override

View File

@ -163,7 +163,7 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements Interna
}
@Override
public Query fieldQuery(String value, QueryParseContext context) {
public Query termQuery(String value, QueryParseContext context) {
return queryStringTermQuery(names().createIndexNameTerm(value));
}

View File

@ -149,14 +149,14 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
}
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
if (fieldType.indexed() || context == null) {
return super.fieldQuery(value, context);
return super.termQuery(value, context);
}
UidFilter filter = new UidFilter(context.queryTypes(), ImmutableList.of(value));
// no need for constant score filter, since we don't cache the filter, and it always takes deletes into account
@ -164,9 +164,9 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
if (fieldType.indexed() || context == null) {
return super.fieldFilter(value, context);
return super.termFilter(value, context);
}
return new UidFilter(context.queryTypes(), ImmutableList.of(value));
}

View File

@ -206,17 +206,17 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
public Query termQuery(String value, @Nullable QueryParseContext context) {
if (context == null) {
return super.fieldQuery(value, context);
return super.termQuery(value, context);
}
return new ConstantScoreQuery(fieldFilter(value, context));
return new ConstantScoreQuery(termFilter(value, context));
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
if (context == null) {
return super.fieldFilter(value, context);
return super.termFilter(value, context);
}
// we use all types, cause we don't know if its exact or not...
Term[] typesTerms = new Term[context.mapperService().types().size()];
@ -231,7 +231,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
* We don't need to analyzer the text, and we need to convert it to UID...
*/
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return true;
}

View File

@ -125,7 +125,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements Inte
}
@Override
public Filter fieldFilter(String value, @Nullable QueryParseContext context) {
public Filter termFilter(String value, @Nullable QueryParseContext context) {
if (!fieldType.indexed()) {
return new PrefixFilter(new Term(UidFieldMapper.NAME, Uid.typePrefix(value)));
}
@ -133,12 +133,12 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements Inte
}
@Override
public Query fieldQuery(String value, @Nullable QueryParseContext context) {
return new XConstantScoreQuery(context.cacheFilter(fieldFilter(value, context), null));
public Query termQuery(String value, @Nullable QueryParseContext context) {
return new XConstantScoreQuery(context.cacheFilter(termFilter(value, context), null));
}
@Override
public boolean useFieldQueryWithQueryString() {
public boolean useTermQueryWithQueryString() {
return true;
}
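When _type is not indexed, the filter above falls back to a prefix match on _uid, and termQuery simply wraps the cached filter in a constant-score query. A standalone sketch of that fallback filter; the type name is invented and the "type#id" layout of _uid (hence the trailing "#") is an assumption about this era of the codebase:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.PrefixFilter;

public class TypeFilterSketch {
    public static void main(String[] args) {
        // ~ new PrefixFilter(new Term(UidFieldMapper.NAME, Uid.typePrefix("my_type")))
        Filter f = new PrefixFilter(new Term("_uid", "my_type#"));  // assumes _uid is "type#id"
        System.out.println(f);
    }
}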

View File

@ -112,12 +112,12 @@ public class TermFilterParser implements FilterParser {
if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
try {
filter = smartNameFieldMappers.mapper().fieldFilter(value, parseContext);
filter = smartNameFieldMappers.mapper().termFilter(value, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
filter = smartNameFieldMappers.mapper().fieldFilter(value, parseContext);
filter = smartNameFieldMappers.mapper().termFilter(value, parseContext);
}
}
if (filter == null) {

View File

@ -93,12 +93,12 @@ public class TermQueryParser implements QueryParser {
if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
try {
query = smartNameFieldMappers.mapper().fieldQuery(value, parseContext);
query = smartNameFieldMappers.mapper().termQuery(value, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
query = smartNameFieldMappers.mapper().fieldQuery(value, parseContext);
query = smartNameFieldMappers.mapper().termQuery(value, parseContext);
}
}
if (query == null) {

View File

@ -132,7 +132,7 @@ public class TermsFilterParser implements FilterParser {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (String term : terms) {
boolFiler.add(parseContext.cacheFilter(fieldMapper.fieldFilter(term, parseContext), null), BooleanClause.Occur.SHOULD);
boolFiler.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null), BooleanClause.Occur.SHOULD);
}
} else {
for (String term : terms) {
@ -148,7 +148,7 @@ public class TermsFilterParser implements FilterParser {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (String term : terms) {
boolFiler.add(fieldMapper.fieldFilter(term, parseContext), BooleanClause.Occur.SHOULD);
boolFiler.add(fieldMapper.termFilter(term, parseContext), BooleanClause.Occur.SHOULD);
}
} else {
for (String term : terms) {
@ -164,7 +164,7 @@ public class TermsFilterParser implements FilterParser {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (String term : terms) {
filters.add(parseContext.cacheFilter(fieldMapper.fieldFilter(term, parseContext), null));
filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
}
} else {
for (String term : terms) {
@ -180,7 +180,7 @@ public class TermsFilterParser implements FilterParser {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (String term : terms) {
filters.add(fieldMapper.fieldFilter(term, parseContext));
filters.add(fieldMapper.termFilter(term, parseContext));
}
} else {
for (String term : terms) {
@ -196,7 +196,7 @@ public class TermsFilterParser implements FilterParser {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (String term : terms) {
filters.add(parseContext.cacheFilter(fieldMapper.fieldFilter(term, parseContext), null));
filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
}
} else {
for (String term : terms) {
@ -212,7 +212,7 @@ public class TermsFilterParser implements FilterParser {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (String term : terms) {
filters.add(fieldMapper.fieldFilter(term, parseContext));
filters.add(fieldMapper.termFilter(term, parseContext));
}
} else {
for (String term : terms) {

View File

@ -112,7 +112,7 @@ public class TermsQueryParser implements QueryParser {
BooleanQuery query = new BooleanQuery(disableCoord);
for (String value : values) {
if (mapper != null) {
query.add(new BooleanClause(mapper.fieldQuery(value, parseContext), BooleanClause.Occur.SHOULD));
query.add(new BooleanClause(mapper.termQuery(value, parseContext), BooleanClause.Occur.SHOULD));
} else {
query.add(new TermQuery(new Term(fieldName, value)), BooleanClause.Occur.SHOULD);
}

View File

@ -74,7 +74,7 @@ public class MatchQuery {
protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;
//LUCENE 4 UPGRADE we need a default value for this!
protected boolean transpositions = false;
protected MultiTermQuery.RewriteMethod rewriteMethod;
protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;
@ -114,7 +114,7 @@ public class MatchQuery {
public void setMaxExpansions(int maxExpansions) {
this.maxExpansions = maxExpansions;
}
public void setTranspositions(boolean transpositions) {
this.transpositions = transpositions;
}
@ -146,11 +146,11 @@ public class MatchQuery {
field = fieldName;
}
if (mapper != null && mapper.useFieldQueryWithQueryString()) {
if (mapper != null && mapper.useTermQueryWithQueryString()) {
if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
try {
return wrapSmartNameQuery(mapper.fieldQuery(text, parseContext), smartNameFieldMappers, parseContext);
return wrapSmartNameQuery(mapper.termQuery(text, parseContext), smartNameFieldMappers, parseContext);
} catch (RuntimeException e) {
if (lenient) {
return null;
@ -161,7 +161,7 @@ public class MatchQuery {
}
} else {
try {
return wrapSmartNameQuery(mapper.fieldQuery(text, parseContext), smartNameFieldMappers, parseContext);
return wrapSmartNameQuery(mapper.termQuery(text, parseContext), smartNameFieldMappers, parseContext);
} catch (RuntimeException e) {
if (lenient) {
return null;
@ -199,7 +199,7 @@ public class MatchQuery {
source = analyzer.tokenStream(field, new FastStringReader(text));
source.reset();
success = true;
} catch(IOException ex) {
} catch (IOException ex) {
//LUCENE 4 UPGRADE not sure what todo here really lucene 3.6 had a tokenStream that didn't throw an exc.
// success==false if we hit an exception
}
@ -390,8 +390,8 @@ public class MatchQuery {
}
String text = term.text();
//LUCENE 4 UPGRADE we need to document that this should now be an int rather than a float
int edits = FuzzyQuery.floatToEdits(Float.parseFloat(fuzziness),
text.codePointCount(0, text.length()));
int edits = FuzzyQuery.floatToEdits(Float.parseFloat(fuzziness),
text.codePointCount(0, text.length()));
FuzzyQuery query = new FuzzyQuery(term, edits, fuzzyPrefixLength, maxExpansions, transpositions);
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
@ -404,7 +404,7 @@ public class MatchQuery {
}
return new TermQuery(term);
}
private static BytesRef termToByteRef(CharTermAttribute attr, BytesRef ref) {
UnicodeUtil.UTF16toUTF8WithHash(attr.buffer(), 0, attr.length(), ref);
return ref;
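The fuzzy branch shown above (a whitespace-only change in this commit) converts the legacy float fuzziness into an edit distance before building the Lucene 4 FuzzyQuery. A standalone sketch; the term text and the fuzziness, prefix-length and max-expansions values are invented for illustration:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;

public class FuzzySketch {
    public static void main(String[] args) {
        String text = "kimchy";
        // roughly maps a 0..1 similarity onto an edit distance, scaled by the term length
        int edits = FuzzyQuery.floatToEdits(Float.parseFloat("0.7"),
                text.codePointCount(0, text.length()));
        FuzzyQuery query = new FuzzyQuery(new Term("user", text), edits, 0, 50, false);
        System.out.println(query);
    }
}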

View File

@ -47,25 +47,25 @@ public class DoubleIndexingDocTest {
IndexReader reader = IndexReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(mapper.mappers().smartName("field1").mapper().fieldQuery("value1", null), 10);
TopDocs topDocs = searcher.search(mapper.mappers().smartName("field1").mapper().termQuery("value1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field2").mapper().fieldQuery("1", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field2").mapper().termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field3").mapper().fieldQuery("1.1", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field3").mapper().termQuery("1.1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field4").mapper().fieldQuery("2010-01-01", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field4").mapper().termQuery("2010-01-01", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("1", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("2", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("2", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().fieldQuery("3", null), 10);
topDocs = searcher.search(mapper.mappers().smartName("field5").mapper().termQuery("3", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
}
}