Mappings: Refactor core index/query time properties into FieldType

Mappers are currently used at both index and query time to decide
how to "use" a field. For #8871, we need the index-wide view of
mappings to have a unified set of settings for each field of a given
name within the index.

This change moves all the current settings (and the methods defining
query-time behavior) into subclasses of FieldType. In a future PR,
this will allow storing the field type at the index level instead of
on mappers (which can still have settings that differ per document
type).

The change is quite large (I'm sorry). I could not see a way to
migrate to this more incrementally. I did leave out cutting callers
of the query methods over to the field type, as that can be done in
a follow-up.
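
As a concrete illustration of the direction this sets up, here is a small
sketch (not part of the commit; the wrapper class and helper names are
invented for the example, and most query-method callers still go through
the mapper until the follow-up). The accessors used are the ones this diff
introduces: FieldMapper.fieldType() returns a MappedFieldType that carries
names(), the analyzers, and the query factories.

import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.query.QueryParseContext;

// Sketch only: state and query behavior that used to hang off FieldMapper
// is now reached through its MappedFieldType.
class FieldTypeAccessExample {

    // Before this change, callers wrote e.g.:
    //   mapper.names().indexName();
    //   mapper.rangeQuery(from, to, true, true, parseContext);

    // After this change, the same information lives on the field type:
    static String indexName(FieldMapper mapper) {
        return mapper.fieldType().names().indexName();
    }

    static Query inclusiveRange(FieldMapper mapper, Object from, Object to, QueryParseContext parseContext) {
        return mapper.fieldType().rangeQuery(from, to, true, true, parseContext);
    }
}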
Author: Ryan Ernst
Date:   2015-05-29 17:07:04 +02:00
Parent: 29fbcd225b
Commit: 1f2c42fd0b
136 changed files with 3247 additions and 2558 deletions


@ -260,7 +260,7 @@ public class MapperQueryParser extends QueryParser {
}
}
if (query == null) {
query = super.getFieldQuery(currentMapper.names().indexName(), queryText, quoted);
query = super.getFieldQuery(currentMapper.fieldType().names().indexName(), queryText, quoted);
}
return query;
}
@ -372,7 +372,7 @@ public class MapperQueryParser extends QueryParser {
Query rangeQuery;
if (currentMapper instanceof DateFieldMapper && settings.timeZone() != null) {
DateFieldMapper dateFieldMapper = (DateFieldMapper) this.currentMapper;
rangeQuery = dateFieldMapper.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
rangeQuery = dateFieldMapper.fieldType().rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
} else {
rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
}
@ -508,7 +508,7 @@ public class MapperQueryParser extends QueryParser {
query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentMapper.names().indexName(), termStr);
query = getPossiblyAnalyzedPrefixQuery(currentMapper.fieldType().names().indexName(), termStr);
}
return query;
}
@ -644,7 +644,7 @@ public class MapperQueryParser extends QueryParser {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
}
indexedNameField = currentMapper.names().indexName();
indexedNameField = currentMapper.fieldType().names().indexName();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
}
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);


@ -113,8 +113,8 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction
if (fieldMapper.isNumeric()) {
throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
}
analyzer = fieldMapper.indexAnalyzer();
field = fieldMapper.names().indexName();
analyzer = fieldMapper.fieldType().indexAnalyzer();
field = fieldMapper.fieldType().names().indexName();
}
}


@ -179,7 +179,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
for (String field : request.fields()) {
if (Regex.isMatchAllPattern(field)) {
for (FieldMapper fieldMapper : allFieldMappers) {
addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
}
} else if (Regex.isSimpleMatchPattern(field)) {
// go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
@ -187,22 +187,22 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
Collection<FieldMapper> remainingFieldMappers = Lists.newLinkedList(allFieldMappers);
for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper fieldMapper = it.next();
if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) {
addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}
for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper fieldMapper = it.next();
if (Regex.simpleMatch(field, fieldMapper.names().indexName())) {
addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, request.includeDefaults());
if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) {
addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}
for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper fieldMapper = it.next();
if (Regex.simpleMatch(field, fieldMapper.names().shortName())) {
addFieldMapper(fieldMapper.names().shortName(), fieldMapper, fieldMappings, request.includeDefaults());
if (Regex.simpleMatch(field, fieldMapper.fieldType().names().shortName())) {
addFieldMapper(fieldMapper.fieldType().names().shortName(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}
@ -229,7 +229,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
builder.startObject();
fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS);
builder.endObject();
fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes()));
fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes()));
} catch (IOException e) {
throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
}


@ -289,7 +289,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
this.id = new Id(docMapper.idFieldMapper().path());
this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(),
docMapper.timestampFieldMapper().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(),
docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(),
docMapper.timestampFieldMapper().ignoreMissing());
this.hasParentField = docMapper.parentFieldMapper().active();
}


@ -728,7 +728,7 @@ public abstract class ShapeBuilder implements ToXContent {
Distance radius = null;
CoordinateNode node = null;
GeometryCollectionBuilder geometryCollections = null;
Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.orientation();
Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.fieldType().orientation();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {


@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
/**
*/


@ -32,6 +32,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -77,7 +78,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
/**
* The field name.
*/
FieldMapper.Names getFieldNames();
MappedFieldType.Names getFieldNames();
/**
* The field data type.


@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
/**
* A simple field data cache abstraction on the *index* level.
@ -47,9 +48,9 @@ public interface IndexFieldDataCache {
interface Listener {
void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage);
void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage);
void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
}
class None implements IndexFieldDataCache {


@ -32,6 +32,7 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
@ -46,6 +47,8 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import static org.elasticsearch.index.mapper.MappedFieldType.Names;
/**
*/
public class IndexFieldDataService extends AbstractIndexComponent {
@ -226,12 +229,12 @@ public class IndexFieldDataService extends AbstractIndexComponent {
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
final FieldMapper.Names fieldNames = mapper.names();
final FieldDataType type = mapper.fieldDataType();
final Names fieldNames = mapper.fieldType().names();
final FieldDataType type = mapper.fieldType().fieldDataType();
if (type == null) {
throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
}
final boolean docValues = mapper.hasDocValues();
final boolean docValues = mapper.fieldType().hasDocValues();
final String key = fieldNames.indexName();
IndexFieldData<?> fieldData = loadedFieldData.get(key);
if (fieldData == null) {


@ -26,7 +26,7 @@ import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
@ -62,7 +62,7 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index
}
@Override
public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
totalMetric.inc(ramUsage.ramBytesUsed());
String keyFieldName = fieldNames.indexName();
CounterMetric total = perFieldTotals.get(keyFieldName);
@ -79,7 +79,7 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index
}
@Override
public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
if (wasEvicted) {
evictionsMetric.inc();
}


@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.MultiValueMode;
import java.util.Collection;
@ -41,11 +42,11 @@ import java.util.Collections;
*/
public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable {
private final FieldMapper.Names fieldNames;
private final MappedFieldType.Names fieldNames;
private final FieldDataType fieldDataType;
private final long memorySizeInBytes;
protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
super(index, settings);
this.fieldNames = fieldNames;
this.fieldDataType = fieldDataType;
@ -68,7 +69,7 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen
}
@Override
public FieldMapper.Names getFieldNames() {
public MappedFieldType.Names getFieldNames() {
return fieldNames;
}


@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.util.Collection;
@ -38,7 +39,7 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
private final Atomic[] atomicReaders;
InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
super(index, settings, fieldNames, fieldDataType, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {


@ -30,6 +30,7 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException;
@ -38,11 +39,11 @@ import java.io.IOException;
*/
public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends AbstractIndexComponent implements IndexFieldData<FD> {
private final FieldMapper.Names fieldNames;
private final MappedFieldType.Names fieldNames;
protected final FieldDataType fieldDataType;
protected final IndexFieldDataCache cache;
public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
super(index, indexSettings);
this.fieldNames = fieldNames;
this.fieldDataType = fieldDataType;
@ -50,7 +51,7 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
}
@Override
public FieldMapper.Names getFieldNames() {
public MappedFieldType.Names getFieldNames() {
return this.fieldNames;
}


@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;


@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;


@ -25,7 +25,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BinaryDVAtomicFieldData> {


@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;


@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@ -67,8 +67,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final Names fieldNames = mapper.names();
return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType());
final Names fieldNames = mapper.fieldType().names();
return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
}
}


@ -25,7 +25,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.search.MultiValueMode;
@ -42,7 +42,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
public IndexFieldData<AtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
return new DisabledIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
return new DisabledIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache);
}
}


@ -31,7 +31,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
@ -93,8 +94,8 @@ public abstract class DocValuesIndexFieldData {
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
final FieldMapper.Names fieldNames = mapper.names();
final Settings fdSettings = mapper.fieldDataType().getSettings();
final Names fieldNames = mapper.fieldType().names();
final Settings fdSettings = mapper.fieldType().fieldDataType().getSettings();
final Map<String, Settings> filter = fdSettings.getGroups("filter");
if (filter != null && !filter.isEmpty()) {
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
@ -102,19 +103,19 @@ public abstract class DocValuesIndexFieldData {
if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
assert numericType == null;
return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType());
return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
} else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
assert !numericType.isFloatingPoint();
return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldDataType());
return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
} else if (numericType != null) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) {
return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType());
return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
} else {
// prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY
return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType());
return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
}
} else {
return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldDataType());
return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldType().fieldDataType());
}
}


@ -53,6 +53,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorS
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -74,11 +75,11 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new DoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
return new DoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
}
}
public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;


@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -48,11 +49,11 @@ public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
@Override
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new FSTBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
return new FSTBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
}
}
FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType,
FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType,
IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService);
this.breakerService = breakerService;


@ -52,6 +52,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSo
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -73,11 +74,11 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumer
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new FloatArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
return new FloatArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
}
}
public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;


@ -27,7 +27,8 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@ -65,8 +66,8 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final FieldMapper.Names fieldNames = mapper.names();
return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType());
final Names fieldNames = mapper.fieldType().names();
return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
}
}


@ -36,6 +36,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
@ -54,7 +55,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
FieldDataType type = mapper.fieldDataType();
FieldDataType type = mapper.fieldType().fieldDataType();
final String precisionAsString = type.getSettings().get(PRECISION_KEY);
final Distance precision;
if (precisionAsString != null) {
@ -62,13 +63,13 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
} else {
precision = DEFAULT_PRECISION_VALUE;
}
return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, precision, breakerService);
return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, precision, breakerService);
}
}
private final GeoPointFieldMapper.Encoding encoding;
public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, Distance precision,
CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);


@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -48,11 +49,11 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
}
}
public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;


@ -34,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -47,7 +48,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new IndexIndexFieldData(index, mapper.names());
return new IndexIndexFieldData(index, mapper.fieldType().names());
}
}
@ -101,7 +102,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
private final AtomicOrdinalsFieldData atomicFieldData;
private IndexIndexFieldData(Index index, FieldMapper.Names names) {
private IndexIndexFieldData(Index index, MappedFieldType.Names names) {
super(index, Settings.EMPTY, names, new FieldDataType("string"), null, null);
atomicFieldData = new IndexAtomicFieldData(index().name());
}


@ -31,7 +31,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;


@ -57,6 +57,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSou
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -86,14 +87,14 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
@Override
public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new PackedArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, numericType, breakerService);
return new PackedArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, numericType, breakerService);
}
}
private final NumericType numericType;
private final CircuitBreakerService breakerService;
public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, NumericType numericType,
CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);


@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -49,11 +50,11 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
@Override
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new PagedBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
return new PagedBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
}
}
public PagedBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public PagedBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService);
}


@ -61,7 +61,8 @@ import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentTypeListener;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@ -96,7 +97,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
// while loading.
private final Object lock = new Object();
public ParentChildIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
public ParentChildIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
@ -228,8 +229,8 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
MapperService mapperService) {
return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache,
mapperService, breakerService);
return new ParentChildIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache,
mapperService, breakerService);
}
}


@ -40,7 +40,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;


@ -28,7 +28,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.indices.breaker.CircuitBreakerService;


@ -67,7 +67,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
if (fieldsValues == null) {
return;
}
List<Object> fieldValues = fieldsValues.get(mapper.names().indexName());
List<Object> fieldValues = fieldsValues.get(mapper.fieldType().names().indexName());
if (fieldValues == null) {
return;
}


@ -59,19 +59,19 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
@Override
public Map.Entry<String, Analyzer> apply(FieldMapper input) {
return Maps.immutableEntry(input.names().indexName(), input.indexAnalyzer());
return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer());
}
}));
FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
@Override
public Map.Entry<String, Analyzer> apply(FieldMapper input) {
return Maps.immutableEntry(input.names().indexName(), input.searchAnalyzer());
return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer());
}
}));
FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
@Override
public Map.Entry<String, Analyzer> apply(FieldMapper input) {
return Maps.immutableEntry(input.names().indexName(), input.searchQuoteAnalyzer());
return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer());
}
}));
return new DocumentFieldMappers(fieldMappers, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);


@ -19,9 +19,6 @@
package org.elasticsearch.index.mapper;
import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.MultiTermQuery;
@ -30,10 +27,8 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.List;
@ -45,146 +40,7 @@ public interface FieldMapper extends Mapper {
String DOC_VALUES_FORMAT = "doc_values_format";
class Names {
private final String shortName;
private final String indexName;
private final String originalIndexName;
private final String fullName;
public Names(String name) {
this(name, name, name, name);
}
public Names(String shortName, String indexName, String originalIndexName, String fullName) {
this.shortName = shortName;
this.indexName = indexName;
this.originalIndexName = originalIndexName;
this.fullName = fullName;
}
/**
* The logical name of the field.
*/
public String shortName() {
return shortName;
}
/**
* The indexed name of the field. This is the name under which we will
* store it in the index.
*/
public String indexName() {
return indexName;
}
/**
* The original index name, before any "path" modifications performed on it.
*/
public String originalIndexName() {
return originalIndexName;
}
/**
* The full name, including dot path.
*/
public String fullName() {
return fullName;
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Names names = (Names) o;
if (!fullName.equals(names.fullName)) return false;
if (!indexName.equals(names.indexName)) return false;
if (!originalIndexName.equals(names.originalIndexName)) return false;
if (!shortName.equals(names.shortName)) return false;
return true;
}
@Override
public int hashCode() {
int result = shortName.hashCode();
result = 31 * result + indexName.hashCode();
result = 31 * result + originalIndexName.hashCode();
result = 31 * result + fullName.hashCode();
return result;
}
}
enum Loading {
LAZY {
@Override
public String toString() {
return LAZY_VALUE;
}
},
EAGER {
@Override
public String toString() {
return EAGER_VALUE;
}
},
EAGER_GLOBAL_ORDINALS {
@Override
public String toString() {
return EAGER_GLOBAL_ORDINALS_VALUE;
}
};
public static final String KEY = "loading";
public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals";
public static final String EAGER_VALUE = "eager";
public static final String LAZY_VALUE = "lazy";
public static Loading parse(String loading, Loading defaultValue) {
if (Strings.isNullOrEmpty(loading)) {
return defaultValue;
} else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) {
return EAGER_GLOBAL_ORDINALS;
} else if (EAGER_VALUE.equalsIgnoreCase(loading)) {
return EAGER;
} else if (LAZY_VALUE.equalsIgnoreCase(loading)) {
return LAZY;
} else {
throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]");
}
}
}
Names names();
FieldType fieldType();
float boost();
/**
* The analyzer that will be used to index the field.
*/
Analyzer indexAnalyzer();
/**
* The analyzer that will be used to search the field.
*/
Analyzer searchAnalyzer();
/**
* The analyzer that will be used for quoted search on the field.
*/
Analyzer searchQuoteAnalyzer();
/**
* Similarity used for scoring queries on the field
*/
SimilarityProvider similarity();
MappedFieldType fieldType();
/**
* List of fields where this field should be copied to
@ -236,18 +92,12 @@ public interface FieldMapper extends Mapper {
@Nullable
Query nullValueFilter();
FieldDataType fieldDataType();
boolean isNumeric();
boolean isSortable();
boolean supportsNullValue();
boolean hasDocValues();
Loading normsLoading(Loading defaultLoading);
/**
* Fields might not be available before indexing, for example _all, token_count,...
* When get is called and these fields are requested, this case needs special treatment.


@ -53,7 +53,7 @@ class FieldMappersLookup implements Iterable<FieldMapper> {
CopyOnWriteHashMap<String, FieldMappers> map = this.mappers;
for (FieldMapper mapper : newMappers) {
String key = mapper.names().fullName();
String key = mapper.fieldType().names().fullName();
FieldMappers mappers = map.get(key);
if (mappers == null) {
@ -76,13 +76,13 @@ class FieldMappersLookup implements Iterable<FieldMapper> {
public FieldMappers indexName(String indexName) {
FieldMappers fieldMappers = fullName(indexName);
if (fieldMappers != null) {
if (fieldMappers.mapper().names().indexName().equals(indexName)) {
if (fieldMappers.mapper().fieldType().names().indexName().equals(indexName)) {
return fieldMappers;
}
}
fieldMappers = new FieldMappers();
for (FieldMapper mapper : this) {
if (mapper.names().indexName().equals(indexName)) {
if (mapper.fieldType().names().indexName().equals(indexName)) {
fieldMappers = fieldMappers.concat(mapper);
}
}
@ -117,10 +117,10 @@ class FieldMappersLookup implements Iterable<FieldMapper> {
public Collection<String> simpleMatchToIndexNames(String pattern) {
Set<String> fields = Sets.newHashSet();
for (FieldMapper fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
fields.add(fieldMapper.names().indexName());
} else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
fields.add(fieldMapper.names().indexName());
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
fields.add(fieldMapper.fieldType().names().indexName());
} else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
fields.add(fieldMapper.fieldType().names().indexName());
}
}
return fields;
@ -132,10 +132,10 @@ class FieldMappersLookup implements Iterable<FieldMapper> {
public Collection<String> simpleMatchToFullName(String pattern) {
Set<String> fields = Sets.newHashSet();
for (FieldMapper fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
fields.add(fieldMapper.names().fullName());
} else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
fields.add(fieldMapper.names().fullName());
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
fields.add(fieldMapper.fieldType().names().fullName());
} else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
fields.add(fieldMapper.fieldType().names().fullName());
}
}
return fields;


@ -0,0 +1,368 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.List;
/**
* This defines the core properties and functions to operate on a field.
*/
public class MappedFieldType extends FieldType {
public static class Names {
private final String shortName;
private final String indexName;
private final String originalIndexName;
private final String fullName;
public Names(String name) {
this(name, name, name, name);
}
public Names(String shortName, String indexName, String originalIndexName, String fullName) {
this.shortName = shortName;
this.indexName = indexName;
this.originalIndexName = originalIndexName;
this.fullName = fullName;
}
/**
* The logical name of the field.
*/
public String shortName() {
return shortName;
}
/**
* The indexed name of the field. This is the name under which we will
* store it in the index.
*/
public String indexName() {
return indexName;
}
/**
* The original index name, before any "path" modifications performed on it.
*/
public String originalIndexName() {
return originalIndexName;
}
/**
* The full name, including dot path.
*/
public String fullName() {
return fullName;
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Names names = (Names) o;
if (!fullName.equals(names.fullName)) return false;
if (!indexName.equals(names.indexName)) return false;
if (!originalIndexName.equals(names.originalIndexName)) return false;
if (!shortName.equals(names.shortName)) return false;
return true;
}
@Override
public int hashCode() {
int result = shortName.hashCode();
result = 31 * result + indexName.hashCode();
result = 31 * result + originalIndexName.hashCode();
result = 31 * result + fullName.hashCode();
return result;
}
}
public enum Loading {
LAZY {
@Override
public String toString() {
return LAZY_VALUE;
}
},
EAGER {
@Override
public String toString() {
return EAGER_VALUE;
}
},
EAGER_GLOBAL_ORDINALS {
@Override
public String toString() {
return EAGER_GLOBAL_ORDINALS_VALUE;
}
};
public static final String KEY = "loading";
public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals";
public static final String EAGER_VALUE = "eager";
public static final String LAZY_VALUE = "lazy";
public static Loading parse(String loading, Loading defaultValue) {
if (Strings.isNullOrEmpty(loading)) {
return defaultValue;
} else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) {
return EAGER_GLOBAL_ORDINALS;
} else if (EAGER_VALUE.equalsIgnoreCase(loading)) {
return EAGER;
} else if (LAZY_VALUE.equalsIgnoreCase(loading)) {
return LAZY;
} else {
throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]");
}
}
}
private Names names;
private float boost;
// TODO: remove this docvalues flag and use docValuesType
private boolean docValues;
private NamedAnalyzer indexAnalyzer;
private NamedAnalyzer searchAnalyzer;
private NamedAnalyzer searchQuoteAnalyzer;
private SimilarityProvider similarity;
private Loading normsLoading;
private FieldDataType fieldDataType;
protected MappedFieldType(MappedFieldType ref) {
super(ref);
this.names = ref.names();
this.boost = ref.boost();
this.docValues = ref.hasDocValues();
this.indexAnalyzer = ref.indexAnalyzer();
this.searchAnalyzer = ref.searchAnalyzer();
this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer();
this.similarity = ref.similarity();
this.normsLoading = ref.normsLoading();
this.fieldDataType = ref.fieldDataType();
}
public MappedFieldType() {}
public MappedFieldType clone() {
return new MappedFieldType(this);
}
public boolean isNumeric() {
return false;
}
public boolean isSortable() {
return true;
}
public Names names() {
return names;
}
public void setNames(Names names) {
checkIfFrozen();
this.names = names;
}
public float boost() {
return boost;
}
public void setBoost(float boost) {
checkIfFrozen();
this.boost = boost;
}
public FieldDataType fieldDataType() {
return fieldDataType;
}
public void setFieldDataType(FieldDataType fieldDataType) {
checkIfFrozen();
this.fieldDataType = fieldDataType;
}
public boolean hasDocValues() {
return docValues;
}
public void setHasDocValues(boolean hasDocValues) {
checkIfFrozen();
this.docValues = hasDocValues;
}
public Loading normsLoading() {
return normsLoading;
}
public void setNormsLoading(Loading normsLoading) {
checkIfFrozen();
this.normsLoading = normsLoading;
}
public NamedAnalyzer indexAnalyzer() {
return indexAnalyzer;
}
public void setIndexAnalyzer(NamedAnalyzer analyzer) {
checkIfFrozen();
this.indexAnalyzer = analyzer;
}
public NamedAnalyzer searchAnalyzer() {
return searchAnalyzer;
}
public void setSearchAnalyzer(NamedAnalyzer analyzer) {
checkIfFrozen();
this.searchAnalyzer = analyzer;
}
public NamedAnalyzer searchQuoteAnalyzer() {
return searchQuoteAnalyzer == null ? searchAnalyzer : searchQuoteAnalyzer;
}
public void setSearchQuoteAnalyzer(NamedAnalyzer analyzer) {
checkIfFrozen();
this.searchQuoteAnalyzer = analyzer;
}
public SimilarityProvider similarity() {
return similarity;
}
public void setSimilarity(SimilarityProvider similarity) {
checkIfFrozen();
this.similarity = similarity;
}
/** Returns the actual value of the field. */
public Object value(Object value) {
return value;
}
/** Returns the value that will be used as a result for search. Can be only of specific types... */
public Object valueForSearch(Object value) {
return value;
}
/** Returns the indexed value used to construct search "values". */
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
}
/**
* Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
* field in query string.
*/
public boolean useTermQueryWithQueryString() {
return false;
}
/** Creates a term associated with the field of this mapper for the given value */
protected Term createTerm(Object value) {
return new Term(names().indexName(), indexedValueForSearch(value));
}
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(createTerm(value));
}
public Query termsQuery(List values, @Nullable QueryParseContext context) {
BytesRef[] bytesRefs = new BytesRef[values.size()];
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
}
return new TermsQuery(names.indexName(), bytesRefs);
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names().indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
}
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(createTerm(value));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
/**
* @return a {@link FieldStats} instance that maps to the type of this field based on the provided {@link Terms} instance.
*/
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
return new FieldStats.Text(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax()
);
}
/** A term query to use when parsing a query string. Can return <tt>null</tt>. */
@Nullable
public Query queryStringTermQuery(Term term) {
return null;
}
}
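A minimal usage sketch (an editor's illustration, not part of the diff): a MappedFieldType is configured through its setters, frozen, and from then on only answers query-time requests. The field name and the order of the Names constructor arguments are assumptions for the example.

import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.MappedFieldType;

public class MappedFieldTypeUsageSketch {
    public static Query titleTermQuery() {
        MappedFieldType type = new MappedFieldType();
        type.setNames(new MappedFieldType.Names("title", "title", "title", "title"));
        type.setBoost(2.0f);
        type.setHasDocValues(true);
        type.freeze();                                // later set*() calls hit the checkIfFrozen() guard
        return type.termQuery("elasticsearch", null); // a TermQuery on names().indexName()
    }
}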

View File

@ -690,8 +690,8 @@ public class MapperService extends AbstractIndexComponent {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
FieldMapper mapper = smartNameFieldMapper(fieldName);
if (mapper != null && mapper.searchAnalyzer() != null) {
return mapper.searchAnalyzer();
if (mapper != null && mapper.fieldType().searchAnalyzer() != null) {
return mapper.fieldType().searchAnalyzer();
}
return defaultAnalyzer;
}
@ -709,8 +709,8 @@ public class MapperService extends AbstractIndexComponent {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
FieldMapper mapper = smartNameFieldMapper(fieldName);
if (mapper != null && mapper.searchQuoteAnalyzer() != null) {
return mapper.searchQuoteAnalyzer();
if (mapper != null && mapper.fieldType().searchQuoteAnalyzer() != null) {
return mapper.fieldType().searchQuoteAnalyzer();
}
return defaultAnalyzer;
}
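Both hunks above use the same lookup pattern: prefer the analyzer carried by the field type, otherwise fall back to the default. A standalone sketch (editor's illustration, not in the commit), with a plain Map standing in for MapperService's own mapper lookup:

import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.elasticsearch.index.mapper.MappedFieldType;

public class FieldNameAnalyzerSketch extends DelegatingAnalyzerWrapper {
    private final Map<String, MappedFieldType> fieldTypes; // hypothetical per-index view of field types
    private final Analyzer defaultAnalyzer;

    public FieldNameAnalyzerSketch(Map<String, MappedFieldType> fieldTypes, Analyzer defaultAnalyzer) {
        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
        this.fieldTypes = fieldTypes;
        this.defaultAnalyzer = defaultAnalyzer;
    }

    @Override
    protected Analyzer getWrappedAnalyzer(String fieldName) {
        MappedFieldType type = fieldTypes.get(fieldName);
        if (type != null && type.searchAnalyzer() != null) {
            return type.searchAnalyzer(); // the analyzer now lives on the field type
        }
        return defaultAnalyzer;
    }
}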

View File

@ -25,28 +25,19 @@ import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -54,6 +45,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -74,14 +66,12 @@ import java.util.List;
import java.util.Locale;
import java.util.TreeMap;
/**
*
*/
import static org.elasticsearch.index.mapper.core.TypeParsers.DOC_VALUES;
public abstract class AbstractFieldMapper implements FieldMapper {
public static class Defaults {
public static final FieldType FIELD_TYPE = new FieldType();
public static final boolean PRE_2X_DOC_VALUES = false;
public static final MappedFieldType FIELD_TYPE = new MappedFieldType();
static {
FIELD_TYPE.setTokenized(true);
@ -89,6 +79,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.setBoost(Defaults.BOOST);
FIELD_TYPE.freeze();
}
@ -98,26 +89,21 @@ public abstract class AbstractFieldMapper implements FieldMapper {
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
protected final FieldType fieldType;
protected final MappedFieldType fieldType;
private final IndexOptions defaultOptions;
protected Boolean docValues;
protected float boost = Defaults.BOOST;
protected boolean omitNormsSet = false;
protected String indexName;
protected NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected Boolean includeInAll;
protected boolean indexOptionsSet = false;
protected SimilarityProvider similarity;
protected Loading normsLoading;
@Nullable
protected Settings fieldDataSettings;
protected final MultiFields.Builder multiFieldsBuilder;
protected CopyTo copyTo;
protected Builder(String name, FieldType fieldType) {
protected Builder(String name, MappedFieldType fieldType) {
super(name);
this.fieldType = fieldType;
this.fieldType = fieldType.clone();
this.defaultOptions = fieldType.indexOptions(); // we have to store it because the fieldType is mutable
multiFieldsBuilder = new MultiFields.Builder();
}
@ -191,7 +177,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
public T boost(float boost) {
this.boost = boost;
this.fieldType.setBoost(boost);
return builder;
}
@ -213,12 +199,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
this.fieldType.setIndexAnalyzer(indexAnalyzer);
return builder;
}
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
this.fieldType.setSearchAnalyzer(searchAnalyzer);
return builder;
}
@ -228,12 +214,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
public T similarity(SimilarityProvider similarity) {
this.similarity = similarity;
this.fieldType.setSimilarity(similarity);
return builder;
}
public T normsLoading(Loading normsLoading) {
this.normsLoading = normsLoading;
public T normsLoading(MappedFieldType.Loading normsLoading) {
this.fieldType.setNormsLoading(normsLoading);
return builder;
}
@ -257,8 +243,8 @@ public abstract class AbstractFieldMapper implements FieldMapper {
return builder;
}
protected Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
protected MappedFieldType.Names buildNames(BuilderContext context) {
return new MappedFieldType.Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
}
protected String buildIndexName(BuilderContext context) {
@ -279,136 +265,82 @@ public abstract class AbstractFieldMapper implements FieldMapper {
protected String buildFullName(BuilderContext context) {
return context.path().fullPathAsText(name);
}
protected void setupFieldType(BuilderContext context) {
fieldType.setNames(buildNames(context));
}
}
protected final Names names;
protected float boost;
protected FieldType fieldType;
protected final Boolean docValues;
protected final NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected final SimilarityProvider similarity;
protected Loading normsLoading;
protected MappedFieldType fieldType;
protected final boolean hasDefaultDocValues;
protected Settings customFieldDataSettings;
protected FieldDataType fieldDataType;
protected final MultiFields multiFields;
protected CopyTo copyTo;
protected final boolean indexCreatedBefore2x;
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) {
this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, similarity,
normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null);
protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
this(fieldType, docValues, fieldDataSettings, indexSettings, MultiFields.empty(), null);
}
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
assert indexSettings != null;
this.names = names;
this.boost = boost;
this.fieldType = fieldType;
this.fieldType.freeze();
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
boolean indexedNotAnalyzed = this.fieldType.tokenized() == false && this.fieldType.indexOptions() != IndexOptions.NONE;
if (indexAnalyzer == null && indexedNotAnalyzed) {
this.indexAnalyzer = this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
}
this.similarity = similarity;
this.normsLoading = normsLoading;
this.customFieldDataSettings = fieldDataSettings;
FieldDataType fieldDataType;
if (fieldDataSettings == null) {
this.fieldDataType = defaultFieldDataType();
fieldDataType = defaultFieldDataType();
} else {
// create a new field data type, with the default settings as well as the "new ones"
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
);
}
if (docValues != null) {
// explicitly set
this.docValues = docValues;
} else if (fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) {
// convoluted way to enable doc values, should be removed in the future
this.docValues = true;
} else {
this.docValues = null; // use the default
// TODO: hasDocValues should just be set directly on the field type by callers of this ctor, but
// then we need to eliminate defaultDocValues() (only needed by geo, which needs to be fixed with passing
// doc values setting down to lat/lon) and get rid of specifying doc values in fielddata (which
// complicates whether we can just compare to the default value to know whether to write the setting)
if (docValues == null && fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) {
docValues = true;
}
hasDefaultDocValues = docValues == null;
this.fieldType = fieldType.clone();
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
this.fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
this.fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues);
this.fieldType.setFieldDataType(fieldDataType);
this.fieldType.freeze();
this.multiFields = multiFields;
this.copyTo = copyTo;
}
protected boolean defaultDocValues() {
if (indexCreatedBefore2x) {
return Defaults.PRE_2X_DOC_VALUES;
return false;
} else {
return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
}
}
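// Editor's sketch, not part of the commit: the doc_values resolution the constructor
// above implements, restated as a tiny helper with illustrative names.
static boolean resolveDocValues(Boolean explicitSetting, boolean fieldDataFormatIsDocValues, boolean defaultForFieldType) {
    if (explicitSetting != null) {
        return explicitSetting;     // "doc_values" was set explicitly in the mapping
    }
    if (fieldDataFormatIsDocValues) {
        return true;                // legacy path: fielddata format "doc_values" implies them
    }
    return defaultForFieldType;     // defaultDocValues(): indexed, not-analyzed fields on 2.x+ indices
}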
@Override
public final boolean hasDocValues() {
return docValues == null ? defaultDocValues() : docValues;
}
@Override
public String name() {
// TODO: cleanup names so Mapper knows about paths, so that it is always clear whether we are using short or full name
return names.shortName();
return fieldType.names().shortName();
}
@Override
public Names names() {
return this.names;
}
public abstract FieldType defaultFieldType();
public abstract MappedFieldType defaultFieldType();
public abstract FieldDataType defaultFieldDataType();
@Override
public final FieldDataType fieldDataType() {
return fieldDataType;
}
@Override
public FieldType fieldType() {
public MappedFieldType fieldType() {
return fieldType;
}
@Override
public float boost() {
return this.boost;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchAnalyzer;
}
@Override
public SimilarityProvider similarity() {
return similarity;
}
@Override
public CopyTo copyTo() {
return copyTo;
@ -421,12 +353,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(boost);
field.setBoost(fieldType.boost());
}
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e);
}
multiFields.parse(this, context);
return null;
@ -452,72 +384,59 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
@Override
public Object valueForSearch(Object value) {
return value;
public final Object value(Object value) {
return fieldType().value(value);
}
@Override
public final Object valueForSearch(Object value) {
return fieldType().valueForSearch(value);
}
// TODO: this is not final so ParentFieldMapper can have custom behavior, per type...
@Override
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
return fieldType().indexedValueForSearch(value);
}
@Override
public Query queryStringTermQuery(Term term) {
return null;
public final Query queryStringTermQuery(Term term) {
return fieldType().queryStringTermQuery(term);
}
@Override
public boolean useTermQueryWithQueryString() {
return false;
public final boolean useTermQueryWithQueryString() {
return fieldType().useTermQueryWithQueryString();
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(createTerm(value));
public final Query termQuery(Object value, @Nullable QueryParseContext context) {
return fieldType().termQuery(value, context);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
BytesRef[] bytesRefs = new BytesRef[values.size()];
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
}
return new TermsQuery(names.indexName(), bytesRefs);
public final Query termsQuery(List values, @Nullable QueryParseContext context) {
return fieldType().termsQuery(values, context);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return fieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
public final Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return fieldType().fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(createTerm(value));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
public final Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
return fieldType().prefixQuery(value, method, context);
}
@Override
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
protected Term createTerm(Object value) {
return new Term(names.indexName(), indexedValueForSearch(value));
public final Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
return fieldType().regexpQuery(value, flags, maxDeterminizedStates, method, context);
}
@Override
@ -532,7 +451,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeResult.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
@ -540,86 +459,86 @@ public abstract class AbstractFieldMapper implements FieldMapper {
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE;
if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store values");
}
if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) {
if (!this.fieldType().hasDocValues() && fieldMergeWith.fieldType().hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
mergeResult.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] cannot enable norms (`norms.enabled`)");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_payloads values");
}
// null and "default"-named index analyzers both mean the default is used
if (this.indexAnalyzer == null || "default".equals(this.indexAnalyzer.name())) {
if (fieldMergeWith.indexAnalyzer != null && !"default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
if (this.fieldType.indexAnalyzer() == null || "default".equals(this.fieldType.indexAnalyzer().name())) {
if (fieldMergeWith.fieldType.indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null || "default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
} else if (fieldMergeWith.fieldType.indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
} else if (this.fieldType.indexAnalyzer().name().equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
}
if (!this.names().equals(fieldMergeWith.names())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index_name");
if (!this.fieldType().names().equals(fieldMergeWith.fieldType().names())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index_name");
}
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
if (this.fieldType.similarity() == null) {
if (fieldMergeWith.fieldType.similarity() != null) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (fieldMergeWith.fieldType().similarity() == null) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
} else if (!this.fieldType().similarity().equals(fieldMergeWith.fieldType().similarity())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeResult);
if (!mergeResult.simulate()) {
// apply changeable values
this.fieldType = new FieldType(this.fieldType);
this.fieldType = this.fieldType.clone();
this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms());
this.fieldType.freeze();
this.boost = fieldMergeWith.boost;
this.normsLoading = fieldMergeWith.normsLoading;
this.copyTo = fieldMergeWith.copyTo;
if (fieldMergeWith.searchAnalyzer != null) {
this.searchAnalyzer = fieldMergeWith.searchAnalyzer;
this.fieldType.setBoost(fieldMergeWith.fieldType.boost());
this.fieldType.setNormsLoading(fieldMergeWith.fieldType.normsLoading());
if (fieldMergeWith.fieldType.searchAnalyzer() != null) {
this.fieldType.setSearchAnalyzer(fieldMergeWith.fieldType.searchAnalyzer());
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
));
}
}
this.fieldType.freeze();
this.copyTo = fieldMergeWith.copyTo;
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.shortName());
builder.startObject(fieldType.names().shortName());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
@ -628,12 +547,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (indexCreatedBefore2x && (includeDefaults || !names.shortName().equals(names.originalIndexName()))) {
builder.field("index_name", names.originalIndexName());
if (indexCreatedBefore2x && (includeDefaults || !fieldType.names().shortName().equals(fieldType.names().originalIndexName()))) {
builder.field("index_name", fieldType.names().originalIndexName());
}
if (includeDefaults || boost != 1.0f) {
builder.field("boost", boost);
if (includeDefaults || fieldType.boost() != 1.0f) {
builder.field("boost", fieldType.boost());
}
FieldType defaultFieldType = defaultFieldType();
@ -650,13 +569,13 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType));
}
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) {
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || fieldType.normsLoading() != null) {
builder.startObject("norms");
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) {
builder.field("enabled", !fieldType.omitNorms());
}
if (normsLoading != null) {
builder.field(Loading.KEY, normsLoading);
if (fieldType.normsLoading() != null) {
builder.field(MappedFieldType.Loading.KEY, fieldType.normsLoading());
}
builder.endObject();
}
@ -666,8 +585,8 @@ public abstract class AbstractFieldMapper implements FieldMapper {
doXContentAnalyzers(builder, includeDefaults);
if (similarity() != null) {
builder.field("similarity", similarity().name());
if (fieldType().similarity() != null) {
builder.field("similarity", fieldType().similarity().name());
} else if (includeDefaults) {
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
}
@ -677,7 +596,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
builder.field("fielddata", orderedFielddataSettings);
} else if (includeDefaults) {
orderedFielddataSettings.putAll(fieldDataType.getSettings().getAsMap());
orderedFielddataSettings.putAll(fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", orderedFielddataSettings);
}
multiFields.toXContent(builder, params);
@ -688,21 +607,21 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
if (indexAnalyzer == null) {
if (fieldType.indexAnalyzer() == null) {
if (includeDefaults) {
builder.field("analyzer", "default");
}
} else if (includeDefaults || indexAnalyzer.name().startsWith("_") == false && indexAnalyzer.name().equals("default") == false) {
builder.field("analyzer", indexAnalyzer.name());
if (searchAnalyzer.name().equals(indexAnalyzer.name()) == false) {
builder.field("search_analyzer", searchAnalyzer.name());
} else if (includeDefaults || fieldType.indexAnalyzer().name().startsWith("_") == false && fieldType.indexAnalyzer().name().equals("default") == false) {
builder.field("analyzer", fieldType.indexAnalyzer().name());
if (fieldType.searchAnalyzer().name().equals(fieldType.indexAnalyzer().name()) == false) {
builder.field("search_analyzer", fieldType.searchAnalyzer().name());
}
}
}
protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException {
if (includeDefaults || docValues != null) {
builder.field(TypeParsers.DOC_VALUES, hasDocValues());
if (includeDefaults || hasDefaultDocValues == false) {
builder.field(DOC_VALUES, fieldType().hasDocValues());
}
}
@ -753,7 +672,6 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
}
protected abstract String contentType();
@Override
@ -762,13 +680,13 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
@Override
public boolean isNumeric() {
return false;
public final boolean isNumeric() {
return fieldType().isNumeric();
}
@Override
public boolean isSortable() {
return true;
public final boolean isSortable() {
return fieldType().isSortable();
}
@Override
@ -776,11 +694,6 @@ public abstract class AbstractFieldMapper implements FieldMapper {
return true;
}
@Override
public Loading normsLoading(Loading defaultLoading) {
return normsLoading == null ? defaultLoading : normsLoading;
}
public static class MultiFields {
public static MultiFields empty() {
@ -854,7 +767,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.names().shortName());
context.path().add(mainField.fieldType().names().shortName());
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
@ -871,7 +784,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
for (ObjectCursor<FieldMapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
FieldMapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.names().shortName());
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.fieldType().names().shortName());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeResult.simulate()) {
@ -882,7 +795,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.names().shortName(), mergeWithMapper);
newMappersBuilder.put(mergeWithMapper.fieldType().names().shortName(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<>(2);
@ -992,9 +905,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
return new FieldStats.Text(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax()
);
public final FieldStats stats(Terms terms, int maxDoc) throws IOException {
return fieldType().stats(terms, maxDoc);
}
}
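Because every query method on the mapper is now a final delegate, callers can just as well ask the field type directly (the planned follow-up). A small sketch (editor's illustration, not in the commit), assuming the mapper comes from the usual MapperService lookup:

import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.FieldMapper;

public class QueryDelegationSketch {
    public static boolean sameTermQuery(FieldMapper mapper) {
        Query viaMapper = mapper.termQuery("kimchy", null);
        Query viaFieldType = mapper.fieldType().termQuery("kimchy", null);
        return viaMapper.equals(viaFieldType); // both paths run MappedFieldType.termQuery()
    }
}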

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
@ -40,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
@ -63,7 +63,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
@ -74,13 +74,15 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
public static class Builder extends AbstractFieldMapper.Builder<Builder, BinaryFieldMapper> {
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
builder = this;
}
@Override
public BinaryFieldMapper build(BuilderContext context) {
return new BinaryFieldMapper(buildNames(context), fieldType, docValues,
setupFieldType(context);
((BinaryFieldType)fieldType).tryUncompressing = context.indexCreatedVersion().before(Version.V_2_0_0);
return new BinaryFieldMapper(fieldType, docValues,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
@ -102,13 +104,67 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
}
}
protected BinaryFieldMapper(Names names, FieldType fieldType, Boolean docValues,
public static class BinaryFieldType extends MappedFieldType {
protected boolean tryUncompressing = false;
public BinaryFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected BinaryFieldType(BinaryFieldType ref) {
super(ref);
this.tryUncompressing = ref.tryUncompressing;
}
@Override
public MappedFieldType clone() {
return new BinaryFieldType(this);
}
@Override
public BytesReference value(Object value) {
if (value == null) {
return null;
}
BytesReference bytes;
if (value instanceof BytesRef) {
bytes = new BytesArray((BytesRef) value);
} else if (value instanceof BytesReference) {
bytes = (BytesReference) value;
} else if (value instanceof byte[]) {
bytes = new BytesArray((byte[]) value);
} else {
try {
bytes = new BytesArray(Base64.decode(value.toString()));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert bytes", e);
}
}
try {
if (tryUncompressing) { // backcompat behavior
return CompressorFactory.uncompressIfNeeded(bytes);
} else {
return bytes;
}
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
}
protected BinaryFieldMapper(MappedFieldType fieldType, Boolean docValues,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, 1.0f, fieldType, docValues, null, null, null, null, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -117,45 +173,9 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
return new FieldDataType("binary");
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public BytesReference value(Object value) {
if (value == null) {
return null;
}
BytesReference bytes;
if (value instanceof BytesRef) {
bytes = new BytesArray((BytesRef) value);
} else if (value instanceof BytesReference) {
bytes = (BytesReference) value;
} else if (value instanceof byte[]) {
bytes = new BytesArray((byte[]) value);
} else {
try {
bytes = new BytesArray(Base64.decode(value.toString()));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert bytes", e);
}
}
try {
if (indexCreatedBefore2x) {
return CompressorFactory.uncompressIfNeeded(bytes);
} else {
return bytes;
}
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!fieldType().stored() && !hasDocValues()) {
if (!fieldType().stored() && !fieldType().hasDocValues()) {
return;
}
byte[] value = context.parseExternalValue(byte[].class);
@ -170,14 +190,14 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
return;
}
if (fieldType().stored()) {
fields.add(new Field(names.indexName(), value, fieldType));
fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
}
if (hasDocValues()) {
CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(names().indexName());
if (fieldType().hasDocValues()) {
CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field == null) {
field = new CustomBinaryDocValuesField(names().indexName(), value);
context.doc().addWithKey(names().indexName(), field);
field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value);
context.doc().addWithKey(fieldType().names().indexName(), field);
} else {
field.add(value);
}
@ -192,17 +212,11 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
public static class CustomBinaryDocValuesField extends NumberFieldMapper.CustomNumericDocValuesField {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDocValuesType(DocValuesType.BINARY);
TYPE.freeze();
}
private final ObjectArrayList<byte[]> bytesList;
private int totalSize = 0;
public CustomBinaryDocValuesField(String name, byte[] bytes) {
super(name);
bytesList = new ObjectArrayList<>();
add(bytes);
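For reference, a sketch (editor's illustration, not in the commit) of the input shapes the new BinaryFieldType.value(Object) accepts; "AQID" is simply the base64 encoding of {1, 2, 3}:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;

public class BinaryValueSketch {
    public static void main(String[] args) {
        BinaryFieldMapper.BinaryFieldType type = new BinaryFieldMapper.BinaryFieldType();
        BytesReference fromBytes = type.value(new byte[] { 1, 2, 3 });
        BytesReference fromBase64 = type.value("AQID");                          // base64 string input
        System.out.println(fromBytes.length() + " == " + fromBase64.length());   // 3 == 3
    }
}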

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.ConstantScoreQuery;
@ -34,6 +33,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -58,12 +58,14 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
public static final String CONTENT_TYPE = "boolean";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new BooleanFieldType();
static {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.freeze();
}
@ -80,7 +82,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
private Boolean nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
this.builder = this;
}
@ -99,8 +101,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
@Override
public BooleanFieldMapper build(BuilderContext context) {
return new BooleanFieldMapper(buildNames(context), boost, fieldType, docValues, nullValue,
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
return new BooleanFieldMapper(fieldType, docValues, nullValue,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
@ -125,17 +128,86 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}
}
public static class BooleanFieldType extends MappedFieldType {
public BooleanFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected BooleanFieldType(BooleanFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new BooleanFieldType(this);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
return Values.FALSE;
}
if (value instanceof Boolean) {
return ((Boolean) value) ? Values.TRUE : Values.FALSE;
}
String sValue;
if (value instanceof BytesRef) {
sValue = ((BytesRef) value).utf8ToString();
} else {
sValue = value.toString();
}
if (sValue.length() == 0) {
return Values.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Values.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Values.TRUE;
}
return Values.FALSE;
}
@Override
public Boolean value(Object value) {
if (value == null) {
return Boolean.FALSE;
}
String sValue = value.toString();
if (sValue.length() == 0) {
return Boolean.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Boolean.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Boolean.TRUE;
}
return Boolean.FALSE;
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
}
private Boolean nullValue;
protected BooleanFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, Boolean nullValue,
SimilarityProvider similarity, Loading normsLoading,
protected BooleanFieldMapper(MappedFieldType fieldType, Boolean docValues, Boolean nullValue,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -145,60 +217,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
return new FieldDataType(CONTENT_TYPE);
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Boolean value(Object value) {
if (value == null) {
return Boolean.FALSE;
}
String sValue = value.toString();
if (sValue.length() == 0) {
return Boolean.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Boolean.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Boolean.TRUE;
}
return Boolean.FALSE;
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
return Values.FALSE;
}
if (value instanceof Boolean) {
return ((Boolean) value) ? Values.TRUE : Values.FALSE;
}
String sValue;
if (value instanceof BytesRef) {
sValue = ((BytesRef) value).utf8ToString();
} else {
sValue = value.toString();
}
if (sValue.length() == 0) {
return Values.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Values.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Values.TRUE;
}
return Values.FALSE;
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -209,7 +227,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !hasDocValues()) {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
return;
}
@ -228,9 +246,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
if (value == null) {
return;
}
fields.add(new Field(names.indexName(), value ? "T" : "F", fieldType));
if (hasDocValues()) {
fields.add(new SortedNumericDocValuesField(names.indexName(), value ? 1 : 0));
fields.add(new Field(fieldType.names().indexName(), value ? "T" : "F", fieldType));
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType.names().indexName(), value ? 1 : 0));
}
}
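Similarly, a short sketch (editor's illustration, not in the commit) of the normalization rules the new BooleanFieldType applies:

import org.elasticsearch.index.mapper.core.BooleanFieldMapper;

public class BooleanValueSketch {
    public static void main(String[] args) {
        BooleanFieldMapper.BooleanFieldType type = new BooleanFieldMapper.BooleanFieldType();
        System.out.println(type.value("true")); // true
        System.out.println(type.value("F"));    // false (single-character shortcut)
        System.out.println(type.value(null));   // false (null defaults to FALSE)
    }
}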

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
@ -41,13 +40,13 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -66,7 +65,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "byte";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new ByteFieldType();
static {
FIELD_TYPE.freeze();
@ -80,7 +79,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
protected Byte nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_8_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT);
builder = this;
}
@ -91,14 +90,23 @@ public class ByteFieldMapper extends NumberFieldMapper {
@Override
public ByteFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
ByteFieldMapper fieldMapper = new ByteFieldMapper(buildNames(context),
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context),
coerce(context), similarity, normsLoading,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context),
coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
String name = precisionStep == Integer.MAX_VALUE ? "_byte/max" : ("_byte/" + precisionStep);
return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep));
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -122,24 +130,81 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
}
public static class ByteFieldType extends NumberFieldType {
public ByteFieldType() {}
protected ByteFieldType(ByteFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new ByteFieldType(this);
}
@Override
public Byte value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).byteValue();
}
if (value instanceof BytesRef) {
return ((BytesRef) value).bytes[((BytesRef) value).offset];
}
return Byte.parseByte(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : (int)parseValue(lowerTerm),
upperTerm == null ? null : (int)parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
byte iValue = Byte.parseByte(value);
byte iSim = fuzziness.asByte();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
private Byte nullValue;
private String nullValueAsString;
protected ByteFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
Byte nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading,
protected ByteFieldMapper(MappedFieldType fieldType, Boolean docValues,
Byte nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues,
ignoreMalformed, coerce, new NamedAnalyzer("_byte/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)),
new NamedAnalyzer("_byte/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -148,33 +213,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
return new FieldDataType("byte");
}
@Override
protected int maxPrecisionStep() {
return 32;
}
@Override
public Byte value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).byteValue();
}
if (value instanceof BytesRef) {
return ((BytesRef) value).bytes[((BytesRef) value).offset];
}
return Byte.parseByte(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private byte parseValue(Object value) {
private static byte parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).byteValue();
}
@ -184,28 +223,6 @@ public class ByteFieldMapper extends NumberFieldMapper {
return Byte.parseByte(value.toString());
}
private int parseValueAsInt(Object value) {
return parseValue(value);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
byte iValue = Byte.parseByte(value);
byte iSim = fuzziness.asByte();
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValueAsInt(lowerTerm),
upperTerm == null ? null : parseValueAsInt(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -222,7 +239,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
byte value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -244,7 +261,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).byteValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Byte.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Byte.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -255,7 +272,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -284,7 +301,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
} else {
value = (byte) parser.shortValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
@ -293,7 +310,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -319,8 +336,8 @@ public class ByteFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_8_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -332,22 +349,13 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomByteNumericField extends CustomNumericField {
private final byte number;
private final NumberFieldMapper mapper;
public CustomByteNumericField(NumberFieldMapper mapper, byte number, FieldType fieldType) {
public CustomByteNumericField(NumberFieldMapper mapper, byte number, MappedFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;

View File

@ -25,7 +25,6 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
@ -39,11 +38,12 @@ import org.elasticsearch.common.xcontent.XContentParser.NumberType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
@ -72,7 +72,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
public static final String CONTENT_TYPE = "completion";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new CompletionFieldType();
static {
FIELD_TYPE.setOmitNorms(true);
@ -114,7 +114,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
builder = this;
}
@ -148,7 +148,8 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
@Override
public CompletionFieldMapper build(Mapper.BuilderContext context) {
return new CompletionFieldMapper(buildNames(context), indexAnalyzer, searchAnalyzer, null, similarity, payloads,
setupFieldType(context);
return new CompletionFieldMapper(fieldType, null, payloads,
preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, this.contextMapping);
}
@ -219,6 +220,35 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}
public static class CompletionFieldType extends MappedFieldType {
public CompletionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected CompletionFieldType(CompletionFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new CompletionFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean isSortable() {
return false;
}
}
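CompletionFieldType above shows the convention every new field type in this change follows: a protected copy constructor plus a clone() override, so a frozen type can be copied before it is mutated. A stripped-down sketch of that convention, using invented class names rather than the real MappedFieldType hierarchy:

// Sketch of the copy-constructor clone() convention the new field types follow.
public class FieldTypeCloneSketch {

    static class BaseType {
        float boost = 1.0f;
        BaseType() {}
        BaseType(BaseType ref) { this.boost = ref.boost; }       // copy the shared state
        @Override public BaseType clone() { return new BaseType(this); }
    }

    static class CompletionType extends BaseType {
        CompletionType() {}
        CompletionType(CompletionType ref) { super(ref); }
        @Override public CompletionType clone() { return new CompletionType(this); }

        // query-time behaviour now lives on the type, not on the mapper
        String value(Object v) { return v == null ? null : v.toString(); }
        boolean isSortable() { return false; }
    }

    public static void main(String[] args) {
        CompletionType original = new CompletionType();
        original.boost = 2.0f;
        CompletionType copy = original.clone();
        System.out.println(copy.boost + " " + copy.value(42) + " " + copy.isSortable());
    }
}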
private static final BytesRef EMPTY = new BytesRef();
private PostingsFormat postingsFormat;
@ -236,9 +266,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
*/
// Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility
// with older postings formats such as Elasticsearch090
public CompletionFieldMapper(Names names, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormat wrappedPostingsFormat, SimilarityProvider similarity, boolean payloads,
public CompletionFieldMapper(MappedFieldType fieldType, PostingsFormat wrappedPostingsFormat, boolean payloads,
boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap<String, ContextMapping> contextMappings) {
super(names, 1.0f, Defaults.FIELD_TYPE, false, indexAnalyzer, searchAnalyzer, similarity, null, null, indexSettings, multiFields, copyTo);
super(fieldType, false, null, indexSettings, multiFields, copyTo);
analyzingSuggestLookupProvider = new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads);
if (wrappedPostingsFormat == null) {
// delayed until postingsFormat() is called
@ -424,7 +454,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
+ "] at position " + i + " is a reserved character");
}
}
return new SuggestField(names.indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider);
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider);
}
public static int correctSubStringLen(String input, int len) {
@ -445,7 +475,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
private final CompletionTokenStream.ToFiniteStrings toFiniteStrings;
private final ContextMapping.Context ctx;
public SuggestField(String name, ContextMapping.Context ctx, String value, FieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) {
public SuggestField(String name, ContextMapping.Context ctx, String value, MappedFieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) {
super(name, value, type);
this.payload = payload;
this.toFiniteStrings = toFiniteStrings;
@ -461,12 +491,12 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names().shortName())
builder.startObject(fieldType().names().shortName())
.field(Fields.TYPE, CONTENT_TYPE);
builder.field(Fields.ANALYZER, indexAnalyzer.name());
if (indexAnalyzer.name().equals(searchAnalyzer.name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), searchAnalyzer.name());
builder.field(Fields.ANALYZER, fieldType.indexAnalyzer().name());
if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name());
}
builder.field(Fields.PAYLOADS, this.payloads);
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators);
@ -494,18 +524,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
return CONTENT_TYPE;
}
@Override
public boolean isSortable() {
return false;
}
@Override
public boolean supportsNullValue() {
return false;
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -514,14 +539,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
return null;
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
public boolean isStoringPayloads() {
return payloads;
}
@ -531,16 +548,16 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
super.merge(mergeWith, mergeResult);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
if (payloads != fieldMergeWith.payloads) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different payload values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values");
}
if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values");
}
if (preserveSeparators != fieldMergeWith.preserveSeparators) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values");
}

if (!ContextMapping.mappingsAreEqual(getContextMapping(), fieldMergeWith.getContextMapping())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values");
}
if (!mergeResult.simulate()) {
this.maxInputLength = fieldMergeWith.maxInputLength;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
@ -44,8 +43,10 @@ import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -53,7 +54,6 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
@ -75,37 +75,35 @@ public class DateFieldMapper extends NumberFieldMapper {
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime", Locale.ROOT);
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
public static final DateFieldType FIELD_TYPE = new DateFieldType();
static {
FIELD_TYPE.freeze();
}
public static final String NULL_VALUE = null;
public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
}
public static class Builder extends NumberFieldMapper.Builder<Builder, DateFieldMapper> {
protected TimeUnit timeUnit = Defaults.TIME_UNIT;
protected String nullValue = Defaults.NULL_VALUE;
protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER;
private Locale locale;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
// do *NOT* rely on the default locale
locale = Locale.ROOT;
}
DateFieldType fieldType() {
return (DateFieldType)fieldType;
}
public Builder timeUnit(TimeUnit timeUnit) {
this.timeUnit = timeUnit;
fieldType().setTimeUnit(timeUnit);
return this;
}
@ -115,28 +113,42 @@ public class DateFieldMapper extends NumberFieldMapper {
}
public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
this.dateTimeFormatter = dateTimeFormatter;
fieldType().setDateTimeFormatter(dateTimeFormatter);
return this;
}
@Override
public DateFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
if (!locale.equals(dateTimeFormatter.locale())) {
dateTimeFormatter = new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale);
}
DateFieldMapper fieldMapper = new DateFieldMapper(buildNames(context), dateTimeFormatter,
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, timeUnit, ignoreMalformed(context), coerce(context),
similarity, normsLoading, fieldDataSettings, context.indexSettings(),
multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
DateFieldMapper fieldMapper = new DateFieldMapper(fieldType,
docValues, nullValue, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
protected void setupFieldType(BuilderContext context) {
FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
if (!locale.equals(dateTimeFormatter.locale())) {
fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
}
super.setupFieldType(context);
}
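The builder now pushes its state into the field type through setupFieldType(context) before the mapper is constructed; the date builder overrides the hook to rebuild the formatter with the configured locale and then calls super. A hypothetical sketch of that override chain, with all names invented for illustration:

import java.util.Locale;

// Sketch of the setupFieldType(...) hook: the subclass adjusts the type, then calls super.
public class SetupFieldTypeSketch {
    static class Type { String format = "dateOptionalTime"; Locale locale = Locale.ROOT; boolean frozen; }

    static class BaseBuilder {
        final Type fieldType = new Type();
        void setupFieldType() { fieldType.frozen = true; }      // shared step, e.g. freeze and name the type
        Type build() { setupFieldType(); return fieldType; }
    }

    static class DateBuilder extends BaseBuilder {
        Locale locale = Locale.ROOT;
        @Override void setupFieldType() {
            if (!locale.equals(fieldType.locale)) {
                fieldType.locale = locale;                       // the real code rebuilds the formatter here
            }
            super.setupFieldType();                              // then the shared setup runs
        }
    }

    public static void main(String[] args) {
        DateBuilder b = new DateBuilder();
        b.locale = Locale.GERMANY;
        Type t = b.build();
        System.out.println(t.format + " " + t.locale + " frozen=" + t.frozen);
    }
}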
public Builder locale(Locale locale) {
this.locale = locale;
return this;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter, precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -169,37 +181,222 @@ public class DateFieldMapper extends NumberFieldMapper {
}
}
protected FormatDateTimeFormatter dateTimeFormatter;
public static class DateFieldType extends NumberFieldType {
private final DateMathParser dateMathParser;
final class LateParsingQuery extends Query {
final Object lowerTerm;
final Object upperTerm;
final boolean includeLower;
final boolean includeUpper;
final DateTimeZone timeZone;
final DateMathParser forcedDateParser;
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) {
this.lowerTerm = lowerTerm;
this.upperTerm = upperTerm;
this.includeLower = includeLower;
this.includeUpper = includeUpper;
this.timeZone = timeZone;
this.forcedDateParser = forcedDateParser;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
return query.rewrite(reader);
}
@Override
public String toString(String s) {
final StringBuilder sb = new StringBuilder();
return sb.append(names().indexName()).append(':')
.append(includeLower ? '[' : '{')
.append((lowerTerm == null) ? "*" : lowerTerm.toString())
.append(" TO ")
.append((upperTerm == null) ? "*" : upperTerm.toString())
.append(includeUpper ? ']' : '}')
.append(ToStringUtils.boost(getBoost()))
.toString();
}
}
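LateParsingQuery keeps the raw range bounds and only resolves them in rewrite(), because at alias-filter or percolator parse time there is no search context to supply "now". A self-contained sketch of the same deferral idea, with a toy date-math parser standing in for DateMathParser:

import java.util.function.LongSupplier;

// Sketch of "late parsing": keep the raw bounds and resolve them only when the query
// is rewritten, so "now"-relative expressions use the right clock instead of the
// (missing) parse-time context.
public class LateParsingSketch {
    static final class LateRange {
        final String lower, upper;             // raw, unparsed expressions, e.g. "now-1000"
        LateRange(String lower, String upper) { this.lower = lower; this.upper = upper; }

        long[] rewrite(LongSupplier now) {     // analogous to Query#rewrite(IndexReader)
            return new long[] { parse(lower, now), parse(upper, now) };
        }

        private static long parse(String expr, LongSupplier now) {
            // toy date-math: only "now" and "now-<millis>" are understood here
            if (expr.equals("now")) return now.getAsLong();
            if (expr.startsWith("now-")) return now.getAsLong() - Long.parseLong(expr.substring(4));
            return Long.parseLong(expr);
        }
    }

    public static void main(String[] args) {
        LateRange range = new LateRange("now-1000", "now");
        long[] bounds = range.rewrite(System::currentTimeMillis);   // resolved at rewrite time
        System.out.println(bounds[0] + " TO " + bounds[1]);
    }
}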
protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER;
protected TimeUnit timeUnit = Defaults.TIME_UNIT;
protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
public DateFieldType() {}
protected DateFieldType(DateFieldType ref) {
super(ref);
this.dateTimeFormatter = ref.dateTimeFormatter;
this.timeUnit = ref.timeUnit;
this.dateMathParser = ref.dateMathParser;
}
public DateFieldType clone() {
return new DateFieldType(this);
}
public FormatDateTimeFormatter dateTimeFormatter() {
return dateTimeFormatter;
}
public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
checkIfFrozen();
this.dateTimeFormatter = dateTimeFormatter;
this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
}
public TimeUnit timeUnit() {
return timeUnit;
}
public void setTimeUnit(TimeUnit timeUnit) {
checkIfFrozen();
this.timeUnit = timeUnit;
this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
}
protected DateMathParser dateMathParser() {
return dateMathParser;
}
private long parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString());
}
return dateTimeFormatter().parser().parseMillis(value.toString());
}
protected long parseStringValue(String value) {
try {
return dateTimeFormatter().parser().parseMillis(value);
} catch (RuntimeException e) {
try {
return timeUnit().toMillis(Long.parseLong(value));
} catch (NumberFormatException e1) {
throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter().format() + "], and timestamp number with locale [" + dateTimeFormatter().locale() + "]", e);
}
}
}
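parseStringValue tries the configured date format first and only then falls back to reading the value as a raw timestamp in the field's resolution. A sketch of that two-step parse, using java.time instead of the Joda-based formatter the mapper actually holds:

import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.concurrent.TimeUnit;

// Sketch of the date-then-timestamp fallback in parseStringValue(...).
public class DateParseFallbackSketch {
    static long parseStringValue(String value, DateTimeFormatter format, TimeUnit resolution) {
        try {
            return LocalDate.parse(value, format).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        } catch (DateTimeParseException e) {
            try {
                return resolution.toMillis(Long.parseLong(value));   // e.g. seconds since the epoch
            } catch (NumberFormatException e1) {
                throw new IllegalArgumentException("failed to parse date field [" + value + "]", e);
            }
        }
    }

    public static void main(String[] args) {
        DateTimeFormatter f = DateTimeFormatter.ISO_LOCAL_DATE;
        System.out.println(parseStringValue("2015-05-29", f, TimeUnit.SECONDS));   // date format wins
        System.out.println(parseStringValue("1432857600", f, TimeUnit.SECONDS));   // falls back to a timestamp
    }
}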
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return parseStringValue(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Object valueForSearch(Object value) {
if (value instanceof String) {
// assume it's the string that was indexed, just return it... (for example, with get)
return value;
}
Long val = value(value);
if (val == null) {
return null;
}
return dateTimeFormatter().printer().print(val);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = dateMathParser().parse(value, now());
long iSim;
try {
iSim = fuzziness.asTimeValue().millis();
} catch (Exception e) {
// not a time format
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinLong(terms);
long maxValue = NumericUtils.getMaxLong(terms);
return new FieldStats.Date(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter()
);
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) {
// If the current search context is null we're parsing a percolator query or an index alias filter.
if (SearchContext.current() == null) {
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
} else {
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
}
}
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
includeLower, includeUpper);
}
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
dateParser = forcedDateParser;
}
String strValue;
if (value instanceof BytesRef) {
strValue = ((BytesRef) value).utf8ToString();
} else {
strValue = value.toString();
}
return dateParser.parse(strValue, now(), inclusive, zone);
}
}
private String nullValue;
protected final TimeUnit timeUnit;
protected DateFieldMapper(Names names, FormatDateTimeFormatter dateTimeFormatter, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
String nullValue, TimeUnit timeUnit, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, precisionStep),
NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, Integer.MAX_VALUE),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
this.dateTimeFormatter = dateTimeFormatter;
protected DateFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.timeUnit = timeUnit;
this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
}
public FormatDateTimeFormatter dateTimeFormatter() {
return dateTimeFormatter;
}
public DateMathParser dateMathParser() {
return dateMathParser;
}
@Override
public FieldType defaultFieldType() {
public DateFieldType fieldType() {
return (DateFieldType)fieldType;
}
@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -208,63 +405,6 @@ public class DateFieldMapper extends NumberFieldMapper {
return new FieldDataType("long");
}
@Override
protected int maxPrecisionStep() {
return 64;
}
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return parseStringValue(value.toString());
}
/** Dates should be returned as a string. */
@Override
public Object valueForSearch(Object value) {
if (value instanceof String) {
// assume it's the string that was indexed, just return it... (for example, with get)
return value;
}
Long val = value(value);
if (val == null) {
return null;
}
return dateTimeFormatter.printer().print(val);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private long parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return dateTimeFormatter.parser().parseMillis(((BytesRef) value).utf8ToString());
}
return dateTimeFormatter.parser().parseMillis(value.toString());
}
private String convertToString(Object value) {
if (value instanceof BytesRef) {
return ((BytesRef) value).utf8ToString();
}
return value.toString();
}
private static Callable<Long> now() {
return new Callable<Long>() {
@Override
@ -277,62 +417,6 @@ public class DateFieldMapper extends NumberFieldMapper {
};
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = dateMathParser.parse(value, now());
long iSim;
try {
iSim = fuzziness.asTimeValue().millis();
} catch (Exception e) {
// not a time format
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
public long parseToMilliseconds(Object value) {
return parseToMilliseconds(value, false, null, dateMathParser);
}
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
return parseToMilliseconds(convertToString(value), inclusive, zone, forcedDateParser);
}
public long parseToMilliseconds(String value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
DateMathParser dateParser = dateMathParser;
if (forcedDateParser != null) {
dateParser = forcedDateParser;
}
return dateParser.parse(value, now(), inclusive, zone);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) {
// If the current search context is null we're parsing a percolator query or an index alias filter.
if (SearchContext.current() == null) {
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
} else {
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
}
}
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -351,7 +435,7 @@ public class DateFieldMapper extends NumberFieldMapper {
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
String dateAsString = null;
Long value = null;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue instanceof Number) {
@ -398,20 +482,20 @@ public class DateFieldMapper extends NumberFieldMapper {
if (dateAsString != null) {
assert value == null;
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), dateAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), dateAsString, boost);
}
value = parseStringValue(dateAsString);
value = fieldType().parseStringValue(dateAsString);
} else if (value != null) {
value = timeUnit.toMillis(value);
value = ((DateFieldType)fieldType).timeUnit().toMillis(value);
}
if (value != null) {
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType);
CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -430,7 +514,9 @@ public class DateFieldMapper extends NumberFieldMapper {
}
if (!mergeResult.simulate()) {
this.nullValue = ((DateFieldMapper) mergeWith).nullValue;
this.dateTimeFormatter = ((DateFieldMapper) mergeWith).dateTimeFormatter;
this.fieldType = this.fieldType.clone();
fieldType().setDateTimeFormatter(((DateFieldMapper) mergeWith).fieldType().dateTimeFormatter());
this.fieldType.freeze();
}
}
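The merge above illustrates the new mutation rule for field types: conflicts are collected into the MergeResult, and only when the merge is not a simulation is the frozen type cloned, updated, and frozen again before being published. A self-contained sketch of that flow, with simplified stand-ins for the mapper and field type:

import java.util.ArrayList;
import java.util.List;

// Sketch of the merge flow: collect conflicts, and only mutate when not simulating,
// by cloning the frozen field type, changing it, and freezing the copy.
public class MergeSketch {
    static class FieldType {
        String format;
        boolean frozen;
        FieldType(String format) { this.format = format; }
        FieldType copy() { return new FieldType(format); }            // clone() in the real classes
        void setFormat(String format) {
            if (frozen) throw new IllegalStateException("frozen");
            this.format = format;
        }
        void freeze() { frozen = true; }
    }

    static class Mapper {
        FieldType fieldType;
        boolean payloads;
        Mapper(FieldType ft, boolean payloads) { this.fieldType = ft; this.payloads = payloads; ft.freeze(); }

        List<String> merge(Mapper mergeWith, boolean simulate) {
            List<String> conflicts = new ArrayList<>();
            if (payloads != mergeWith.payloads) {
                conflicts.add("mapper [x] has different payload values");  // conflicts are reported, not thrown
            }
            if (!simulate) {
                FieldType updated = fieldType.copy();                      // clone the frozen type
                updated.setFormat(mergeWith.fieldType.format);             // apply the mergeable change
                updated.freeze();                                          // freeze again before publishing
                fieldType = updated;
            }
            return conflicts;
        }
    }

    public static void main(String[] args) {
        Mapper existing = new Mapper(new FieldType("dateOptionalTime"), true);
        Mapper incoming = new Mapper(new FieldType("anotherFormat"), false);   // format name is illustrative
        System.out.println(existing.merge(incoming, true));                    // simulate: conflicts only
        System.out.println(existing.merge(incoming, false) + " -> " + existing.fieldType.format);
    }
}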
@ -438,10 +524,10 @@ public class DateFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
builder.field("format", dateTimeFormatter.format());
builder.field("format", fieldType().dateTimeFormatter().format());
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
}
@ -451,77 +537,18 @@ public class DateFieldMapper extends NumberFieldMapper {
builder.field("include_in_all", false);
}
if (includeDefaults || timeUnit != Defaults.TIME_UNIT) {
builder.field("numeric_resolution", timeUnit.name().toLowerCase(Locale.ROOT));
if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) {
builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT));
}
// only serialize locale if needed, ROOT is the default, so no need to serialize that case as well...
if (dateTimeFormatter.locale() != null && dateTimeFormatter.locale() != Locale.ROOT) {
builder.field("locale", dateTimeFormatter.locale());
if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) {
builder.field("locale", fieldType().dateTimeFormatter().locale());
} else if (includeDefaults) {
if (dateTimeFormatter.locale() == null) {
if (fieldType().dateTimeFormatter().locale() == null) {
builder.field("locale", Locale.ROOT);
} else {
builder.field("locale", dateTimeFormatter.locale());
builder.field("locale", fieldType().dateTimeFormatter().locale());
}
}
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinLong(terms);
long maxValue = NumericUtils.getMaxLong(terms);
return new FieldStats.Date(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter
);
}
private long parseStringValue(String value) {
try {
return dateTimeFormatter.parser().parseMillis(value);
} catch (RuntimeException e) {
try {
return timeUnit.toMillis(Long.parseLong(value));
} catch (NumberFormatException e1) {
throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number with locale [" + dateTimeFormatter.locale() + "]", e);
}
}
}
public final class LateParsingQuery extends Query {
final Object lowerTerm;
final Object upperTerm;
final boolean includeLower;
final boolean includeUpper;
final DateTimeZone timeZone;
final DateMathParser forcedDateParser;
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) {
this.lowerTerm = lowerTerm;
this.upperTerm = upperTerm;
this.includeLower = includeLower;
this.includeUpper = includeUpper;
this.timeZone = timeZone;
this.forcedDateParser = forcedDateParser;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
return query.rewrite(reader);
}
@Override
public String toString(String s) {
final StringBuilder sb = new StringBuilder();
return sb.append(names.indexName()).append(':')
.append(includeLower ? '[' : '{')
.append((lowerTerm == null) ? "*" : lowerTerm.toString())
.append(" TO ")
.append((upperTerm == null) ? "*" : upperTerm.toString())
.append(includeUpper ? ']' : '}')
.append(ToStringUtils.boost(getBoost()))
.toString();
}
}
}

View File

@ -20,12 +20,10 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.DoubleArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
@ -44,21 +42,23 @@ import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.lucene.util.NumericUtils.doubleToSortableLong;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue;
import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
@ -71,7 +71,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "double";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new DoubleFieldType();
static {
FIELD_TYPE.freeze();
@ -85,7 +85,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
protected Double nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
}
@ -96,13 +96,22 @@ public class DoubleFieldMapper extends NumberFieldMapper {
@Override
public DoubleFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
DoubleFieldMapper fieldMapper = new DoubleFieldMapper(buildNames(context),
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
DoubleFieldMapper fieldMapper = new DoubleFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -126,24 +135,82 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
}
public static class DoubleFieldType extends NumberFieldType {
public DoubleFieldType() {}
protected DoubleFieldType(DoubleFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new DoubleFieldType(this);
}
@Override
public Double value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToDouble((BytesRef) value);
}
return Double.parseDouble(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value));
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
upperTerm == null ? null : parseDoubleValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
double iValue = Double.parseDouble(value);
double iSim = fuzziness.asDouble();
return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms));
double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms));
return new FieldStats.Double(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
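DoubleFieldType stores and searches doubles as sortable longs so that term order and doc-value order match numeric order. The encoding is the usual bit trick used by Lucene's NumericUtils; a sketch with a round-trip check:

// Sketch of the sortable-long encoding DoubleFieldType relies on
// (the same transform as NumericUtils.doubleToSortableLong / sortableLongToDouble).
public class SortableDoubleSketch {
    static long doubleToSortableLong(double value) {
        long bits = Double.doubleToLongBits(value);
        return bits ^ ((bits >> 63) & 0x7fffffffffffffffL);   // flip the payload bits of negatives
    }

    static double sortableLongToDouble(long encoded) {
        return Double.longBitsToDouble(encoded ^ ((encoded >> 63) & 0x7fffffffffffffffL));
    }

    public static void main(String[] args) {
        double[] values = { -2.5, -0.0, 0.0, 1.0, 3.25 };
        long previous = Long.MIN_VALUE;
        for (double v : values) {
            long encoded = doubleToSortableLong(v);
            System.out.println(v + " -> " + encoded + " roundtrip=" + sortableLongToDouble(encoded));
            assert encoded >= previous;                        // encoded order follows numeric order (-ea)
            previous = encoded;
        }
    }
}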
private Double nullValue;
private String nullValueAsString;
protected DoubleFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
Double nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep), NumericDoubleAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
protected DoubleFieldMapper(MappedFieldType fieldType, Boolean docValues, Double nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -152,53 +219,8 @@ public class DoubleFieldMapper extends NumberFieldMapper {
return new FieldDataType("double");
}
@Override
protected int maxPrecisionStep() {
return 64;
}
@Override
public Double value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToDouble((BytesRef) value);
}
return Double.parseDouble(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value));
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
double iValue = Double.parseDouble(value);
double iSim = fuzziness.asDouble();
return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
upperTerm == null ? null : parseDoubleValue(upperTerm),
includeLower, includeUpper);
}
public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper);
return NumericRangeQuery.newDoubleRange(fieldType.names().indexName(), fieldType.numericPrecisionStep(), lowerTerm, upperTerm, includeLower, includeUpper);
}
@Override
@ -217,7 +239,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
double value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -239,7 +261,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).doubleValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Double.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Double.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -250,7 +272,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -279,26 +301,26 @@ public class DoubleFieldMapper extends NumberFieldMapper {
} else {
value = parser.doubleValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, fieldType);
CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
if (useSortedNumericDocValues) {
addDocValue(context, fields, NumericUtils.doubleToSortableLong(value));
addDocValue(context, fields, doubleToSortableLong(value));
} else {
CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(names().indexName());
CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field != null) {
field.add(value);
} else {
field = new CustomDoubleNumericDocValuesField(names().indexName(), value);
context.doc().addWithKey(names().indexName(), field);
field = new CustomDoubleNumericDocValuesField(fieldType().names().indexName(), value);
context.doc().addWithKey(fieldType().names().indexName(), field);
}
}
}
@ -325,8 +347,8 @@ public class DoubleFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -339,22 +361,13 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms));
double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms));
return new FieldStats.Double(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomDoubleNumericField extends CustomNumericField {
private final double number;
private final NumberFieldMapper mapper;
public CustomDoubleNumericField(NumberFieldMapper mapper, double number, FieldType fieldType) {
public CustomDoubleNumericField(NumberFieldMapper mapper, double number, NumberFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;
@ -376,12 +389,6 @@ public class DoubleFieldMapper extends NumberFieldMapper {
public static class CustomDoubleNumericDocValuesField extends CustomNumericDocValuesField {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDocValuesType(DocValuesType.BINARY);
TYPE.freeze();
}
private final DoubleArrayList values;
public CustomDoubleNumericDocValuesField(String name, double value) {

View File

@ -20,11 +20,9 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
@ -45,21 +43,23 @@ import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.lucene.util.NumericUtils.floatToSortableInt;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
import static org.elasticsearch.index.mapper.MapperBuilders.floatField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
@ -72,7 +72,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "float";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new FloatFieldType();
static {
FIELD_TYPE.freeze();
@ -86,7 +86,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
protected Float nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}
@ -97,13 +97,22 @@ public class FloatFieldMapper extends NumberFieldMapper {
@Override
public FloatFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
FloatFieldMapper fieldMapper = new FloatFieldMapper(buildNames(context),
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
FloatFieldMapper fieldMapper = new FloatFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -127,23 +136,83 @@ public class FloatFieldMapper extends NumberFieldMapper {
}
}
public static class FloatFieldType extends NumberFieldType {
public FloatFieldType() {}
protected FloatFieldType(FloatFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new FloatFieldType(this);
}
@Override
public Float value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).floatValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToFloat((BytesRef) value);
}
return Float.parseFloat(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
int intValue = NumericUtils.floatToSortableInt(parseValue(value));
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
float iValue = Float.parseFloat(value);
final float iSim = fuzziness.asFloat();
return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms));
float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms));
return new FieldStats.Float(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
private Float nullValue;
private String nullValueAsString;
protected FloatFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected FloatFieldMapper(MappedFieldType fieldType, Boolean docValues,
Float nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -152,34 +221,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
return new FieldDataType("float");
}
@Override
protected int maxPrecisionStep() {
return 32;
}
@Override
public Float value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).floatValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToFloat((BytesRef) value);
}
return Float.parseFloat(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
int intValue = NumericUtils.floatToSortableInt(parseValue(value));
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private float parseValue(Object value) {
private static float parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).floatValue();
}
@ -189,24 +231,6 @@ public class FloatFieldMapper extends NumberFieldMapper {
return Float.parseFloat(value.toString());
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
float iValue = Float.parseFloat(value);
final float iSim = fuzziness.asFloat();
return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -223,7 +247,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
float value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -245,7 +269,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).floatValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Float.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Float.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -256,7 +280,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -285,26 +309,26 @@ public class FloatFieldMapper extends NumberFieldMapper {
} else {
value = parser.floatValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomFloatNumericField field = new CustomFloatNumericField(this, value, fieldType);
CustomFloatNumericField field = new CustomFloatNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
if (useSortedNumericDocValues) {
addDocValue(context, fields, NumericUtils.floatToSortableInt(value));
addDocValue(context, fields, floatToSortableInt(value));
} else {
CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(names().indexName());
CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field != null) {
field.add(value);
} else {
field = new CustomFloatNumericDocValuesField(names().indexName(), value);
context.doc().addWithKey(names().indexName(), field);
field = new CustomFloatNumericDocValuesField(fieldType().names().indexName(), value);
context.doc().addWithKey(fieldType().names().indexName(), field);
}
}
}
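The doc-values branch above uses getByKey/addWithKey so that all values of a multi-valued numeric field within one document end up in a single per-field doc-values entry. A sketch of that accumulate-by-key pattern; the DocSketch map is a stand-in for the real ParseContext document:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of the getByKey/addWithKey pattern: every value of a multi-valued numeric
// field in one document is accumulated into a single per-field entry.
public class KeyedDocValuesSketch {
    static class DocSketch {
        private final Map<String, List<Float>> byKey = new HashMap<>();

        List<Float> getByKey(String indexName) { return byKey.get(indexName); }
        void addWithKey(String indexName, List<Float> field) { byKey.put(indexName, field); }

        void addFloatDocValue(String indexName, float value) {
            List<Float> field = getByKey(indexName);
            if (field != null) {
                field.add(value);                 // field already created for this doc: append
            } else {
                field = new ArrayList<>();
                field.add(value);
                addWithKey(indexName, field);     // first value: register the per-field entry
            }
        }
    }

    public static void main(String[] args) {
        DocSketch doc = new DocSketch();
        doc.addFloatDocValue("price", 1.5f);      // e.g. {"price": [1.5, 2.75]}
        doc.addFloatDocValue("price", 2.75f);
        System.out.println(doc.getByKey("price"));
    }
}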
@ -332,8 +356,8 @@ public class FloatFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -346,22 +370,13 @@ public class FloatFieldMapper extends NumberFieldMapper {
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms));
float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms));
return new FieldStats.Float(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomFloatNumericField extends CustomNumericField {
private final float number;
private final NumberFieldMapper mapper;
public CustomFloatNumericField(NumberFieldMapper mapper, float number, FieldType fieldType) {
public CustomFloatNumericField(NumberFieldMapper mapper, float number, NumberFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;
@ -383,12 +398,6 @@ public class FloatFieldMapper extends NumberFieldMapper {
public static class CustomFloatNumericDocValuesField extends CustomNumericDocValuesField {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDocValuesType(DocValuesType.BINARY);
TYPE.freeze();
}
private final FloatArrayList values;
public CustomFloatNumericDocValuesField(String name, float value) {

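Editor's note on the FloatFieldMapper hunk above: the getByKey/addWithKey dance is how multi-valued numeric doc values are accumulated per document — the first value creates the keyed doc-values field, and later values for the same field name fold into it instead of adding a second field. A minimal standalone sketch of that pattern follows; KeyedDoc and MultiValueField are hypothetical stand-ins for the real ParseContext document API, not the actual classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class MultiValueField {
    final String name;
    final List<Float> values = new ArrayList<>();
    MultiValueField(String name, float first) { this.name = name; values.add(first); }
    void add(float value) { values.add(value); }
}

class KeyedDoc {
    private final Map<String, MultiValueField> byKey = new HashMap<>();
    MultiValueField getByKey(String key) { return byKey.get(key); }
    void addWithKey(String key, MultiValueField field) { byKey.put(key, field); }
}

public class DocValuesAccumulation {
    // Mirrors the branch in innerParseCreateField: reuse the keyed field if this
    // document already has one for the field name, otherwise create and register it.
    static void addDocValue(KeyedDoc doc, String indexName, float value) {
        MultiValueField field = doc.getByKey(indexName);
        if (field != null) {
            field.add(value);
        } else {
            field = new MultiValueField(indexName, value);
            doc.addWithKey(indexName, field);
        }
    }

    public static void main(String[] args) {
        KeyedDoc doc = new KeyedDoc();
        addDocValue(doc, "price", 1.5f);
        addDocValue(doc, "price", 2.5f); // second value folds into the same keyed field
        System.out.println(doc.getByKey("price").values); // prints [1.5, 2.5]
    }
}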
View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -67,7 +67,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "integer";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new IntegerFieldType();
static {
FIELD_TYPE.freeze();
@ -81,7 +81,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
protected Integer nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}
@ -92,13 +92,23 @@ public class IntegerFieldMapper extends NumberFieldMapper {
@Override
public IntegerFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
IntegerFieldMapper fieldMapper = new IntegerFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues,
nullValue, ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings,
setupFieldType(context);
IntegerFieldMapper fieldMapper = new IntegerFieldMapper(fieldType, docValues,
nullValue, ignoreMalformed(context), coerce(context), fieldDataSettings,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -122,23 +132,83 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
}
public static class IntegerFieldType extends NumberFieldType {
public IntegerFieldType() {}
protected IntegerFieldType(IntegerFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new IntegerFieldType(this);
}
@Override
public Integer value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).intValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToInt((BytesRef) value);
}
return Integer.parseInt(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
int iValue = Integer.parseInt(value);
int iSim = fuzziness.asInt();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
private Integer nullValue;
private String nullValueAsString;
protected IntegerFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected IntegerFieldMapper(MappedFieldType fieldType, Boolean docValues,
Integer nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep), NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -147,33 +217,9 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return new FieldDataType("int");
}
@Override
protected int maxPrecisionStep() {
return 32;
}
@Override
public Integer value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).intValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToInt((BytesRef) value);
}
return Integer.parseInt(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private int parseValue(Object value) {
private static int parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).intValue();
}
@ -183,24 +229,6 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return Integer.parseInt(value.toString());
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
int iValue = Integer.parseInt(value);
int iSim = fuzziness.asInt();
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -217,7 +245,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
int value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -239,7 +267,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).intValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Integer.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Integer.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -250,7 +278,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -279,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
} else {
value = parser.intValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
@ -288,11 +316,11 @@ public class IntegerFieldMapper extends NumberFieldMapper {
protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, fieldType);
CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -322,8 +350,8 @@ public class IntegerFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -336,22 +364,13 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomIntegerNumericField extends CustomNumericField {
private final int number;
private final NumberFieldMapper mapper;
public CustomIntegerNumericField(NumberFieldMapper mapper, int number, FieldType fieldType) {
public CustomIntegerNumericField(NumberFieldMapper mapper, int number, MappedFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;

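Editor's note on the IntegerFieldMapper hunk above: the value(Object) methods that moved onto the new field types all follow the same coercion cascade — pass Numbers through, decode raw bytes, and fall back to parsing the string form. The sketch below is a simplified standalone version under assumed names (plain big-endian byte[] decoding stands in for the BytesRef handling), not the real IntegerFieldType.

import java.nio.ByteBuffer;

public class IntegerValueCoercion {
    // Simplified stand-in for a field type's value(Object) coercion.
    static Integer value(Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof Number) {
            return ((Number) value).intValue();              // ints, longs, doubles, ...
        }
        if (value instanceof byte[]) {
            return ByteBuffer.wrap((byte[]) value).getInt(); // the real code decodes a BytesRef
        }
        return Integer.parseInt(value.toString());           // last resort: parse the string form
    }

    public static void main(String[] args) {
        System.out.println(value(3L));                        // 3
        System.out.println(value("7"));                       // 7
        System.out.println(value(new byte[] {0, 0, 0, 5}));   // 5
    }
}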
View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -67,7 +67,7 @@ public class LongFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "long";
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new LongFieldType();
static {
FIELD_TYPE.freeze();
@ -81,7 +81,7 @@ public class LongFieldMapper extends NumberFieldMapper {
protected Long nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
}
@ -92,13 +92,22 @@ public class LongFieldMapper extends NumberFieldMapper {
@Override
public LongFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
LongFieldMapper fieldMapper = new LongFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), similarity, normsLoading,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
LongFieldMapper fieldMapper = new LongFieldMapper(fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -122,23 +131,83 @@ public class LongFieldMapper extends NumberFieldMapper {
}
}
public static class LongFieldType extends NumberFieldType {
public LongFieldType() {}
protected LongFieldType(LongFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new LongFieldType(this);
}
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return Long.parseLong(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseLongValue(lowerTerm),
upperTerm == null ? null : parseLongValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = Long.parseLong(value);
final long iSim = fuzziness.asLong();
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinLong(terms);
long maxValue = NumericUtils.getMaxLong(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
private Long nullValue;
private String nullValueAsString;
protected LongFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected LongFieldMapper(MappedFieldType fieldType, Boolean docValues,
Long nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
NumericLongAnalyzer.buildNamedAnalyzer(precisionStep), NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -147,50 +216,6 @@ public class LongFieldMapper extends NumberFieldMapper {
return new FieldDataType("long");
}
@Override
protected int maxPrecisionStep() {
return 64;
}
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return Long.parseLong(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = Long.parseLong(value);
final long iSim = fuzziness.asLong();
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseLongValue(lowerTerm),
upperTerm == null ? null : parseLongValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -207,7 +232,7 @@ public class LongFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
long value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -229,7 +254,7 @@ public class LongFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).longValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Long.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Long.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -240,7 +265,7 @@ public class LongFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -269,16 +294,16 @@ public class LongFieldMapper extends NumberFieldMapper {
} else {
value = parser.longValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType);
CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -304,8 +329,8 @@ public class LongFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -317,22 +342,13 @@ public class LongFieldMapper extends NumberFieldMapper {
}
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinLong(terms);
long maxValue = NumericUtils.getMaxLong(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomLongNumericField extends CustomNumericField {
private final long number;
private final NumberFieldMapper mapper;
public CustomLongNumericField(NumberFieldMapper mapper, long number, FieldType fieldType) {
public CustomLongNumericField(NumberFieldMapper mapper, long number, MappedFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;

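Editor's note on the LongFieldMapper hunk above: the numeric fuzzyQuery implementations never do edit-distance matching — fuzziness on a number is interpreted as plus/minus a delta and rewritten into a range query. A tiny sketch of that translation for longs; the names are invented, and the real code hands the computed bounds to a numeric range query rather than returning them.

public class NumericFuzzyBounds {
    // "Fuzzy" on a numeric field means value - delta .. value + delta, both ends inclusive.
    static long[] bounds(String value, long fuzziness) {
        long center = Long.parseLong(value);
        return new long[] { center - fuzziness, center + fuzziness };
    }

    public static void main(String[] args) {
        long[] b = bounds("1000", 5);
        System.out.println(b[0] + " .. " + b[1]); // 995 .. 1005
    }
}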
View File

@ -20,13 +20,16 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
@ -36,7 +39,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.murmur3Field;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
@ -50,20 +52,30 @@ public class Murmur3FieldMapper extends LongFieldMapper {
public static class Builder extends NumberFieldMapper.Builder<Builder, Murmur3FieldMapper> {
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Integer.MAX_VALUE);
super(name, Defaults.FIELD_TYPE, Integer.MAX_VALUE);
builder = this;
builder.precisionStep(Integer.MAX_VALUE);
}
@Override
public Murmur3FieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, null,
ignoreMalformed(context), coerce(context), similarity, normsLoading,
setupFieldType(context);
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(fieldType, docValues, null,
ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -92,13 +104,12 @@ public class Murmur3FieldMapper extends LongFieldMapper {
}
}
protected Murmur3FieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected Murmur3FieldMapper(MappedFieldType fieldType, Boolean docValues,
Long nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce,
similarity, normsLoading, fieldDataSettings,
indexSettings, multiFields, copyTo);
super(fieldType, docValues, nullValue, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, multiFields, copyTo);
}
@Override

View File

@ -31,9 +31,7 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -45,14 +43,13 @@ import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.io.Reader;
@ -70,16 +67,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
public static final int PRECISION_STEP_32_BIT = 8; // 4tpv
public static final int PRECISION_STEP_64_BIT = 16; // 4tpv
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.freeze();
}
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
public static final Explicit<Boolean> COERCE = new Explicit<>(true, false);
}
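Editor's note: the "4tpv" comments above read as "terms per value". With trie-encoded numerics, each value is indexed once per precision-step slice of its bits, so a 64-bit long at step 16 (or a 32-bit int at step 8) produces four terms, while a step of Integer.MAX_VALUE collapses to a single exact term. The following is only a rough back-of-the-envelope count under the usual trie-encoding assumption, not the actual Lucene token stream logic.

public class PrefixTermCount {
    // Approximate terms-per-value for a trie-encoded numeric field.
    static int termsPerValue(int bits, int precisionStep) {
        if (precisionStep >= bits) {
            return 1; // e.g. Integer.MAX_VALUE: only the exact (shift 0) term is indexed
        }
        return (bits + precisionStep - 1) / precisionStep; // ceil(bits / precisionStep)
    }

    public static void main(String[] args) {
        System.out.println(termsPerValue(64, 16));                // 4  -> the "4tpv" for longs
        System.out.println(termsPerValue(32, 8));                 // 4  -> the "4tpv" for ints
        System.out.println(termsPerValue(64, Integer.MAX_VALUE)); // 1
    }
}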
@ -90,9 +77,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
private Boolean coerce;
public Builder(String name, FieldType fieldType, int defaultPrecisionStep) {
public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) {
super(name, fieldType);
fieldType.setNumericPrecisionStep(defaultPrecisionStep);
this.fieldType.setNumericPrecisionStep(defaultPrecisionStep);
}
public T precisionStep(int precisionStep) {
@ -129,10 +116,60 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
return Defaults.COERCE;
}
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f);
int precisionStep = fieldType.numericPrecisionStep();
if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) {
fieldType.setNumericPrecisionStep(Integer.MAX_VALUE);
}
fieldType.setIndexAnalyzer(makeNumberAnalyzer(fieldType.numericPrecisionStep()));
fieldType.setSearchAnalyzer(makeNumberAnalyzer(Integer.MAX_VALUE));
}
protected abstract NamedAnalyzer makeNumberAnalyzer(int precisionStep);
protected abstract int maxPrecisionStep();
}
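Editor's note: setupFieldType above centralizes what each numeric builder used to do by hand — norms stay omitted only while the boost is the default 1.0, and an out-of-range precision step is clamped to Integer.MAX_VALUE before the index and search analyzers are derived from it. A condensed sketch of that decision logic, with hypothetical names and outside the real builder hierarchy:

public class NumericFieldSetup {
    static int clampPrecisionStep(int precisionStep, int maxPrecisionStep) {
        // A non-positive step, or one at/above the type's bit width, is not useful:
        // fall back to a single exact term per value.
        if (precisionStep <= 0 || precisionStep >= maxPrecisionStep) {
            return Integer.MAX_VALUE;
        }
        return precisionStep;
    }

    static boolean keepNorms(boolean omitNorms, float boost) {
        // Norms stay omitted only while the boost is the default 1.0; a custom
        // boost needs norms to be encoded, so omitNorms is switched off.
        return !(omitNorms && boost == 1.0f);
    }

    public static void main(String[] args) {
        System.out.println(clampPrecisionStep(8, 32));   // 8 (valid for a 32-bit field)
        System.out.println(clampPrecisionStep(64, 32));  // 2147483647 (clamped)
        System.out.println(keepNorms(true, 2.0f));       // true: boost forces norms back on
    }
}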
protected int precisionStep;
public static abstract class NumberFieldType extends MappedFieldType {
public NumberFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
setTokenized(false);
setOmitNorms(true);
setIndexOptions(IndexOptions.DOCS);
setStoreTermVectors(false);
}
protected NumberFieldType(NumberFieldType ref) {
super(ref);
}
public abstract NumberFieldType clone();
@Override
public abstract Object value(Object value);
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public boolean isNumeric() {
return true;
}
}
protected Boolean includeInAll;
@ -151,7 +188,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
private ThreadLocal<NumericTokenStream> tokenStream = new ThreadLocal<NumericTokenStream>() {
@Override
protected NumericTokenStream initialValue() {
return new NumericTokenStream(precisionStep);
return new NumericTokenStream(fieldType.numericPrecisionStep());
}
};
@ -183,23 +220,14 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
};
protected NumberFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings,
protected NumberFieldMapper(MappedFieldType fieldType, Boolean docValues,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, @Nullable Settings fieldDataSettings, Settings indexSettings,
MultiFields multiFields, CopyTo copyTo) {
// LUCENE 4 UPGRADE: Since we can't do anything before the super call, we have to push the boost check down to subclasses
super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer,
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) {
this.precisionStep = Integer.MAX_VALUE;
} else {
this.precisionStep = precisionStep;
}
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
this.ignoreMalformed = ignoreMalformed;
this.coerce = coerce;
Version v = Version.indexCreated(indexSettings);
this.useSortedNumericDocValues = v.onOrAfter(Version.V_1_4_0_Beta1);
this.useSortedNumericDocValues = Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1);
}
@Override
@ -221,12 +249,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
includeInAll = null;
}
protected abstract int maxPrecisionStep();
public int precisionStep() {
return this.precisionStep;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
RuntimeException e = null;
@ -247,41 +269,22 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
protected final void addDocValue(ParseContext context, List<Field> fields, long value) {
if (useSortedNumericDocValues) {
fields.add(new SortedNumericDocValuesField(names().indexName(), value));
fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value));
} else {
CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(names().indexName());
CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field != null) {
field.add(value);
} else {
field = new CustomLongNumericDocValuesField(names().indexName(), value);
context.doc().addWithKey(names().indexName(), field);
field = new CustomLongNumericDocValuesField(fieldType().names().indexName(), value);
context.doc().addWithKey(fieldType().names().indexName(), field);
}
}
}
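Editor's note: addDocValue above picks its representation from the index's creation version — indices created on or after 1.4.0.Beta1 use SortedNumericDocValuesField, older ones keep the custom binary-encoded field. Below is a minimal sketch of that kind of creation-version gate; the Version class and the format strings are stand-ins invented for the example, not the org.elasticsearch.Version API.

public class VersionGate {
    // Stand-in for a comparison like Version.indexCreated(settings).onOrAfter(...).
    static class Version {
        final int major, minor, patch;
        Version(int major, int minor, int patch) { this.major = major; this.minor = minor; this.patch = patch; }
        boolean onOrAfter(Version other) {
            if (major != other.major) return major > other.major;
            if (minor != other.minor) return minor > other.minor;
            return patch >= other.patch;
        }
    }

    static final Version CUTOFF = new Version(1, 4, 0);

    static String docValuesFormat(Version indexCreated) {
        // Indices created before the cutoff keep the legacy custom binary encoding;
        // newer indices use sorted numeric doc values.
        return indexCreated.onOrAfter(CUTOFF) ? "sorted_numeric" : "custom_binary";
    }

    public static void main(String[] args) {
        System.out.println(docValuesFormat(new Version(1, 3, 9))); // custom_binary
        System.out.println(docValuesFormat(new Version(1, 5, 2))); // sorted_numeric
    }
}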
/**
* Use the field query created here when matching on numbers.
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public final Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(new Term(names.indexName(), indexedValueForSearch(value)));
}
@Override
public abstract Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
@Override
public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
/**
* Converts an object value into a double
*/
public double parseDoubleValue(Object value) {
public static double parseDoubleValue(Object value) {
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
@ -296,7 +299,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
/**
* Converts an object value into a long
*/
public long parseLongValue(Object value) {
public static long parseLongValue(Object value) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
@ -308,16 +311,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
return Long.parseLong(value.toString());
}
/**
* Override the default behavior (to return the string, and return the actual Number instance).
*
* @param value
*/
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
@ -326,7 +319,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
if (!mergeResult.simulate()) {
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
this.precisionStep = nfmMergeWith.precisionStep;
this.fieldType = this.fieldType.clone();
this.fieldType.setNumericPrecisionStep(nfmMergeWith.fieldType.numericPrecisionStep());
this.fieldType.freeze();
this.includeInAll = nfmMergeWith.includeInAll;
if (nfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
@ -342,13 +337,13 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
protected NumericTokenStream popCachedStream() {
if (precisionStep == 4) {
if (fieldType.numericPrecisionStep() == 4) {
return tokenStream4.get();
} else if (precisionStep == 8) {
} else if (fieldType.numericPrecisionStep() == 8) {
return tokenStream8.get();
} else if (precisionStep == 16) {
} else if (fieldType.numericPrecisionStep() == 16) {
return tokenStream16.get();
} else if (precisionStep == Integer.MAX_VALUE) {
} else if (fieldType.numericPrecisionStep() == Integer.MAX_VALUE) {
return tokenStreamMax.get();
}
return tokenStream.get();
@ -359,8 +354,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
protected final NumberFieldMapper mapper;
public CustomNumericField(NumberFieldMapper mapper, Number value, FieldType fieldType) {
super(mapper.names().indexName(), fieldType);
public CustomNumericField(NumberFieldMapper mapper, Number value, MappedFieldType fieldType) {
super(mapper.fieldType().names().indexName(), fieldType);
this.mapper = mapper;
if (value != null) {
this.fieldsData = value;
@ -431,13 +426,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDocValuesType(DocValuesType.BINARY);
TYPE.freeze();
}
public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField {
private final LongArrayList values;
@ -481,9 +471,4 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
builder.field("coerce", coerce.value());
}
}
@Override
public boolean isNumeric() {
return true;
}
}
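Editor's note on the merge path above: this is where the immutability of the new field types shows. Instead of mutating precisionStep in place, the mapper clones its frozen field type, adjusts the numeric precision step on the copy, and freezes it before swapping the reference. A small standalone sketch of that copy-on-write pattern, using a hypothetical FrozenSettings class rather than the real MappedFieldType:

public class CloneModifyFreeze {
    static class FrozenSettings {
        private int precisionStep;
        private boolean frozen;

        FrozenSettings(int precisionStep) { this.precisionStep = precisionStep; }

        void setPrecisionStep(int precisionStep) {
            if (frozen) {
                throw new IllegalStateException("settings are frozen");
            }
            this.precisionStep = precisionStep;
        }

        int precisionStep() { return precisionStep; }
        void freeze() { frozen = true; }

        FrozenSettings copy() {
            // The copy starts unfrozen so it can be adjusted before freeze().
            return new FrozenSettings(precisionStep);
        }
    }

    public static void main(String[] args) {
        FrozenSettings current = new FrozenSettings(16);
        current.freeze();

        // Merge: clone, modify the copy, freeze it, then replace the reference.
        FrozenSettings merged = current.copy();
        merged.setPrecisionStep(8);
        merged.freeze();
        current = merged;

        System.out.println(current.precisionStep()); // 8
    }
}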

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
@ -43,13 +42,13 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -69,7 +68,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
public static final int DEFAULT_PRECISION_STEP = 8;
public static class Defaults extends NumberFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new ShortFieldType();
static {
FIELD_TYPE.freeze();
@ -83,7 +82,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
protected Short nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), DEFAULT_PRECISION_STEP);
super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP);
builder = this;
}
@ -94,13 +93,24 @@ public class ShortFieldMapper extends NumberFieldMapper {
@Override
public ShortFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
ShortFieldMapper fieldMapper = new ShortFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings,
setupFieldType(context);
ShortFieldMapper fieldMapper = new ShortFieldMapper(fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), fieldDataSettings,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
String name = precisionStep == Integer.MAX_VALUE ? "_short/max" : ("_short/" + precisionStep);
return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep));
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -124,23 +134,84 @@ public class ShortFieldMapper extends NumberFieldMapper {
}
}
public static class ShortFieldType extends NumberFieldType {
public ShortFieldType() {}
protected ShortFieldType(ShortFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new ShortFieldType(this);
}
@Override
public Short value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).shortValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToShort((BytesRef) value);
}
return Short.parseShort(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : (int)parseValue(lowerTerm),
upperTerm == null ? null : (int)parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
short iValue = Short.parseShort(value);
short iSim = fuzziness.asShort();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
}
private Short nullValue;
private String nullValueAsString;
protected ShortFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected ShortFieldMapper(MappedFieldType fieldType, Boolean docValues,
Short nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, new NamedAnalyzer("_short/" + precisionStep,
new NumericIntegerAnalyzer(precisionStep)), new NamedAnalyzer("_short/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -149,33 +220,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
return new FieldDataType("short");
}
@Override
protected int maxPrecisionStep() {
return 32;
}
@Override
public Short value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).shortValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToShort((BytesRef) value);
}
return Short.parseShort(value.toString());
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private short parseValue(Object value) {
private static short parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).shortValue();
}
@ -185,28 +230,6 @@ public class ShortFieldMapper extends NumberFieldMapper {
return Short.parseShort(value.toString());
}
private int parseValueAsInt(Object value) {
return parseValue(value);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
short iValue = Short.parseShort(value);
short iSim = fuzziness.asShort();
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValueAsInt(lowerTerm),
upperTerm == null ? null : parseValueAsInt(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -223,7 +246,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
short value;
float boost = this.boost;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
@ -245,7 +268,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).shortValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Short.toString(value), boost);
context.allEntries().addText(fieldType.names().fullName(), Short.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -256,7 +279,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -285,16 +308,16 @@ public class ShortFieldMapper extends NumberFieldMapper {
} else {
value = parser.shortValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost);
}
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomShortNumericField field = new CustomShortNumericField(this, value, fieldType);
CustomShortNumericField field = new CustomShortNumericField(this, value, (NumberFieldType)fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -320,8 +343,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != DEFAULT_PRECISION_STEP) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != DEFAULT_PRECISION_STEP) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
@ -334,22 +357,13 @@ public class ShortFieldMapper extends NumberFieldMapper {
}
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
long minValue = NumericUtils.getMinInt(terms);
long maxValue = NumericUtils.getMaxInt(terms);
return new FieldStats.Long(
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
public static class CustomShortNumericField extends CustomNumericField {
private final short number;
private final NumberFieldMapper mapper;
public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) {
public CustomShortNumericField(NumberFieldMapper mapper, short number, NumberFieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;

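Editor's note on the ShortFieldMapper hunk above: one subtlety in its fuzzyQuery is that iValue and iSim are shorts, but the subtraction and addition are evaluated in int arithmetic (Java promotes short operands), and the result feeds an int range query, so bounds near Short.MIN_VALUE or Short.MAX_VALUE do not wrap. A tiny demonstration:

public class ShortRangeWidening {
    public static void main(String[] args) {
        short value = Short.MIN_VALUE; // -32768
        short fuzz = 10;

        // short - short is evaluated as int, so the lower bound is -32778 rather
        // than wrapping around to a large positive short value.
        int lower = value - fuzz;
        int upper = value + fuzz;

        System.out.println(lower + " .. " + upper); // -32778 .. -32758
    }
}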
View File

@ -19,9 +19,7 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
@ -34,19 +32,20 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.lucene.index.IndexOptions.NONE;
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
@ -59,7 +58,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
public static final String CONTENT_TYPE = "string";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new StringFieldType();
static {
FIELD_TYPE.freeze();
@ -77,12 +76,10 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP;
protected NamedAnalyzer searchQuotedAnalyzer;
protected int ignoreAbove = Defaults.IGNORE_ABOVE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
builder = this;
}
@ -94,9 +91,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
@Override
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
super.searchAnalyzer(searchAnalyzer);
if (searchQuotedAnalyzer == null) {
searchQuotedAnalyzer = searchAnalyzer;
}
return this;
}
@ -106,7 +100,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
}
public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) {
this.searchQuotedAnalyzer = analyzer;
this.fieldType.setSearchQuoteAnalyzer(analyzer);
return builder;
}
@ -118,20 +112,20 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
@Override
public StringFieldMapper build(BuilderContext context) {
if (positionOffsetGap > 0) {
indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionOffsetGap);
searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionOffsetGap);
searchQuotedAnalyzer = new NamedAnalyzer(searchQuotedAnalyzer, positionOffsetGap);
fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionOffsetGap));
fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionOffsetGap));
fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionOffsetGap));
}
// if the field is not analyzed, then by default, we should omit norms and have docs only
// index options, as probably what the user really wants
// if they are set explicitly, we will use those values
// we also change the values on the default field type so that toXContent emits what
// differs from the defaults
FieldType defaultFieldType = new FieldType(Defaults.FIELD_TYPE);
MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone();
if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) {
defaultFieldType.setOmitNorms(true);
defaultFieldType.setIndexOptions(IndexOptions.DOCS);
if (!omitNormsSet && boost == Defaults.BOOST) {
if (!omitNormsSet && fieldType.boost() == Defaults.BOOST) {
fieldType.setOmitNorms(true);
}
if (!indexOptionsSet) {
@ -139,9 +133,9 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
}
}
defaultFieldType.freeze();
StringFieldMapper fieldMapper = new StringFieldMapper(buildNames(context),
boost, fieldType, defaultFieldType, docValues, nullValue, indexAnalyzer, searchAnalyzer, searchQuotedAnalyzer,
positionOffsetGap, ignoreAbove, similarity, normsLoading,
setupFieldType(context);
StringFieldMapper fieldMapper = new StringFieldMapper(
fieldType, defaultFieldType, docValues, nullValue, positionOffsetGap, ignoreAbove,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -174,14 +168,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1));
// we need to update to actual analyzers if they are not set in this case...
// so we can inject the position offset gap...
if (builder.indexAnalyzer == null) {
builder.indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer();
if (builder.fieldType.indexAnalyzer() == null) {
builder.fieldType.setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
}
if (builder.searchAnalyzer == null) {
builder.searchAnalyzer = parserContext.analysisService().defaultSearchAnalyzer();
if (builder.fieldType.searchAnalyzer() == null) {
builder.fieldType.setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
}
if (builder.searchQuotedAnalyzer == null) {
builder.searchQuotedAnalyzer = parserContext.analysisService().defaultSearchQuoteAnalyzer();
if (builder.fieldType.searchQuoteAnalyzer() == null) {
builder.fieldType.setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
}
iterator.remove();
} else if (propName.equals("ignore_above")) {
@ -195,32 +189,50 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
}
}
public static class StringFieldType extends MappedFieldType {
public StringFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected StringFieldType(StringFieldType ref) {
super(ref);
}
public StringFieldType clone() {
return new StringFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
}
private String nullValue;
private Boolean includeInAll;
private int positionOffsetGap;
private NamedAnalyzer searchQuotedAnalyzer;
private int ignoreAbove;
private final FieldType defaultFieldType;
private final MappedFieldType defaultFieldType;
protected StringFieldMapper(Names names, float boost, FieldType fieldType, FieldType defaultFieldType, Boolean docValues,
String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
NamedAnalyzer searchQuotedAnalyzer, int positionOffsetGap, int ignoreAbove,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
protected StringFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Boolean docValues,
String nullValue, int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer,
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
if (fieldType.tokenized() && fieldType.indexOptions() != IndexOptions.NONE && hasDocValues()) {
throw new MapperParsingException("Field [" + names.fullName() + "] cannot be analyzed and have doc values");
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
}
this.defaultFieldType = defaultFieldType;
this.nullValue = nullValue;
this.positionOffsetGap = positionOffsetGap;
this.searchQuotedAnalyzer = searchQuotedAnalyzer != null ? searchQuotedAnalyzer : this.searchAnalyzer;
this.ignoreAbove = ignoreAbove;
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return defaultFieldType;
}
@ -248,14 +260,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
includeInAll = null;
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
protected boolean customBoost() {
return true;
@ -269,11 +273,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
return ignoreAbove;
}
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchQuotedAnalyzer;
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -284,7 +283,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, boost);
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, fieldType.boost());
if (valueAndBoost.value() == null) {
return;
}
@ -292,19 +291,19 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), valueAndBoost.value(), valueAndBoost.boost());
context.allEntries().addText(fieldType.names().fullName(), valueAndBoost.value(), valueAndBoost.boost());
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
Field field = new Field(names.indexName(), valueAndBoost.value(), fieldType);
Field field = new Field(fieldType.names().indexName(), valueAndBoost.value(), fieldType);
field.setBoost(valueAndBoost.boost());
fields.add(field);
}
if (hasDocValues()) {
fields.add(new SortedSetDocValuesField(names.indexName(), new BytesRef(valueAndBoost.value())));
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(valueAndBoost.value())));
}
if (fields.isEmpty()) {
context.ignoredValue(names.indexName(), valueAndBoost.value());
context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value());
}
}
@ -381,13 +380,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) {
builder.field("position_offset_gap", positionOffsetGap);
}
if (searchQuotedAnalyzer != null && !searchQuotedAnalyzer.name().equals(searchAnalyzer.name())) {
builder.field("search_quote_analyzer", searchQuotedAnalyzer.name());
NamedAnalyzer searchQuoteAnalyzer = fieldType.searchQuoteAnalyzer();
if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType.searchAnalyzer().name())) {
builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
} else if (includeDefaults) {
if (searchQuotedAnalyzer == null) {
if (searchQuoteAnalyzer == null) {
builder.field("search_quote_analyzer", "default");
} else {
builder.field("search_quote_analyzer", searchQuotedAnalyzer.name());
builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
}
}
if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
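The hunks above move analyzer, naming and boost state off StringFieldMapper and onto its MappedFieldType, so query-time callers now read those settings from the field type. A minimal usage sketch of the resulting accessors (the mapper variable is illustrative, not part of this diff):

    // Sketch only: index/query-time settings are read from the field type, not the mapper.
    MappedFieldType ft = mapper.fieldType();
    String indexName = ft.names().indexName();               // was mapper.names().indexName()
    NamedAnalyzer searchAnalyzer = ft.searchAnalyzer();       // was a field on the mapper
    NamedAnalyzer quoteAnalyzer = ft.searchQuoteAnalyzer();   // was StringFieldMapper.searchQuotedAnalyzer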

View File

@ -22,18 +22,18 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost;
import org.elasticsearch.index.similarity.SimilarityProvider;
@ -43,6 +43,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.lucene.index.IndexOptions.NONE;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
import static org.elasticsearch.index.mapper.MapperBuilders.tokenCountField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
@ -55,6 +56,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
public static final String CONTENT_TYPE = "token_count";
public static class Defaults extends IntegerFieldMapper.Defaults {
}
public static class Builder extends NumberFieldMapper.Builder<Builder, TokenCountFieldMapper> {
@ -62,7 +64,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
private NamedAnalyzer analyzer;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}
@ -82,13 +84,23 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
@Override
public TokenCountFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, context.indexSettings(),
setupFieldType(context);
TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(fieldType, docValues, nullValue,
ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(),
analyzer, multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -122,34 +134,33 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
private NamedAnalyzer analyzer;
protected TokenCountFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Integer nullValue,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer,
protected TokenCountFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer,
MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce,
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, nullValue, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, multiFields, copyTo);
this.analyzer = analyzer;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Our null value is an int so we convert */, boost);
ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Our null value is an int so we convert */, fieldType.boost());
if (valueAndBoost.value() == null && nullValue() == null) {
return;
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored() || hasDocValues()) {
if (fieldType.indexOptions() != NONE || fieldType.stored() || fieldType().hasDocValues()) {
int count;
if (valueAndBoost.value() == null) {
count = nullValue();
} else {
count = countPositions(analyzer.analyzer().tokenStream(names().shortName(), valueAndBoost.value()));
count = countPositions(analyzer.analyzer().tokenStream(fieldType().names().shortName(), valueAndBoost.value()));
}
addIntegerFields(context, fields, count, valueAndBoost.boost());
}
if (fields.isEmpty()) {
context.ignoredValue(names.indexName(), valueAndBoost.value());
context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value());
}
}
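countPositions itself is not shown in this hunk; for context, the token count boils down to consuming the analyzer's TokenStream and summing position increments. A self-contained sketch of that idea (the helper name and wiring are illustrative, not this class's exact implementation):

    // Sketch: count token positions the way token_count values are derived.
    static int countTokenPositions(Analyzer analyzer, String field, String text) throws IOException {
        try (TokenStream stream = analyzer.tokenStream(field, text)) {
            PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
            stream.reset();
            int count = 0;
            while (stream.incrementToken()) {
                count += posIncr.getPositionIncrement();
            }
            stream.end();
            count += posIncr.getPositionIncrement(); // trailing position increment after end()
            return count;
        }
    }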

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.object.ObjectMapper;

View File

@ -24,10 +24,9 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.base.Objects;
import com.google.common.collect.Iterators;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@ -41,10 +40,9 @@ import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -56,7 +54,6 @@ import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -97,7 +94,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
public static class Defaults {
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
public static final boolean STORE = false;
public static final boolean ENABLE_LATLON = false;
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
@ -107,7 +103,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
public static final boolean VALIDATE_LAT = true;
public static final boolean VALIDATE_LON = true;
public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
@ -131,16 +127,15 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
private int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
boolean validateLat = Defaults.VALIDATE_LAT;
boolean validateLon = Defaults.VALIDATE_LON;
boolean normalizeLat = Defaults.NORMALIZE_LAT;
boolean normalizeLon = Defaults.NORMALIZE_LON;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
this.builder = this;
}
GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType;
}
@Override
public Builder multiFieldPathType(ContentPath.Type pathType) {
this.pathType = pathType;
@ -185,6 +180,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
DoubleFieldMapper latMapper = null;
DoubleFieldMapper lonMapper = null;
GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType;
context.path().add(name);
if (enableLatLon) {
@ -196,10 +192,13 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType());
}
StringFieldMapper geohashMapper = null;
if (enableGeoHash) {
if (enableGeoHash || enableGeohashPrefix) {
// TODO: possibly also implicitly enable geohash if geohash precision is set
geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).omitNorms(true).indexOptions(IndexOptions.DOCS).build(context);
geoPointFieldType.setGeohashEnabled(geohashMapper.fieldType(), geoHashPrecision, enableGeohashPrefix);
}
context.path().remove();
@ -208,11 +207,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
// this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to
// store them as a single token.
fieldType.setTokenized(false);
fieldType.setHasDocValues(false);
setupFieldType(context);
return new GeoPointFieldMapper(buildNames(context), fieldType, docValues, indexAnalyzer, searchAnalyzer,
similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep,
geoHashPrecision, latMapper, lonMapper, geohashMapper, validateLon, validateLat, normalizeLon, normalizeLat
, multiFieldsBuilder.build(this, context));
return new GeoPointFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings(), origPathType,
latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context));
}
}
@ -251,24 +250,24 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
iterator.remove();
} else if (fieldName.equals("validate")) {
builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("validate_lon")) {
builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("validate_lat")) {
builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("normalize")) {
builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("normalize_lat")) {
builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("normalize_lon")) {
builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode);
builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
@ -278,6 +277,128 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
}
public static class GeoPointFieldType extends MappedFieldType {
private MappedFieldType geohashFieldType;
private int geohashPrecision;
private boolean geohashPrefixEnabled;
private MappedFieldType latFieldType;
private MappedFieldType lonFieldType;
private boolean validateLon = true;
private boolean validateLat = true;
private boolean normalizeLon = true;
private boolean normalizeLat = true;
public GeoPointFieldType() {
super(StringFieldMapper.Defaults.FIELD_TYPE);
}
protected GeoPointFieldType(GeoPointFieldType ref) {
super(ref);
this.geohashFieldType = ref.geohashFieldType; // copying ref is ok, this can never be modified
this.geohashPrecision = ref.geohashPrecision;
this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
this.validateLon = ref.validateLon;
this.validateLat = ref.validateLat;
this.normalizeLon = ref.normalizeLon;
this.normalizeLat = ref.normalizeLat;
}
@Override
public MappedFieldType clone() {
return new GeoPointFieldType(this);
}
public boolean isGeohashEnabled() {
return geohashFieldType != null;
}
public MappedFieldType geohashFieldType() {
return geohashFieldType;
}
public int geohashPrecision() {
return geohashPrecision;
}
public boolean isGeohashPrefixEnabled() {
return geohashPrefixEnabled;
}
public void setGeohashEnabled(MappedFieldType geohashFieldType, int geohashPrecision, boolean geohashPrefixEnabled) {
checkIfFrozen();
this.geohashFieldType = geohashFieldType;
this.geohashPrecision = geohashPrecision;
this.geohashPrefixEnabled = geohashPrefixEnabled;
}
public boolean isLatLonEnabled() {
return latFieldType != null;
}
public MappedFieldType latFieldType() {
return latFieldType;
}
public MappedFieldType lonFieldType() {
return lonFieldType;
}
public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) {
checkIfFrozen();
this.latFieldType = latFieldType;
this.lonFieldType = lonFieldType;
}
public boolean validateLon() {
return validateLon;
}
public void setValidateLon(boolean validateLon) {
checkIfFrozen();
this.validateLon = validateLon;
}
public boolean validateLat() {
return validateLat;
}
public void setValidateLat(boolean validateLat) {
checkIfFrozen();
this.validateLat = validateLat;
}
public boolean normalizeLon() {
return normalizeLon;
}
public void setNormalizeLon(boolean normalizeLon) {
checkIfFrozen();
this.normalizeLon = normalizeLon;
}
public boolean normalizeLat() {
return normalizeLat;
}
public void setNormalizeLat(boolean normalizeLat) {
checkIfFrozen();
this.normalizeLat = normalizeLat;
}
@Override
public GeoPoint value(Object value) {
if (value instanceof GeoPoint) {
return (GeoPoint) value;
} else {
return GeoPoint.parseFromLatLon(value.toString());
}
}
}
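Every setter on GeoPointFieldType above funnels through checkIfFrozen(), so the type is only mutable while the builder is still assembling it and becomes immutable once frozen. The guard lives in the MappedFieldType base class, outside this diff; conceptually it amounts to something like:

    // Illustrative sketch of the freeze guard the setters above rely on.
    private boolean frozen = false;

    public void freeze() {
        frozen = true;
    }

    protected void checkIfFrozen() {
        if (frozen) {
            throw new IllegalStateException("this field type is already frozen and cannot be changed");
        }
    }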
/**
* A byte-aligned fixed-length encoding for latitudes and longitudes.
*/
@ -405,52 +526,19 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
private final ContentPath.Type pathType;
private final boolean enableLatLon;
private final boolean enableGeoHash;
private final boolean enableGeohashPrefix;
private final Integer precisionStep;
private final int geoHashPrecision;
private final DoubleFieldMapper latMapper;
private final DoubleFieldMapper lonMapper;
private final StringFieldMapper geohashMapper;
private boolean validateLon;
private boolean validateLat;
private final boolean normalizeLon;
private final boolean normalizeLat;
public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings,
ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision,
DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
boolean validateLon, boolean validateLat,
boolean normalizeLon, boolean normalizeLat, MultiFields multiFields) {
super(names, 1f, fieldType, docValues, null, indexAnalyzer, similarity, null, fieldDataSettings, indexSettings, multiFields, null);
public GeoPointFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings,
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, MultiFields multiFields) {
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, null);
this.pathType = pathType;
this.enableLatLon = enableLatLon;
this.enableGeoHash = enableGeoHash || enableGeohashPrefix; // implicitly enable geohashes if geohash_prefix is set
this.enableGeohashPrefix = enableGeohashPrefix;
this.precisionStep = precisionStep;
this.geoHashPrecision = geoHashPrecision;
this.latMapper = latMapper;
this.lonMapper = lonMapper;
this.geohashMapper = geohashMapper;
this.validateLat = validateLat;
this.validateLon = validateLon;
this.normalizeLat = normalizeLat;
this.normalizeLon = normalizeLon;
}
@Override
@ -459,7 +547,12 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
@Override
public FieldType defaultFieldType() {
public GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType;
}
@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -473,39 +566,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
return false;
}
public DoubleFieldMapper latMapper() {
return latMapper;
}
public DoubleFieldMapper lonMapper() {
return lonMapper;
}
public StringFieldMapper geoHashStringMapper() {
return this.geohashMapper;
}
int geoHashPrecision() {
return geoHashPrecision;
}
public boolean isEnableLatLon() {
return enableLatLon;
}
public boolean isEnableGeohashPrefix() {
return enableGeohashPrefix;
}
@Override
public GeoPoint value(Object value) {
if (value instanceof GeoPoint) {
return (GeoPoint) value;
} else {
return GeoPoint.parseFromLatLon(value.toString());
}
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
@ -515,7 +575,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
public Mapper parse(ParseContext context) throws IOException {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(names().shortName());
context.path().add(fieldType().names().shortName());
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);
@ -565,9 +625,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
return null;
}
private void parseGeohashField(ParseContext context, String geohash) throws IOException {
int len = Math.min(geoHashPrecision, geohash.length());
int min = enableGeohashPrefix ? 1 : geohash.length();
private void addGeohashField(ParseContext context, String geohash) throws IOException {
int len = Math.min(fieldType().geohashPrecision(), geohash.length());
int min = fieldType().isGeohashPrefixEnabled() ? 1 : geohash.length();
for (int i = len; i >= min; i--) {
// side effect of this call is adding the field
@ -584,40 +644,40 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
if (fieldType().normalizeLat() || fieldType().normalizeLon()) {
GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon());
}
if (validateLat) {
if (fieldType().validateLat()) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
}
}
if (validateLon) {
if (fieldType().validateLon()) {
if (point.lon() > 180.0 || point.lon() < -180) {
throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
Field field = new Field(names.indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType);
Field field = new Field(fieldType.names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType);
context.doc().add(field);
}
if (enableGeoHash) {
if (fieldType().isGeohashEnabled()) {
if (geohash == null) {
geohash = GeoHashUtils.encode(point.lat(), point.lon());
}
parseGeohashField(context, geohash);
addGeohashField(context, geohash);
}
if (enableLatLon) {
if (fieldType().isLatLonEnabled()) {
latMapper.parse(context.createExternalValueContext(point.lat()));
lonMapper.parse(context.createExternalValueContext(point.lon()));
}
if (hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(names().indexName());
if (fieldType().hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field == null) {
field = new CustomGeoPointDocValuesField(names().indexName(), point.lat(), point.lon());
context.doc().addWithKey(names().indexName(), field);
field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().names().indexName(), field);
} else {
field.add(point.lat(), point.lon());
}
@ -647,42 +707,43 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith;
if (this.enableLatLon != fieldMergeWith.enableLatLon) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different lat_lon");
if (this.fieldType().isLatLonEnabled() != fieldMergeWith.fieldType().isLatLonEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different lat_lon");
}
if (this.enableGeoHash != fieldMergeWith.enableGeoHash) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash");
if (this.fieldType().isGeohashEnabled() != fieldMergeWith.fieldType().isGeohashEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash");
}
if (this.geoHashPrecision != fieldMergeWith.geoHashPrecision) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_precision");
if (this.fieldType().geohashPrecision() != fieldMergeWith.fieldType().geohashPrecision()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_precision");
}
if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix");
if (this.fieldType().isGeohashPrefixEnabled() != fieldMergeWith.fieldType().isGeohashPrefixEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_prefix");
}
if (this.normalizeLat != fieldMergeWith.normalizeLat) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lat");
if (this.fieldType().normalizeLat() != fieldMergeWith.fieldType().normalizeLat()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lat");
}
if (this.normalizeLon != fieldMergeWith.normalizeLon) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lon");
if (this.fieldType().normalizeLon() != fieldMergeWith.fieldType().normalizeLon()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lon");
}
if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different precision_step");
if (fieldType().isLatLonEnabled() &&
this.fieldType().latFieldType().numericPrecisionStep() != fieldMergeWith.fieldType().latFieldType().numericPrecisionStep()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision_step");
}
if (this.validateLat != fieldMergeWith.validateLat) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lat");
if (this.fieldType().validateLat() != fieldMergeWith.fieldType().validateLat()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lat");
}
if (this.validateLon != fieldMergeWith.validateLon) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lon");
if (this.fieldType().validateLon() != fieldMergeWith.fieldType().validateLon()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lon");
}
}
@Override
public Iterator<Mapper> iterator() {
List<Mapper> extras = new ArrayList<>();
if (enableGeoHash) {
if (fieldType().isGeohashEnabled()) {
extras.add(geohashMapper);
}
if (enableLatLon) {
if (fieldType().isLatLonEnabled()) {
extras.add(latMapper);
extras.add(lonMapper);
}
@ -695,46 +756,46 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
if (includeDefaults || pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (includeDefaults || enableLatLon != Defaults.ENABLE_LATLON) {
builder.field("lat_lon", enableLatLon);
if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) {
builder.field("lat_lon", fieldType().isLatLonEnabled());
}
if (includeDefaults || enableGeoHash != Defaults.ENABLE_GEOHASH) {
builder.field("geohash", enableGeoHash);
if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) {
builder.field("geohash", fieldType().isGeohashEnabled());
}
if (includeDefaults || enableGeohashPrefix != Defaults.ENABLE_GEOHASH_PREFIX) {
builder.field("geohash_prefix", enableGeohashPrefix);
if (includeDefaults || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled());
}
if (includeDefaults || geoHashPrecision != Defaults.GEO_HASH_PRECISION) {
builder.field("geohash_precision", geoHashPrecision);
if (fieldType().isGeohashEnabled() && (includeDefaults || fieldType().geohashPrecision() != Defaults.GEO_HASH_PRECISION)) {
builder.field("geohash_precision", fieldType().geohashPrecision());
}
if (includeDefaults || precisionStep != null) {
builder.field("precision_step", precisionStep);
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
}
if (includeDefaults || validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON) {
if (validateLat && validateLon) {
if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) {
if (fieldType().validateLat() && fieldType().validateLon()) {
builder.field("validate", true);
} else if (!validateLat && !validateLon) {
} else if (!fieldType().validateLat() && !fieldType().validateLon()) {
builder.field("validate", false);
} else {
if (includeDefaults || validateLat != Defaults.VALIDATE_LAT) {
builder.field("validate_lat", validateLat);
if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) {
builder.field("validate_lat", fieldType().validateLat());
}
if (includeDefaults || validateLon != Defaults.VALIDATE_LON) {
builder.field("validate_lon", validateLon);
if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) {
builder.field("validate_lon", fieldType().validateLon());
}
}
}
if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON) {
if (normalizeLat && normalizeLon) {
if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
if (fieldType().normalizeLat() && fieldType().normalizeLon()) {
builder.field("normalize", true);
} else if (!normalizeLat && !normalizeLon) {
} else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) {
builder.field("normalize", false);
} else {
if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT) {
builder.field("normalize_lat", normalizeLat);
if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) {
builder.field("normalize_lat", fieldType().normalizeLat());
}
if (includeDefaults || normalizeLon != Defaults.NORMALIZE_LON) {
builder.field("normalize_lon", normalizeLat);
if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
builder.field("normalize_lon", fieldType().normalizeLon());
}
}
}
@ -742,15 +803,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDocValuesType(DocValuesType.BINARY);
TYPE.freeze();
}
private final ObjectHashSet<GeoPoint> points;
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
super(name);
points = new ObjectHashSet<>(2);
points.add(new GeoPoint(lat, lon));
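Taken together, GeoPointFieldType lets downstream code discover lat/lon, geohash and validation settings without reaching into the mapper's private fields. A hedged usage sketch (geoMapper is an assumed variable; the actual query construction is elided):

    // Illustrative only: reading geo_point settings off the shared field type.
    GeoPointFieldMapper.GeoPointFieldType geoType = geoMapper.fieldType();
    if (geoType.isLatLonEnabled()) {
        int precisionStep = geoType.latFieldType().numericPrecisionStep();
        // ... build numeric queries against the .lat / .lon sub-fields
    }
    if (geoType.isGeohashEnabled()) {
        int precision = geoType.geohashPrecision();
        boolean usePrefixes = geoType.isGeohashPrefixEnabled();
        // ... query the geohash sub-field, optionally by prefix
    }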

View File

@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.geo;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
@ -40,10 +39,11 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
@ -94,7 +94,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
public static final double DISTANCE_ERROR_PCT = 0.025d;
public static final Orientation ORIENTATION = Orientation.RIGHT;
public static final FieldType FIELD_TYPE = new FieldType();
public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
@ -119,7 +119,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
private SpatialPrefixTree prefixTree;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
super(name, Defaults.FIELD_TYPE);
}
public Builder tree(String tree) {
@ -155,7 +155,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
@Override
public GeoShapeFieldMapper build(BuilderContext context) {
final FieldMapper.Names names = buildNames(context);
if (Names.TREE_GEOHASH.equals(tree)) {
prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
} else if (Names.TREE_QUADTREE.equals(tree)) {
@ -169,9 +168,19 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
} else {
throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
}
setupFieldType(context);
return new GeoShapeFieldMapper(names, prefixTree, strategyName, distanceErrorPct, orientation, fieldType,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
RecursivePrefixTreeStrategy recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, fieldType.names().indexName());
recursiveStrategy.setDistErrPct(distanceErrorPct);
recursiveStrategy.setPruneLeafyBranches(false);
TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, fieldType.names().indexName());
termStrategy.setDistErrPct(distanceErrorPct);
GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
geoShapeFieldType.setStrategies(strategyName, recursiveStrategy, termStrategy);
geoShapeFieldType.setOrientation(orientation);
return new GeoShapeFieldMapper(fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
private final int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
@ -223,25 +232,83 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
}
private final PrefixTreeStrategy defaultStrategy;
private final RecursivePrefixTreeStrategy recursiveStrategy;
private final TermQueryPrefixTreeStrategy termStrategy;
private Orientation shapeOrientation;
public static class GeoShapeFieldType extends MappedFieldType {
public GeoShapeFieldMapper(FieldMapper.Names names, SpatialPrefixTree tree, String defaultStrategyName, double distanceErrorPct,
Orientation shapeOrientation, FieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, 1, fieldType, false, null, null, null, null, null, indexSettings, multiFields, copyTo);
this.recursiveStrategy = new RecursivePrefixTreeStrategy(tree, names.indexName());
this.recursiveStrategy.setDistErrPct(distanceErrorPct);
this.recursiveStrategy.setPruneLeafyBranches(false);
this.termStrategy = new TermQueryPrefixTreeStrategy(tree, names.indexName());
this.termStrategy.setDistErrPct(distanceErrorPct);
this.defaultStrategy = resolveStrategy(defaultStrategyName);
this.shapeOrientation = shapeOrientation;
private PrefixTreeStrategy defaultStrategy;
private RecursivePrefixTreeStrategy recursiveStrategy;
private TermQueryPrefixTreeStrategy termStrategy;
private Orientation orientation;
public GeoShapeFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected GeoShapeFieldType(GeoShapeFieldType ref) {
super(ref);
// TODO: this shallow copy is probably not good...need to extract the parameters and recreate the tree and strategies?
this.defaultStrategy = ref.defaultStrategy;
this.recursiveStrategy = ref.recursiveStrategy;
this.termStrategy = ref.termStrategy;
this.orientation = ref.orientation;
}
@Override
public MappedFieldType clone() {
return new GeoShapeFieldType(this);
}
public PrefixTreeStrategy defaultStrategy() {
return this.defaultStrategy;
}
public PrefixTreeStrategy resolveStrategy(String strategyName) {
if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) {
return recursiveStrategy;
}
if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) {
return termStrategy;
}
throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
}
public void setStrategies(String defaultStrategy, RecursivePrefixTreeStrategy recursiveStrategy, TermQueryPrefixTreeStrategy termStrategy) {
checkIfFrozen();
this.recursiveStrategy = recursiveStrategy;
this.termStrategy = termStrategy;
this.defaultStrategy = resolveStrategy(defaultStrategy);
}
public void setDistErrPct(double distErrPct) {
checkIfFrozen();
this.recursiveStrategy.setDistErrPct(distErrPct);
this.termStrategy.setDistErrPct(distErrPct);
}
public Orientation orientation() { return this.orientation; }
public void setOrientation(Orientation orientation) {
checkIfFrozen();
this.orientation = orientation;
}
@Override
public String value(Object value) {
throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values");
}
}
public GeoShapeFieldMapper(MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, false, null, indexSettings, multiFields, copyTo);
}
@Override
public FieldType defaultFieldType() {
public GeoShapeFieldType fieldType() {
return (GeoShapeFieldType)fieldType;
}
@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -261,18 +328,18 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
shape = shapeBuilder.build();
}
Field[] fields = defaultStrategy.createIndexableFields(shape);
Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape);
if (fields == null || fields.length == 0) {
return null;
}
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(boost);
field.setBoost(fieldType.boost());
}
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e);
}
return null;
}
@ -281,29 +348,29 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different field type");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different field type");
return;
}
final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;
final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.defaultStrategy;
final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.fieldType().defaultStrategy();
// prevent user from changing strategies
if (!(this.defaultStrategy.getClass().equals(mergeWithStrategy.getClass()))) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different strategy");
if (!(this.fieldType().defaultStrategy().getClass().equals(mergeWithStrategy.getClass()))) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different strategy");
}
final SpatialPrefixTree grid = this.defaultStrategy.getGrid();
final SpatialPrefixTree grid = this.fieldType().defaultStrategy().getGrid();
final SpatialPrefixTree mergeGrid = mergeWithStrategy.getGrid();
// prevent user from changing trees (changes encoding)
if (!grid.getClass().equals(mergeGrid.getClass())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision");
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels or precision");
}
// bail if there were merge conflicts
@ -312,11 +379,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
// change distance error percent
this.defaultStrategy.setDistErrPct(mergeWithStrategy.getDistErrPct());
this.fieldType = this.fieldType.clone();
this.fieldType().setDistErrPct(mergeWithStrategy.getDistErrPct());
// change orientation - this is allowed because existing dateline spanning shapes
// have already been unwound and segmented
this.shapeOrientation = fieldMergeWith.shapeOrientation;
this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation());
this.fieldType.freeze();
}
@Override
@ -328,25 +396,25 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
builder.field("type", contentType());
// TODO: Come up with a better way to get the name, maybe pass it from builder
if (defaultStrategy.getGrid() instanceof GeohashPrefixTree) {
if (fieldType().defaultStrategy().getGrid() instanceof GeohashPrefixTree) {
// Don't emit the tree name since GeohashPrefixTree is the default
// Only emit the tree levels if it isn't the default value
if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) {
builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels());
if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) {
builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels());
}
} else {
builder.field(Names.TREE, Names.TREE_QUADTREE);
if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) {
builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels());
if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) {
builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels());
}
}
if (includeDefaults || defaultStrategy.getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) {
builder.field(Names.DISTANCE_ERROR_PCT, defaultStrategy.getDistErrPct());
if (includeDefaults || fieldType().defaultStrategy().getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) {
builder.field(Names.DISTANCE_ERROR_PCT, fieldType().defaultStrategy().getDistErrPct());
}
if (includeDefaults || orientation() != Defaults.ORIENTATION) {
builder.field(Names.ORIENTATION, orientation());
if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) {
builder.field(Names.ORIENTATION, fieldType().orientation());
}
}
@ -354,34 +422,4 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values");
}
public PrefixTreeStrategy defaultStrategy() {
return this.defaultStrategy;
}
public PrefixTreeStrategy recursiveStrategy() {
return this.recursiveStrategy;
}
public PrefixTreeStrategy termStrategy() {
return this.termStrategy;
}
public Orientation orientation() { return this.shapeOrientation; }
public PrefixTreeStrategy resolveStrategy(String strategyName) {
if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) {
return recursiveStrategy;
}
if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) {
return termStrategy;
}
throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
}
}
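With the prefix-tree strategies now owned by GeoShapeFieldType, query-side code resolves them from the field type rather than from mapper getters. An illustrative sketch (shapeMapper is an assumed variable; the geo_shape query wiring itself is outside this diff):

    // Sketch only: picking a spatial strategy off the field type.
    GeoShapeFieldMapper.GeoShapeFieldType shapeType = shapeMapper.fieldType();
    PrefixTreeStrategy strategy = shapeType.defaultStrategy();
    // or request a specific one, e.g. the term-based strategy:
    PrefixTreeStrategy termStrategy = shapeType.resolveStrategy(SpatialStrategy.TERM.getStrategyName());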

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
@ -34,8 +33,8 @@ import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -45,7 +44,6 @@ import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -80,11 +78,12 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
public static final String INDEX_NAME = AllFieldMapper.NAME;
public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED;
public static final FieldType FIELD_TYPE = new FieldType();
public static final MappedFieldType FIELD_TYPE = new AllFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
}
@ -94,7 +93,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
private EnabledAttributeMapper enabled = Defaults.ENABLED;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
super(Defaults.NAME, Defaults.FIELD_TYPE);
builder = this;
indexName = Defaults.INDEX_NAME;
}
@ -113,7 +112,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
}
fieldType.setTokenized(true);
return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, similarity, normsLoading, fieldDataSettings, context.indexSettings());
return new AllFieldMapper(fieldType, enabled, fieldDataSettings, context.indexSettings());
}
}
@ -156,18 +155,49 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public static class AllFieldType extends MappedFieldType {
public AllFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected AllFieldType(AllFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new AllFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public Query queryStringTermQuery(Term term) {
return new AllTermQuery(term);
}
@Override
public Query termQuery(Object value, QueryParseContext context) {
return queryStringTermQuery(createTerm(value));
}
}
private EnabledAttributeMapper enabledState;
public AllFieldMapper(Settings indexSettings) {
this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, null, null, null, indexSettings);
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED, null, indexSettings);
}
protected AllFieldMapper(String name, FieldType fieldType, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
EnabledAttributeMapper enabled, SimilarityProvider similarity, Loading normsLoading,
protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, name, name, name), 1.0f, fieldType, false, indexAnalyzer, searchAnalyzer,
similarity, normsLoading, fieldDataSettings, indexSettings);
super(fieldType, false, fieldDataSettings, indexSettings);
this.enabledState = enabled;
}
@ -177,7 +207,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -186,16 +216,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
return new FieldDataType("string");
}
@Override
public Query queryStringTermQuery(Term term) {
return new AllTermQuery(term);
}
@Override
public Query termQuery(Object value, QueryParseContext context) {
return queryStringTermQuery(createTerm(value));
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@ -219,11 +239,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
// reset the entries
context.allEntries().reset();
Analyzer analyzer = findAnalyzer(context);
fields.add(new AllField(names.indexName(), context.allEntries(), analyzer, fieldType));
fields.add(new AllField(fieldType.names().indexName(), context.allEntries(), analyzer, fieldType));
}
private Analyzer findAnalyzer(ParseContext context) {
Analyzer analyzer = indexAnalyzer;
Analyzer analyzer = fieldType.indexAnalyzer();
if (analyzer == null) {
analyzer = context.docMapper().mappers().indexAnalyzer();
if (analyzer == null) {
@ -233,14 +253,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
}
return analyzer;
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
protected String contentType() {
@ -294,8 +306,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
doXContentAnalyzers(builder, includeDefaults);
if (similarity() != null) {
builder.field("similarity", similarity().name());
if (fieldType().similarity() != null) {
builder.field("similarity", fieldType().similarity().name());
} else if (includeDefaults) {
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
}
@ -303,14 +315,14 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
mergeResult.addConflict("mapper [" + names.fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
}
super.merge(mergeWith, mergeResult);
}
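Because AllFieldType now owns termQuery and queryStringTermQuery, a term query against _all comes back wrapped in an AllTermQuery, preserving the per-token boosts recorded at index time. An illustrative call, assuming an AllFieldMapper and a QueryParseContext are already in scope:

    // Illustrative only: term queries on _all now route through the field type.
    Query allQuery = allFieldMapper.fieldType().termQuery("quick brown fox", parseContext);
    // allQuery is an AllTermQuery built over the _all index name.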

View File

@ -31,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -38,6 +39,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.io.IOException;
import java.util.ArrayList;
@ -65,13 +67,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
public static final String NAME = FieldNamesFieldMapper.NAME;
public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_ENABLED;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
}
@ -80,7 +85,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
private EnabledAttributeMapper enabledState = Defaults.ENABLED_STATE;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
super(Defaults.NAME, Defaults.FIELD_TYPE);
indexName = Defaults.NAME;
}
@ -98,7 +103,8 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
@Override
public FieldNamesFieldMapper build(BuilderContext context) {
return new FieldNamesFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new FieldNamesFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings());
}
}
@ -127,17 +133,45 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
}
private final FieldType defaultFieldType;
public static class FieldNamesFieldType extends MappedFieldType {
public FieldNamesFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected FieldNamesFieldType(FieldNamesFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new FieldNamesFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
}
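Every root mapper touched below repeats the extraction pattern shown in FieldNamesFieldType above. A minimal sketch of that skeleton, with placeholder names rather than code from the commit:

    public static class ExampleFieldType extends MappedFieldType {

        public ExampleFieldType() {
            super(AbstractFieldMapper.Defaults.FIELD_TYPE);  // start from the shared defaults
        }

        protected ExampleFieldType(ExampleFieldType ref) {
            super(ref);  // copy constructor backing clone()
        }

        @Override
        public MappedFieldType clone() {
            return new ExampleFieldType(this);  // cloned before mutation, then frozen again
        }

        @Override
        public String value(Object value) {
            return value == null ? null : value.toString();  // index/query-time value conversion now lives here
        }
    }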
private final MappedFieldType defaultFieldType;
private EnabledAttributeMapper enabledState;
private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
public FieldNamesFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings);
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
}
public FieldNamesFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
public FieldNamesFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
this.defaultFieldType = Defaults.FIELD_TYPE;
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
this.enabledState = enabledState;
@ -148,7 +182,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return defaultFieldType;
}
@ -157,19 +191,6 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
return new FieldDataType("string");
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@ -230,7 +251,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
document.add(new Field(names().indexName(), fieldName, fieldType));
document.add(new Field(fieldType().names().indexName(), fieldName, fieldType));
}
}
}
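One detail worth noting in the hunk above: the mapper still hands its field type straight to Lucene's Field constructor, which only works because MappedFieldType remains usable wherever a Lucene FieldType is expected. A hypothetical, standalone illustration (field name and value are invented for the example):

    MappedFieldType ft = new FieldNamesFieldMapper.FieldNamesFieldType();
    ft.setNames(new MappedFieldType.Names("_field_names"));
    Field indexed = new Field(ft.names().indexName(), "title", ft);  // accepted directly as the Lucene field type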

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
import com.google.common.collect.Iterables;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
@ -43,6 +42,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -73,14 +73,16 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = IdFieldMapper.NAME;
public static final String INDEX_NAME = IdFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new IdFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
@ -92,8 +94,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
private String path = Defaults.PATH;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
indexName = Defaults.INDEX_NAME;
super(Defaults.NAME, Defaults.FIELD_TYPE);
indexName = Defaults.NAME;
}
public Builder path(String path) {
@ -108,7 +110,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
@Override
public IdFieldMapper build(BuilderContext context) {
return new IdFieldMapper(name, indexName, boost, fieldType, docValues, path, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new IdFieldMapper(fieldType, docValues, path, fieldDataSettings, context.indexSettings());
}
}
@ -133,21 +136,109 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public static class IdFieldType extends MappedFieldType {
public IdFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected IdFieldType(IdFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new IdFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termQuery(value, context);
}
final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value);
return new TermsQuery(UidFieldMapper.NAME, uids);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termsQuery(values, context);
}
return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values));
}
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.prefixQuery(value, method, context);
}
Collection<String> queryTypes = context.queryTypes();
BooleanQuery query = new BooleanQuery();
for (String queryType : queryTypes) {
PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
if (method != null) {
prefixQuery.setRewriteMethod(method);
}
query.add(prefixQuery, BooleanClause.Occur.SHOULD);
}
return query;
}
@Override
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
}
Collection<String> queryTypes = context.queryTypes();
if (queryTypes.size() == 1) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))),
flags, maxDeterminizedStates);
if (method != null) {
regexpQuery.setRewriteMethod(method);
}
return regexpQuery;
}
BooleanQuery query = new BooleanQuery();
for (String queryType : queryTypes) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates);
if (method != null) {
regexpQuery.setRewriteMethod(method);
}
query.add(regexpQuery, BooleanClause.Occur.SHOULD);
}
return query;
}
}
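With the _id query construction now on IdFieldType, callers only need the field type. A hedged sketch of the caller side; fieldMapper and parseContext are assumed to come from the surrounding query parser:

    MappedFieldType idFieldType = fieldMapper.fieldType();
    Query query;
    if (idFieldType.useTermQueryWithQueryString()) {
        // for _id this rewrites into a TermsQuery over the _uid terms of the queried types
        query = idFieldType.termQuery("1", parseContext);
    } else {
        query = null;  // fall back to analyzed query building in the parser
    }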
private final String path;
public IdFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.INDEX_NAME, Defaults.BOOST, idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings);
this(idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings);
}
protected IdFieldMapper(String name, String indexName, float boost, FieldType fieldType, Boolean docValues, String path,
protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
super(fieldType, docValues, fieldDataSettings, indexSettings);
this.path = path;
}
private static FieldType idFieldType(Settings indexSettings) {
FieldType fieldType = new FieldType(Defaults.FIELD_TYPE);
private static MappedFieldType idFieldType(Settings indexSettings) {
MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {
fieldType.setTokenized(false);
@ -160,7 +251,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -169,78 +260,6 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
return new FieldDataType("string");
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
return super.termQuery(value, context);
}
final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value);
return new TermsQuery(UidFieldMapper.NAME, uids);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
return super.termsQuery(values, context);
}
return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values));
}
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
return super.prefixQuery(value, method, context);
}
Collection<String> queryTypes = context.queryTypes();
BooleanQuery query = new BooleanQuery();
for (String queryType : queryTypes) {
PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
if (method != null) {
prefixQuery.setRewriteMethod(method);
}
query.add(prefixQuery, BooleanClause.Occur.SHOULD);
}
return query;
}
@Override
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
}
Collection<String> queryTypes = context.queryTypes();
if (queryTypes.size() == 1) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))),
flags, maxDeterminizedStates);
if (method != null) {
regexpQuery.setRewriteMethod(method);
}
return regexpQuery;
}
BooleanQuery query = new BooleanQuery();
for (String queryType : queryTypes) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates);
if (method != null) {
regexpQuery.setRewriteMethod(method);
}
query.add(regexpQuery, BooleanClause.Occur.SHOULD);
}
return query;
}
@Override
public void preParse(ParseContext context) throws IOException {
if (context.sourceToParse().id() != null) {
@ -270,10 +289,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
} // else we are in the pre/post parse phase
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
fields.add(new Field(names.indexName(), context.id(), fieldType));
fields.add(new Field(fieldType.names().indexName(), context.id(), fieldType));
}
if (hasDocValues()) {
fields.add(new BinaryDocValuesField(names.indexName(), new BytesRef(context.id())));
if (fieldType().hasDocValues()) {
fields.add(new BinaryDocValuesField(fieldType.names().indexName(), new BytesRef(context.id())));
}
}
@ -310,7 +329,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.io.IOException;
import java.util.Iterator;
@ -59,13 +60,16 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = IndexFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new IndexFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
@ -77,7 +81,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
super(Defaults.NAME, Defaults.FIELD_TYPE);
indexName = Defaults.NAME;
}
@ -88,7 +92,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
@Override
public IndexFieldMapper build(BuilderContext context) {
return new IndexFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new IndexFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings());
}
}
@ -114,16 +119,39 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
}
}
public static class IndexFieldType extends MappedFieldType {
public IndexFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected IndexFieldType(IndexFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new IndexFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
}
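The Defaults blocks now hold frozen MappedFieldType prototypes, so the old new FieldType(Defaults.FIELD_TYPE) copies become clone() calls. A short sketch of the idiom; the field name is invented for the example:

    MappedFieldType prototype = IndexFieldMapper.Defaults.FIELD_TYPE;  // frozen once in the static initializer
    MappedFieldType working = prototype.clone();                       // unfrozen working copy for one mapper
    working.setNames(new MappedFieldType.Names("my_field"));           // safe: only the copy is mutated
    working.freeze();                                                  // frozen again before being shared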
private EnabledAttributeMapper enabledState;
public IndexFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings);
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
}
public IndexFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState,
public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
super(fieldType, false, fieldDataSettings, indexSettings);
this.enabledState = enabledState;
}
@ -132,7 +160,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -142,16 +170,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
}
public String value(Document document) {
Field field = (Field) document.getField(names.indexName());
return field == null ? null : value(field);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
Field field = (Field) document.getField(fieldType.names().indexName());
return field == null ? null : (String)fieldType().value(field);
}
@Override
@ -174,7 +194,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
if (!enabledState.enabled) {
return;
}
fields.add(new Field(names.indexName(), context.index(), fieldType));
fields.add(new Field(fieldType.names().indexName(), context.index(), fieldType));
}
@Override
@ -202,7 +222,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
}
builder.endObject();

View File

@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal;
import com.google.common.base.Objects;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
@ -35,6 +34,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -70,18 +70,21 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = ParentFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, ParentFieldMapper> {
public static class Builder extends AbstractFieldMapper.Builder<Builder, ParentFieldMapper> {
protected String indexName;
@ -89,7 +92,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
protected Settings fieldDataSettings;
public Builder() {
super(Defaults.NAME);
super(Defaults.NAME, Defaults.FIELD_TYPE);
this.indexName = name;
builder = this;
}
@ -109,7 +112,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
if (type == null) {
throw new MapperParsingException("Parent mapping must contain the parent type");
}
return new ParentFieldMapper(name, indexName, type, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings());
}
}
@ -130,8 +134,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
} else if (fieldName.equals("fielddata")) {
// Only take over `loading`, since that is the only option now that is configurable:
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
if (fieldDataSettings.containsKey(Loading.KEY)) {
Settings settings = settingsBuilder().put(Loading.KEY, fieldDataSettings.get(Loading.KEY)).build();
if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) {
Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build();
builder.fieldDataSettings(settings);
}
iterator.remove();
@ -141,19 +145,101 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
}
}
public static class ParentFieldType extends MappedFieldType {
public ParentFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected ParentFieldType(ParentFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new ParentFieldType(this);
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
@Override
public Object valueForSearch(Object value) {
if (value == null) {
return null;
}
String sValue = value.toString();
if (sValue == null) {
return null;
}
int index = sValue.indexOf(Uid.DELIMITER);
if (index == -1) {
return sValue;
}
return sValue.substring(index + 1);
}
/**
* We don't need to analyze the text, and we need to convert it to UID...
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return termsQuery(Collections.singletonList(value), context);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
if (context == null) {
return super.termsQuery(values, context);
}
List<String> types = new ArrayList<>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
List<BytesRef> bValues = new ArrayList<>(values.size());
for (Object value : values) {
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
bValues.add(bValue);
} else {
// we use all non-child types, because we don't know if it's exact or not...
for (String type : types) {
bValues.add(Uid.createUidAsBytes(type, bValue));
}
}
}
return new TermsQuery(names().indexName(), bValues);
}
}
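termsQuery above qualifies a raw parent id with every candidate (non-child) type, while values that already contain the uid delimiter pass through untouched. A standalone, dependency-free sketch of that rule; the type names are invented and the '#' delimiter is written out literally for illustration:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ParentTermsSketch {
        public static void main(String[] args) {
            List<String> nonChildTypes = Arrays.asList("blog", "user");  // assumed candidate types
            String value = "1";                                          // parent id as given in the query
            List<String> uidTerms = new ArrayList<>();
            if (value.indexOf('#') != -1) {
                uidTerms.add(value);                                     // already a full uid, use as-is
            } else {
                for (String type : nonChildTypes) {
                    uidTerms.add(type + "#" + value);                    // qualify with each candidate type
                }
            }
            System.out.println(uidTerms);                                // prints [blog#1, user#1]
        }
    }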
private final String type;
private final BytesRef typeAsBytes;
protected ParentFieldMapper(String name, String indexName, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
this.type = type;
this.typeAsBytes = type == null ? null : new BytesRef(type);
}
public ParentFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.NAME, null, null, indexSettings);
this.fieldDataType = new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.LAZY_VALUE));
this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
this.fieldType = this.fieldType.clone();
this.fieldType.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
this.fieldType.freeze();
}
public String type() {
@ -161,13 +247,13 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.EAGER_VALUE));
return new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.EAGER_VALUE));
}
@Override
@ -189,7 +275,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else {
// otherwise, we are running it post processing of the xcontent
String parsedParentId = context.doc().get(Defaults.NAME);
@ -200,7 +286,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
}
@ -209,87 +295,6 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
// we have parent mapping, yet no value was set, ignore it...
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
@Override
public Object valueForSearch(Object value) {
if (value == null) {
return null;
}
String sValue = value.toString();
if (sValue == null) {
return null;
}
int index = sValue.indexOf(Uid.DELIMITER);
if (index == -1) {
return sValue;
}
return sValue.substring(index + 1);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value instanceof BytesRef) {
BytesRef bytesRef = (BytesRef) value;
if (Uid.hasDelimiter(bytesRef)) {
return bytesRef;
}
return Uid.createUidAsBytes(typeAsBytes, bytesRef);
}
String sValue = value.toString();
if (sValue.indexOf(Uid.DELIMITER) == -1) {
return Uid.createUidAsBytes(type, sValue);
}
return super.indexedValueForSearch(value);
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return termsQuery(Collections.singletonList(value), context);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
if (context == null) {
return super.termsQuery(values, context);
}
List<String> types = new ArrayList<>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
List<BytesRef> bValues = new ArrayList<>(values.size());
for (Object value : values) {
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
bValues.add(bValue);
} else {
// we use all non-child types, because we don't know if it's exact or not...
for (String type : types) {
bValues.add(Uid.createUidAsBytes(type, bValue));
}
}
}
return new TermsQuery(names.indexName(), bValues);
}
/**
* We don't need to analyze the text, and we need to convert it to UID...
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
@ -307,12 +312,28 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value instanceof BytesRef) {
BytesRef bytesRef = (BytesRef) value;
if (Uid.hasDelimiter(bytesRef)) {
return bytesRef;
}
return Uid.createUidAsBytes(typeAsBytes, bytesRef);
}
String sValue = value.toString();
if (sValue.indexOf(Uid.DELIMITER) == -1) {
return Uid.createUidAsBytes(type, sValue);
}
return super.indexedValueForSearch(value);
}
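indexedValueForSearch is one of the few query-time hooks that stays on the mapper rather than moving into ParentFieldType, because it needs the per-mapper parent type to build the uid. Roughly, for a mapper whose parent type is "blog" (illustrative values only):

    // indexedValueForSearch("1")       -> uid bytes equivalent to "blog#1"
    // indexedValueForSearch("blog#1")  -> passed through unchanged, the delimiter is already present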
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
@ -322,14 +343,16 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
if (!mergeResult.simulate()) {
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
this.fieldType = this.fieldType.clone();
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
));
}
}
this.fieldType.freeze();
}
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -58,13 +58,16 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = "_routing";
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new RoutingFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
@ -79,7 +82,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
private String path = Defaults.PATH;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
super(Defaults.NAME, Defaults.FIELD_TYPE);
}
public Builder required(boolean required) {
@ -121,6 +124,29 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
}
}
public static class RoutingFieldType extends MappedFieldType {
public RoutingFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected RoutingFieldType(RoutingFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new RoutingFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
}
private boolean required;
private final String path;
@ -129,15 +155,14 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
this(Defaults.FIELD_TYPE, Defaults.REQUIRED, Defaults.PATH, null, indexSettings);
}
protected RoutingFieldMapper(FieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), 1.0f, fieldType, false, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
this.required = required;
this.path = path;
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -159,16 +184,8 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
}
public String value(Document document) {
Field field = (Field) document.getField(names.indexName());
return field == null ? null : value(field);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
Field field = (Field) document.getField(fieldType.names().indexName());
return field == null ? null : (String)value(field);
}
@Override
@ -194,10 +211,10 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
String routing = context.sourceToParse().routing();
if (routing != null) {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
context.ignoredValue(names.indexName(), routing);
context.ignoredValue(fieldType.names().indexName(), routing);
return;
}
fields.add(new Field(names.indexName(), routing, fieldType));
fields.add(new Field(fieldType.names().indexName(), routing, fieldType));
}
}
}

View File

@ -20,16 +20,18 @@
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
@ -53,10 +55,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
public static final String NAME = CONTENT_TYPE;
public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
public static final FieldType SIZE_FIELD_TYPE = new FieldType(IntegerFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType SIZE_FIELD_TYPE = IntegerFieldMapper.Defaults.FIELD_TYPE.clone();
static {
SIZE_FIELD_TYPE.setStored(true);
SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT);
SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
SIZE_FIELD_TYPE.freeze();
}
}
@ -66,7 +70,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.SIZE_FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT);
super(Defaults.NAME, Defaults.SIZE_FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}
@ -77,8 +81,19 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
@Override
public SizeFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new SizeFieldMapper(enabledState, fieldType, fieldDataSettings, context.indexSettings());
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 32;
}
}
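The new makeNumberAnalyzer and maxPrecisionStep overrides feed the shared numeric builder; that base class is not part of this hunk, but judging from the analyzers configured on the timestamp defaults further down, setupFieldType presumably does something along these lines (an assumption, including the precisionStep field, not code from the commit):

    protected void setupFieldType(BuilderContext context) {
        int step = Math.min(precisionStep, maxPrecisionStep());              // assumed builder-held precision step
        fieldType.setNumericPrecisionStep(step);
        fieldType.setIndexAnalyzer(makeNumberAnalyzer(step));
        fieldType.setSearchAnalyzer(makeNumberAnalyzer(Integer.MAX_VALUE));  // exact terms at search time
    }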
public static class TypeParser implements Mapper.TypeParser {
@ -104,12 +119,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
private EnabledAttributeMapper enabledState;
public SizeFieldMapper(Settings indexSettings) {
this(Defaults.ENABLED_STATE, new FieldType(Defaults.SIZE_FIELD_TYPE), null, indexSettings);
this(Defaults.ENABLED_STATE, Defaults.SIZE_FIELD_TYPE.clone(), null, indexSettings);
}
public SizeFieldMapper(EnabledAttributeMapper enabled, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(Defaults.NAME), Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE,
Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, fieldDataSettings,
public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, Defaults.NULL_VALUE,
Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings,
indexSettings, MultiFields.empty(), null);
this.enabledState = enabled;
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import com.google.common.base.Objects;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
@ -45,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -78,12 +78,15 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
public static final long COMPRESS_THRESHOLD = -1;
public static final String FORMAT = null; // default format is to use the one provided
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new SourceFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
@ -138,7 +141,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
@Override
public SourceFieldMapper build(BuilderContext context) {
return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings());
return new SourceFieldMapper(enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings());
}
}
@ -195,6 +198,39 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
}
}
public static class SourceFieldType extends MappedFieldType {
public SourceFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected SourceFieldType(SourceFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new SourceFieldType(this);
}
@Override
public byte[] value(Object value) {
if (value == null) {
return null;
}
BytesReference bValue;
if (value instanceof BytesRef) {
bValue = new BytesArray((BytesRef) value);
} else {
bValue = (BytesReference) value;
}
try {
return CompressorFactory.uncompressIfNeeded(bValue).toBytes();
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
}
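SourceFieldType.value() is now the single place that turns a stored _source back into raw bytes, transparently decompressing when needed. A hypothetical usage sketch; getStoredSourceBytes is an invented helper standing in for however the caller obtained the stored field:

    SourceFieldMapper.SourceFieldType sourceType = new SourceFieldMapper.SourceFieldType();
    BytesRef stored = getStoredSourceBytes(docId);    // invented helper returning the stored _source value
    byte[] sourceBytes = sourceType.value(stored);    // uncompressed bytes of the original document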
private final boolean enabled;
@ -212,13 +248,12 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
private XContentType formatContentType;
public SourceFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings);
this(Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings);
}
protected SourceFieldMapper(String name, boolean enabled, String format, Boolean compress, long compressThreshold,
protected SourceFieldMapper(boolean enabled, String format, Boolean compress, long compressThreshold,
String[] includes, String[] excludes, Settings indexSettings) {
super(new Names(name, name, name, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, indexSettings); // Only stored.
super(Defaults.FIELD_TYPE.clone(), false, null, indexSettings); // Only stored.
this.enabled = enabled;
this.compress = compress;
this.compressThreshold = compressThreshold;
@ -247,7 +282,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -358,25 +393,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
if (!source.hasArray()) {
source = source.toBytesArray();
}
fields.add(new StoredField(names().indexName(), source.array(), source.arrayOffset(), source.length()));
}
@Override
public byte[] value(Object value) {
if (value == null) {
return null;
}
BytesReference bValue;
if (value instanceof BytesRef) {
bValue = new BytesArray((BytesRef) value);
} else {
bValue = (BytesReference) value;
}
try {
return CompressorFactory.uncompressIfNeeded(bValue).toBytes();
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length()));
}
@Override

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
@ -30,6 +29,9 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AlreadyExpiredException;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -59,12 +61,14 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
public static class Defaults extends LongFieldMapper.Defaults {
public static final String NAME = TTLFieldMapper.CONTENT_TYPE;
public static final FieldType TTL_FIELD_TYPE = new FieldType(LongFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType TTL_FIELD_TYPE = new TTLFieldType();
static {
TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
TTL_FIELD_TYPE.setStored(true);
TTL_FIELD_TYPE.setTokenized(false);
TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
TTL_FIELD_TYPE.freeze();
}
@ -78,7 +82,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
private long defaultTTL = Defaults.DEFAULT;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
}
public Builder enabled(EnabledAttributeMapper enabled) {
@ -93,8 +97,19 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
@Override
public TTLFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new TTLFieldMapper(fieldType, enabledState, defaultTTL, ignoreMalformed(context),coerce(context), fieldDataSettings, context.indexSettings());
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -121,18 +136,46 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
}
}
public static class TTLFieldType extends LongFieldType {
public TTLFieldType() {
}
protected TTLFieldType(TTLFieldType ref) {
super(ref);
}
@Override
public LongFieldType clone() {
return new TTLFieldType(this);
}
// Overrides valueForSearch to display the live remaining ttl value
@Override
public Object valueForSearch(Object value) {
long now;
SearchContext searchContext = SearchContext.current();
if (searchContext != null) {
now = searchContext.nowInMillis();
} else {
now = System.currentTimeMillis();
}
long val = value(value);
return val - now;
}
}
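Since the indexed value is the absolute expiration time (timestamp plus ttl, as set in innerParseCreateField further down), the override simply reports whatever time is left. Illustrative arithmetic only:

    long expirationTime = 1_000_000L;             // value stored at index time: timestamp + ttl
    long now            =   400_000L;             // SearchContext.current().nowInMillis(), or wall clock as fallback
    long remainingTtl   = expirationTime - now;   // 600_000 ms is what _ttl reports back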
private EnabledAttributeMapper enabledState;
private long defaultTTL;
public TTLFieldMapper(Settings indexSettings) {
this(new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
}
protected TTLFieldMapper(FieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit<Boolean> ignoreMalformed,
protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.PRECISION_STEP_64_BIT,
Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce,
null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null);
super(fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, MultiFields.empty(), null);
this.enabledState = enabled;
this.defaultTTL = defaultTTL;
}
@ -145,20 +188,6 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
return this.defaultTTL;
}
// Overrides valueForSearch to display the live remaining ttl value
@Override
public Object valueForSearch(Object value) {
long now;
SearchContext searchContext = SearchContext.current();
if (searchContext != null) {
now = searchContext.nowInMillis();
} else {
now = System.currentTimeMillis();
}
long val = value(value);
return val - now;
}
// Other implementation for realtime get display
public Object valueForSearch(long expirationTime) {
return expirationTime - System.currentTimeMillis();
@ -207,7 +236,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now);
}
// the expiration timestamp (timestamp + ttl) is set as field
fields.add(new CustomLongNumericField(this, expire, fieldType));
fields.add(new CustomLongNumericField(this, expire, (NumberFieldType)fieldType));
}
}
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
@ -32,6 +31,9 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -46,7 +48,6 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.timestamp;
@ -63,21 +64,26 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
public static final String NAME = "_timestamp";
// TODO: this should be removed
public static final FieldType PRE_20_FIELD_TYPE;
public static final FieldType FIELD_TYPE = new FieldType(DateFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType PRE_20_FIELD_TYPE;
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT);
public static final DateFieldType FIELD_TYPE = new TimestampFieldType();
static {
FIELD_TYPE.setStored(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER);
FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT));
FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE));
FIELD_TYPE.freeze();
PRE_20_FIELD_TYPE = new FieldType(FIELD_TYPE);
PRE_20_FIELD_TYPE = FIELD_TYPE.clone();
PRE_20_FIELD_TYPE.setStored(false);
PRE_20_FIELD_TYPE.freeze();
}
public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED;
public static final String PATH = null;
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT);
public static final String DEFAULT_TIMESTAMP = "now";
}
@ -85,13 +91,16 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
private String path = Defaults.PATH;
private FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER;
private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP;
private boolean explicitStore = false;
private Boolean ignoreMissing = null;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
}
DateFieldType fieldType() {
return (DateFieldType)fieldType;
}
public Builder enabled(EnabledAttributeMapper enabledState) {
@ -105,8 +114,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
}
public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
this.dateTimeFormatter = dateTimeFormatter;
return builder;
fieldType().setDateTimeFormatter(dateTimeFormatter);
return this;
}
public Builder defaultTimestamp(String defaultTimestamp) {
@ -131,9 +140,20 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
assert fieldType.stored();
fieldType.setStored(false);
}
return new TimestampFieldMapper(fieldType, docValues, enabledState, path, dateTimeFormatter, defaultTimestamp,
setupFieldType(context);
return new TimestampFieldMapper(fieldType, docValues, enabledState, path, defaultTimestamp,
ignoreMissing,
ignoreMalformed(context), coerce(context), normsLoading, fieldDataSettings, context.indexSettings());
ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings());
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter(), precisionStep);
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
@ -190,7 +210,29 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
}
}
private static FieldType defaultFieldType(Settings settings) {
public static class TimestampFieldType extends DateFieldType {
public TimestampFieldType() {}
protected TimestampFieldType(TimestampFieldType ref) {
super(ref);
}
@Override
public DateFieldType clone() {
return new TimestampFieldType(this);
}
/**
* Override the default behavior to return a timestamp
*/
@Override
public Object valueForSearch(Object value) {
return value(value);
}
}
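Where the inherited date behavior would format the value for display, _timestamp keeps returning the number itself. A small illustration, assuming value() passes a Long through unchanged:

    TimestampFieldMapper.TimestampFieldType tsType = new TimestampFieldMapper.TimestampFieldType();
    Object shown = tsType.valueForSearch(1432900000000L);  // stays the millisecond value, not a formatted date string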
private static MappedFieldType defaultFieldType(Settings settings) {
return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
}
@ -198,23 +240,18 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
private final String path;
private final String defaultTimestamp;
private final FieldType defaultFieldType;
private final MappedFieldType defaultFieldType;
private final Boolean ignoreMissing;
public TimestampFieldMapper(Settings indexSettings) {
this(new FieldType(defaultFieldType(indexSettings)), null, Defaults.ENABLED, Defaults.PATH, Defaults.DATE_TIME_FORMATTER, Defaults.DEFAULT_TIMESTAMP,
null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, indexSettings);
this(defaultFieldType(indexSettings).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP,
null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
}
protected TimestampFieldMapper(FieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,
FormatDateTimeFormatter dateTimeFormatter, String defaultTimestamp,
Boolean ignoreMissing,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, Loading normsLoading,
protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,
String defaultTimestamp, Boolean ignoreMissing, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), dateTimeFormatter,
Defaults.PRECISION_STEP_64_BIT, Defaults.BOOST, fieldType, docValues,
Defaults.NULL_VALUE, TimeUnit.MILLISECONDS /*always milliseconds*/,
ignoreMalformed, coerce, null, normsLoading, fieldDataSettings,
super(fieldType, docValues, Defaults.NULL_VALUE, ignoreMalformed, coerce, fieldDataSettings,
indexSettings, MultiFields.empty(), null);
this.enabledState = enabledState;
this.path = path;
@ -224,7 +261,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return defaultFieldType;
}
@ -249,19 +286,6 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
return this.ignoreMissing;
}
@Override
public FormatDateTimeFormatter dateTimeFormatter() {
return this.dateTimeFormatter;
}
/**
* Override the default behavior to return a timestamp
*/
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
@ -281,14 +305,14 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (enabledState.enabled) {
long timestamp = context.sourceToParse().timestamp();
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !hasDocValues()) {
context.ignoredValue(names.indexName(), String.valueOf(timestamp));
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !fieldType().hasDocValues()) {
context.ignoredValue(fieldType.names().indexName(), String.valueOf(timestamp));
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, fieldType));
fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, (NumberFieldType)fieldType));
}
if (hasDocValues()) {
fields.add(new NumericDocValuesField(names.indexName(), timestamp));
if (fieldType().hasDocValues()) {
fields.add(new NumericDocValuesField(fieldType.names().indexName(), timestamp));
}
}
}
@ -306,10 +330,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
// if all are defaults, no sense to write it at all
if (!includeDefaults && indexed == indexedDefault && customFieldDataSettings == null &&
fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
&& dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format())
fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
&& fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())
&& Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)
&& defaultDocValues() == hasDocValues()) {
&& defaultDocValues() == fieldType().hasDocValues()) {
return builder;
}
builder.startObject(CONTENT_TYPE);
@ -326,8 +350,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
if (includeDefaults || path != Defaults.PATH) {
builder.field("path", path);
}
if (includeDefaults || !dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format())) {
builder.field("format", dateTimeFormatter.format());
if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())) {
builder.field("format", fieldType().dateTimeFormatter().format());
}
if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) {
builder.field("default", defaultTimestamp);
@ -338,7 +362,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
builder.endObject();
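The TimestampFieldMapper constructors above call defaultFieldType(indexSettings).clone() because the shared defaults are frozen in static initializers. The snippet below is a minimal, self-contained sketch of that freeze/clone contract, assuming only what the diff shows; FrozenFieldTypeSketch is a hypothetical stand-in for illustration, not MappedFieldType itself.

// A minimal sketch: shared defaults are frozen once, and each mapper clones
// them before changing per-field settings.
class FrozenFieldTypeSketch {
    private boolean frozen = false;
    private boolean stored = false;

    void freeze() {
        frozen = true;                  // after this, setters must fail
    }

    void setStored(boolean stored) {
        if (frozen) {
            throw new IllegalStateException("frozen field type: clone() before mutating");
        }
        this.stored = stored;
    }

    boolean stored() {
        return stored;
    }

    @Override
    public FrozenFieldTypeSketch clone() {
        FrozenFieldTypeSketch copy = new FrozenFieldTypeSketch();  // clones start out unfrozen
        copy.stored = this.stored;
        return copy;
    }
}
// Usage mirrors the constructors in the diff:
//   MappedFieldType fieldType = defaultFieldType(indexSettings).clone();
//   fieldType.setStored(true);   // allowed, because the clone is not frozen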

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
@ -36,6 +35,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -65,13 +65,16 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = TypeFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new TypeFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
}
}
@ -79,13 +82,14 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Builder extends AbstractFieldMapper.Builder<Builder, TypeFieldMapper> {
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
super(Defaults.NAME, Defaults.FIELD_TYPE);
indexName = Defaults.NAME;
}
@Override
public TypeFieldMapper build(BuilderContext context) {
return new TypeFieldMapper(name, indexName, boost, fieldType, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new TypeFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
}
}
@ -101,17 +105,53 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public TypeFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null, indexSettings);
public static class TypeFieldType extends MappedFieldType {
public TypeFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected TypeFieldType(TypeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new TypeFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (indexOptions() == IndexOptions.NONE) {
return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value)))));
}
return new ConstantScoreQuery(new TermQuery(createTerm(value)));
}
}
public TypeFieldMapper(String name, String indexName, float boost, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
public TypeFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
}
public TypeFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -120,26 +160,6 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
return new FieldDataType("string");
}
@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (fieldType.indexOptions() == IndexOptions.NONE) {
return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value)))));
}
return new ConstantScoreQuery(new TermQuery(createTerm(value)));
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public void preParse(ParseContext context) throws IOException {
@ -161,9 +181,9 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
return;
}
fields.add(new Field(names.indexName(), context.type(), fieldType));
if (hasDocValues()) {
fields.add(new SortedSetDocValuesField(names.indexName(), new BytesRef(context.type())));
fields.add(new Field(fieldType.names().indexName(), context.type(), fieldType));
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(context.type())));
}
}
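The termQuery override in TypeFieldType above falls back to the _uid field when _type itself carries no postings. A short sketch of that fallback using plain Lucene queries follows; the class and method names are illustrative, and the "type#id" uid layout is taken from the Uid.createUid usage elsewhere in this diff.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// Sketch only: mirrors TypeFieldType.termQuery, with the indexed/unindexed
// decision passed in explicitly instead of read from indexOptions().
final class TypeQuerySketch {
    static Query typeQuery(String type, boolean typeFieldIndexed) {
        if (typeFieldIndexed == false) {
            // _type has no postings, but every document's _uid starts with "type#",
            // so a prefix query on _uid selects the same documents.
            return new ConstantScoreQuery(new PrefixQuery(new Term("_uid", type + "#")));
        }
        // Normal case: a constant-scoring term query on the _type field.
        return new ConstantScoreQuery(new TermQuery(new Term("_type", type)));
    }
}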

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
@ -32,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -61,17 +61,20 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = UidFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
public static final FieldType NESTED_FIELD_TYPE;
public static final MappedFieldType FIELD_TYPE = new UidFieldType();
public static final MappedFieldType NESTED_FIELD_TYPE;
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.freeze();
NESTED_FIELD_TYPE = new FieldType(FIELD_TYPE);
NESTED_FIELD_TYPE = FIELD_TYPE.clone();
NESTED_FIELD_TYPE.setStored(false);
NESTED_FIELD_TYPE.freeze();
}
@ -86,7 +89,8 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
@Override
public UidFieldMapper build(BuilderContext context) {
return new UidFieldMapper(name, indexName, docValues, fieldDataSettings, context.indexSettings());
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new UidFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings());
}
}
@ -102,13 +106,36 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public UidFieldMapper(Settings indexSettings) {
this(Defaults.NAME, Defaults.NAME, null, null, indexSettings);
public static class UidFieldType extends MappedFieldType {
public UidFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected UidFieldType(UidFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new UidFieldType(this);
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
}
protected UidFieldMapper(String name, String indexName, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), docValuesEnabled(docValues, indexSettings),
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings);
public UidFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
}
protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, docValuesEnabled(docValues, indexSettings), fieldDataSettings, indexSettings);
}
static Boolean docValuesEnabled(Boolean docValues, Settings indexSettings) {
@ -119,7 +146,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -171,21 +198,13 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
Field uid = new Field(NAME, Uid.createUid(context.stringBuilder(), context.type(), context.id()), Defaults.FIELD_TYPE);
context.uid(uid);
fields.add(uid);
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
fields.add(new BinaryDocValuesField(NAME, new BytesRef(uid.stringValue())));
}
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
public Term term(String uid) {
return createTerm(uid);
return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid));
}
@Override
@ -210,7 +229,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
}
builder.endObject();
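The term(String uid) change above builds the Lucene term from the field type's index name and its indexed representation instead of a mapper-level createTerm helper. The sketch below shows what that amounts to for a keyword-analyzed field such as _uid; the helper name is hypothetical and the "type#id" format is assumed from Uid.createUid in the diff.

import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;

final class UidTermSketch {
    // For a keyword-analyzed, untokenized field, the indexed form of a value is just
    // its UTF-8 bytes, so fieldType().indexedValueForSearch(uid) reduces to new BytesRef(uid).
    static Term uidTerm(String type, String id) {
        String uid = type + "#" + id;              // Uid.createUid(type, id) layout
        return new Term("_uid", new BytesRef(uid));
    }
}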

View File

@ -20,13 +20,14 @@
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -52,9 +53,13 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper
public static class Defaults {
public static final String NAME = VersionFieldMapper.NAME;
public static final float BOOST = 1.0f;
public static final FieldType FIELD_TYPE = NumericDocValuesField.TYPE;
public static final MappedFieldType FIELD_TYPE = new VersionFieldType();
static {
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC);
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, VersionFieldMapper> {
@ -86,6 +91,31 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper
}
}
public static class VersionFieldType extends MappedFieldType {
public VersionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
protected VersionFieldType(VersionFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new VersionFieldType(this);
}
@Override
public Long value(Object value) {
if (value == null || (value instanceof Long)) {
return (Long) value;
} else {
return Long.parseLong(value.toString());
}
}
}
private final ThreadLocal<Field> fieldCache = new ThreadLocal<Field>() {
@Override
protected Field initialValue() {
@ -94,7 +124,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper
};
public VersionFieldMapper(Settings indexSettings) {
super(new Names(NAME, NAME, NAME, NAME), Defaults.BOOST, Defaults.FIELD_TYPE, true, null, null, null, null, null, indexSettings);
super(Defaults.FIELD_TYPE, true, null, indexSettings);
}
@Override
@ -116,15 +146,6 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper
return null;
}
@Override
public Long value(Object value) {
if (value == null || (value instanceof Long)) {
return (Long) value;
} else {
return Long.parseLong(value.toString());
}
}
@Override
public void postParse(ParseContext context) throws IOException {
// In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents
@ -136,7 +157,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

View File

@ -20,10 +20,8 @@
package org.elasticsearch.index.mapper.ip;
import com.google.common.net.InetAddresses;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
@ -43,6 +41,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericAnalyzer;
import org.elasticsearch.index.analysis.NumericTokenizer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -51,7 +50,6 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
@ -101,7 +99,7 @@ public class IpFieldMapper extends NumberFieldMapper {
public static class Defaults extends NumberFieldMapper.Defaults {
public static final String NULL_VALUE = null;
public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
public static final MappedFieldType FIELD_TYPE = new IpFieldType();
static {
FIELD_TYPE.freeze();
@ -113,7 +111,7 @@ public class IpFieldMapper extends NumberFieldMapper {
protected String nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
}
@ -124,13 +122,23 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public IpFieldMapper build(BuilderContext context) {
fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
IpFieldMapper fieldMapper = new IpFieldMapper(buildNames(context),
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
setupFieldType(context);
IpFieldMapper fieldMapper = new IpFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
@Override
protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
String name = precisionStep == Integer.MAX_VALUE ? "_ip/max" : ("_ip/" + precisionStep);
return new NamedAnalyzer(name, new NumericIpAnalyzer(precisionStep));
}
@Override
protected int maxPrecisionStep() {
return 64;
}
}
public static class TypeParser implements Mapper.TypeParser {
@ -154,21 +162,90 @@ public class IpFieldMapper extends NumberFieldMapper {
}
}
public static class IpFieldType extends NumberFieldType {
public IpFieldType() {}
protected IpFieldType(IpFieldType ref) {
super(ref);
}
@Override
public NumberFieldType clone() {
return new IpFieldType(this);
}
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return ipToLong(value.toString());
}
/**
* IPs should return as a string.
*/
@Override
public Object valueForSearch(Object value) {
Long val = value(value);
if (val == null) {
return null;
}
return longToIp(val);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = ipToLong(value);
long iSim;
try {
iSim = ipToLong(fuzziness.asString());
} catch (IllegalArgumentException e) {
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
}
}
private String nullValue;
protected IpFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
protected IpFieldMapper(MappedFieldType fieldType, Boolean docValues,
String nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, precisionStep, boost, fieldType, docValues,
ignoreMalformed, coerce, new NamedAnalyzer("_ip/" + precisionStep, new NumericIpAnalyzer(precisionStep)),
new NamedAnalyzer("_ip/max", new NumericIpAnalyzer(Integer.MAX_VALUE)),
similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
super(fieldType, docValues, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
}
@Override
public FieldType defaultFieldType() {
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@ -177,45 +254,7 @@ public class IpFieldMapper extends NumberFieldMapper {
return new FieldDataType("long");
}
@Override
protected int maxPrecisionStep() {
return 64;
}
@Override
public Long value(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof BytesRef) {
return Numbers.bytesToLong((BytesRef) value);
}
return ipToLong(value.toString());
}
/**
* IPs should return as a string.
*/
@Override
public Object valueForSearch(Object value) {
Long val = value(value);
if (val == null) {
return null;
}
return longToIp(val);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
BytesRefBuilder bytesRef = new BytesRefBuilder();
NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
return bytesRef.get();
}
private long parseValue(Object value) {
private static long parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
@ -225,29 +264,6 @@ public class IpFieldMapper extends NumberFieldMapper {
return ipToLong(value.toString());
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = ipToLong(value);
long iSim;
try {
iSim = ipToLong(fuzziness.asString());
} catch (IllegalArgumentException e) {
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
iValue - iSim,
iValue + iSim,
true, true);
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
}
@Override
public Query nullValueFilter() {
if (nullValue == null) {
@ -276,16 +292,16 @@ public class IpFieldMapper extends NumberFieldMapper {
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), ipAsString, boost);
context.allEntries().addText(fieldType.names().fullName(), ipAsString, fieldType.boost());
}
final long value = ipToLong(ipAsString);
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType);
field.setBoost(boost);
field.setBoost(fieldType.boost());
fields.add(field);
}
if (hasDocValues()) {
if (fieldType().hasDocValues()) {
addDocValue(context, fields, value);
}
}
@ -310,8 +326,8 @@ public class IpFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", precisionStep);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);

View File

@ -165,7 +165,7 @@ public class CommonTermsQueryParser implements QueryParser {
String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.names().indexName();
field = mapper.fieldType().names().indexName();
} else {
field = fieldName;
}
@ -173,7 +173,7 @@ public class CommonTermsQueryParser implements QueryParser {
Analyzer analyzer = null;
if (queryAnalyzer == null) {
if (mapper != null) {
analyzer = mapper.searchAnalyzer();
analyzer = mapper.fieldType().searchAnalyzer();
}
if (analyzer == null && mapper != null) {
analyzer = parseContext.getSearchAnalyzer(mapper);

View File

@ -98,7 +98,7 @@ public class ExistsQueryParser implements QueryParser {
if (fieldNamesMapper != null && fieldNamesMapper.enabled()) {
final String f;
if (mapper != null) {
f = mapper.names().indexName();
f = mapper.fieldType().names().indexName();
} else {
f = field;
}

View File

@ -92,7 +92,7 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
FieldMapper mapper = parseContext.fieldMapper(field);
if (mapper != null) {
field = mapper.names().indexName();
field = mapper.fieldType().names().indexName();
}
FieldMaskingSpanQuery query = new FieldMaskingSpanQuery(inner, field);

View File

@ -150,9 +150,9 @@ public class GeoShapeQueryParser implements QueryParser {
GeoShapeFieldMapper shapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
PrefixTreeStrategy strategy = shapeFieldMapper.defaultStrategy();
PrefixTreeStrategy strategy = shapeFieldMapper.fieldType().defaultStrategy();
if (strategyName != null) {
strategy = shapeFieldMapper.resolveStrategy(strategyName);
strategy = shapeFieldMapper.fieldType().resolveStrategy(strategyName);
}
Query query;
if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) {

View File

@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
@ -71,11 +72,11 @@ public class GeohashCellQuery {
* @return a new GeoBoundingBox filter
*/
public static Query create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List<CharSequence> geohashes) {
if (fieldMapper.geoHashStringMapper() == null) {
MappedFieldType geoHashMapper = fieldMapper.fieldType().geohashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
}
StringFieldMapper geoHashMapper = fieldMapper.geoHashStringMapper();
if (geohashes == null || geohashes.size() == 0) {
return geoHashMapper.termQuery(geohash, context);
} else {
@ -246,7 +247,7 @@ public class GeohashCellQuery {
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
if (!geoMapper.isEnableGeohashPrefix()) {
if (!geoMapper.fieldType().isGeohashPrefixEnabled()) {
throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName
+ "], geohash_prefix is not enabled");
}

View File

@ -116,7 +116,7 @@ public class MissingQueryParser implements QueryParser {
if (fieldNamesMapper != null && fieldNamesMapper.enabled()) {
final String f;
if (mapper != null) {
f = mapper.names().indexName();
f = mapper.fieldType().names().indexName();
} else {
f = field;
}

View File

@ -167,7 +167,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String field = parser.text();
FieldMapper mapper = parseContext.fieldMapper(field);
moreLikeFields.add(mapper == null ? field : mapper.names().indexName());
moreLikeFields.add(mapper == null ? field : mapper.fieldType().names().indexName());
}
} else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {

View File

@ -287,8 +287,8 @@ public class QueryParseContext {
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchAnalyzer(FieldMapper mapper) {
if (mapper.searchAnalyzer() != null) {
return mapper.searchAnalyzer();
if (mapper.fieldType().searchAnalyzer() != null) {
return mapper.fieldType().searchAnalyzer();
}
return mapperService().searchAnalyzer();
}
@ -297,8 +297,8 @@ public class QueryParseContext {
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchQuoteAnalyzer(FieldMapper mapper) {
if (mapper.searchQuoteAnalyzer() != null) {
return mapper.searchQuoteAnalyzer();
if (mapper.fieldType().searchQuoteAnalyzer() != null) {
return mapper.fieldType().searchQuoteAnalyzer();
}
return mapperService().searchQuoteAnalyzer();
}
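Both helpers above follow the same resolution order: prefer the analyzer configured on the field type, otherwise fall back to the index-wide default. A compact sketch of that pattern follows; the class and method names are illustrative, not the QueryParseContext code.

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.mapper.MappedFieldType;

final class AnalyzerFallbackSketch {
    // fieldType.searchAnalyzer() is null when the mapping does not set one explicitly;
    // in that case the caller's index-level default analyzer is used instead.
    static Analyzer resolveSearchAnalyzer(MappedFieldType fieldType, Analyzer indexDefault) {
        Analyzer perField = fieldType.searchAnalyzer();
        return perField != null ? perField : indexDefault;
    }
}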

View File

@ -128,7 +128,7 @@ public class RangeQueryParser implements QueryParser {
"[range] time_zone when using ms since epoch format as it's UTC based can not be applied to [" + fieldName
+ "]");
}
query = ((DateFieldMapper) mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext);
query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext);
} else {
if (timeZone != null) {
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field ["

View File

@ -132,7 +132,7 @@ public class SimpleQueryStringParser implements QueryParser {
} else {
FieldMapper mapper = parseContext.fieldMapper(fField);
if (mapper != null) {
fieldsAndWeights.put(mapper.names().indexName(), fBoost);
fieldsAndWeights.put(mapper.fieldType().names().indexName(), fBoost);
} else {
fieldsAndWeights.put(fField, fBoost);
}

View File

@ -95,7 +95,7 @@ public class SpanTermQueryParser implements QueryParser {
BytesRef valueBytes = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.names().indexName();
fieldName = mapper.fieldType().names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
}
if (valueBytes == null) {

View File

@ -160,7 +160,7 @@ public class TermsQueryParser implements QueryParser {
FieldMapper fieldMapper = parseContext.fieldMapper(fieldName);
if (fieldMapper != null) {
fieldName = fieldMapper.names().indexName();
fieldName = fieldMapper.fieldType().names().indexName();
}
if (lookupId != null) {

View File

@ -95,7 +95,7 @@ public class WildcardQueryParser implements QueryParser {
BytesRef valueBytes;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.names().indexName();
fieldName = mapper.fieldType().names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
} else {
valueBytes = new BytesRef(value);

View File

@ -263,7 +263,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
long origin = SearchContext.current().nowInMillis();
if (originString != null) {
origin = dateFieldMapper.parseToMilliseconds(originString);
origin = dateFieldMapper.fieldType().parseToMilliseconds(originString, false, null, null);
}
if (scaleString == null) {

View File

@ -157,7 +157,7 @@ public class MatchQuery {
final String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.names().indexName();
field = mapper.fieldType().names().indexName();
} else {
field = fieldName;
}

View File

@ -165,7 +165,7 @@ public class MultiMatchQuery extends MatchQuery {
FieldMapper mapper = parseContext.fieldMapper(name);
if (mapper != null) {
Analyzer actualAnalyzer = getAnalyzer(mapper);
name = mapper.names().indexName();
name = mapper.fieldType().names().indexName();
if (!groups.containsKey(actualAnalyzer)) {
groups.put(actualAnalyzer, new ArrayList<FieldAndMapper>());
}

View File

@ -31,7 +31,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
public class IndexedGeoBoundingBoxQuery {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
if (!fieldMapper.isEnableLatLon()) {
if (!fieldMapper.fieldType().isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldMapper.name() + "], can't use indexed filter on it");
}
//checks to see if bounding box crosses 180 degrees
@ -45,16 +45,16 @@ public class IndexedGeoBoundingBoxQuery {
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
BooleanQuery filter = new BooleanQuery();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldMapper.lonMapper().rangeFilter(null, bottomRight.lon(), true, true), Occur.SHOULD);
filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), null, true, true), Occur.SHOULD);
filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
BooleanQuery filter = new BooleanQuery();
filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}
}
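The two filters above exist because a bounding box that crosses the 180° meridian cannot be expressed as a single longitude range: the western and eastern halves are OR-ed together (two SHOULD clauses with a minimum of one match), while latitude stays a single MUST range. The plain-double sketch below illustrates that membership test; the crossing check (topLeft.lon > bottomRight.lon) is an assumption based on the "crosses 180 degrees" comment in the diff, and the class name is hypothetical.

final class BoundingBoxSketch {
    // topLeft = (maxLat, minLon), bottomRight = (minLat, maxLon); if the "minimum"
    // longitude is numerically greater than the "maximum", the box wraps across 180°.
    static boolean crossesDateline(double topLeftLon, double bottomRightLon) {
        return topLeftLon > bottomRightLon;
    }

    static boolean contains(double lat, double lon,
                            double topLeftLat, double topLeftLon,
                            double bottomRightLat, double bottomRightLon) {
        boolean latInRange = lat <= topLeftLat && lat >= bottomRightLat;      // the MUST clause
        boolean lonInRange = crossesDateline(topLeftLon, bottomRightLon)
                ? (lon >= topLeftLon || lon <= bottomRightLon)                // two SHOULD clauses
                : (lon >= topLeftLon && lon <= bottomRightLon);               // one range clause
        return latInRange && lonInRange;
    }
}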

View File

@ -100,7 +100,7 @@ public class SimilarityService extends AbstractIndexComponent {
@Override
public Similarity get(String name) {
FieldMapper mapper = mapperService.smartNameFieldMapper(name);
return (mapper != null && mapper.similarity() != null) ? mapper.similarity().get() : defaultSimilarity;
return (mapper != null && mapper.fieldType().similarity() != null) ? mapper.fieldType().similarity().get() : defaultSimilarity;
}
}
}

View File

@ -236,7 +236,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString());
} else {
analyzer = mapperService.smartNameFieldMapper(field).indexAnalyzer();
analyzer = mapperService.smartNameFieldMapper(field).fieldType().indexAnalyzer();
}
if (analyzer == null) {
analyzer = mapperService.analysisService().defaultIndexAnalyzer();

View File

@ -37,6 +37,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.index.shard.IndexShard;
@ -99,7 +100,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
this.closed = true;
}
public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) {
return new IndexFieldCache(logger, cache, indicesFieldDataCacheListener, indexService, index, fieldNames, fieldDataType);
}
@ -139,12 +140,12 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
private final ESLogger logger;
private final IndexService indexService;
final Index index;
final FieldMapper.Names fieldNames;
final MappedFieldType.Names fieldNames;
final FieldDataType fieldDataType;
private final Cache<Key, Accountable> cache;
private final IndicesFieldDataCacheListener indicesFieldDataCacheListener;
IndexFieldCache(ESLogger logger, final Cache<Key, Accountable> cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
IndexFieldCache(ESLogger logger, final Cache<Key, Accountable> cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) {
this.logger = logger;
this.indexService = indexService;
this.index = index;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
/**
@ -43,13 +44,14 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen
}
@Override
public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) {
public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) {
}
@Override
public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]";
circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes);
}
}

View File

@ -60,8 +60,8 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex {
if (tokenStream != null) {
memoryIndex.addField(field.name(), tokenStream, field.boost());
}
} catch (IOException e) {
throw new ElasticsearchException("Failed to create token stream", e);
} catch (Exception e) {
throw new ElasticsearchException("Failed to create token stream for [" + field.name() + "]", e);
}
}
context.initialize(new DocEngineSearcher(memoryIndex), parsedDocument);

View File

@ -59,7 +59,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.TemplateQueryParser;
import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
@ -840,8 +840,12 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final ObjectSet<String> warmUp = new ObjectHashSet<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final String indexName = fieldMapper.names().indexName();
if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms() && fieldMapper.normsLoading(defaultLoading) == Loading.EAGER) {
final String indexName = fieldMapper.fieldType().names().indexName();
Loading normsLoading = fieldMapper.fieldType().normsLoading();
if (normsLoading == null) {
normsLoading = defaultLoading;
}
if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms() && normsLoading == Loading.EAGER) {
warmUp.add(indexName);
}
}
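The warm-up loop above now reads the per-field norms loading from the field type and only falls back to the index-wide default when it is unset. The sketch below isolates that predicate; the helper and class names are illustrative, and only methods shown in the diff (normsLoading, indexOptions, omitNorms) are assumed.

import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;

final class NormsWarmupSketch {
    static boolean shouldWarmNorms(MappedFieldType fieldType, Loading defaultLoading) {
        Loading normsLoading = fieldType.normsLoading();   // per-field setting, may be null
        if (normsLoading == null) {
            normsLoading = defaultLoading;                 // fall back to the index-wide default
        }
        return fieldType.indexOptions() != IndexOptions.NONE   // field is indexed
                && fieldType.omitNorms() == false               // and keeps norms
                && normsLoading == Loading.EAGER;               // and asks for eager loading
    }
}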
@ -896,7 +900,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final Map<String, FieldMapper> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldDataType();
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
if (fieldDataType == null) {
continue;
}
@ -904,7 +908,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
continue;
}
final String indexName = fieldMapper.names().indexName();
final String indexName = fieldMapper.fieldType().names().indexName();
if (warmUp.containsKey(indexName)) {
continue;
}
@ -924,10 +928,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldMapper).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.fieldType().names().fullName());
} finally {
latch.countDown();
}
@ -950,14 +954,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final Map<String, FieldMapper> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldDataType();
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
if (fieldDataType == null) {
continue;
}
if (fieldDataType.getLoading() != Loading.EAGER_GLOBAL_ORDINALS) {
continue;
}
final String indexName = fieldMapper.names().indexName();
final String indexName = fieldMapper.fieldType().names().indexName();
if (warmUpGlobalOrdinals.containsKey(indexName)) {
continue;
}
@ -976,10 +980,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper);
ifd.loadGlobal(context.reader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.fieldType().names().fullName());
} finally {
latch.countDown();
}

View File

@ -89,7 +89,7 @@ public class ChildrenParser implements Aggregator.Parser {
parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
childFilter = new QueryWrapperFilter(childDocMapper.typeFilter());
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper);
config.fieldContext(new FieldContext(parentFieldMapper.names().indexName(), parentChildIndexFieldData, parentFieldMapper));
config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper));
} else {
config.unmapped(true);
}

View File

@ -105,7 +105,7 @@ public class AggregationContext {
if (config.fieldContext != null && config.fieldContext.mapper() instanceof DateFieldMapper) {
final DateFieldMapper mapper = (DateFieldMapper) config.fieldContext.mapper();
try {
missing = mapper.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
missing = mapper.fieldType().dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
} catch (IllegalArgumentException e) {
throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e);
}

View File

@ -72,7 +72,7 @@ public class ValueFormat {
}
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper));
return new DateTime(mapper.fieldType().dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper));
}
public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {

View File

@ -105,7 +105,7 @@ public interface ValueFormatter extends Streamable {
private DateTimeZone timeZone = DateTimeZone.UTC;
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.dateTimeFormatter());
return new DateTime(mapper.fieldType().dateTimeFormatter());
}
static final byte ID = 2;

View File

@ -110,7 +110,7 @@ public interface ValueParser {
}
public static DateMath mapper(DateFieldMapper mapper) {
return new DateMath(new DateMathParser(mapper.dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
}
}

View File

@ -152,7 +152,7 @@ public class FetchPhase implements SearchPhase {
if (fieldNames == null) {
fieldNames = new HashSet<>();
}
fieldNames.add(mapper.names().indexName());
fieldNames.add(mapper.fieldType().names().indexName());
} else {
if (extractFieldNames == null) {
extractFieldNames = newArrayList();

Some files were not shown because too many files have changed in this diff.